diff --git a/.env.example b/.env.example index ce2d956a4..ce8220e01 100644 --- a/.env.example +++ b/.env.example @@ -6,25 +6,18 @@ COZO_HOST=http://memory-store:9070 COZO_PORT=9070 COZO_ROCKSDB_DIR=cozo.db DTYPE=float16 -EMBEDDING_SERVICE_URL=http://text-embeddings-inference/embed +EMBEDDING_SERVICE_BASE=http://text-embeddings-inference +EMBEDDING_SERVICE_URL=${EMBEDDING_SERVICE_BASE}/embed GATEWAY_PORT=80 GPU_MEMORY_UTILIZATION=0.90 -HF_TOKEN="" -HUGGING_FACE_HUB_TOKEN="" +HF_TOKEN= +HUGGING_FACE_HUB_TOKEN= JWT_SHARED_KEY= MAX_MODEL_LEN=8192 MAX_NUM_SEQS=1 MNT_DIR=/data -MODEL_API_KEY=myauthkey -MODEL_API_KEY_HEADER_NAME=Authorization -MODEL_API_URL=http://model-serving:8000 -MODEL_INFERENCE_URL=http://model-serving:8000/v1 -MODEL_ID=BAAI/bge-m3 - -# MODEL_NAME="OpenPipe/Hermes-2-Theta-Llama-3-8B-32k" -MODEL_NAME="julep-ai/Hermes-2-Theta-Llama-3-8B" SKIP_CHECK_DEVELOPER_HEADERS=true SUMMARIZATION_TOKENS_THRESHOLD=2048 @@ -40,4 +33,22 @@ WORKER_URL=temporal:7233 AGENTS_API_DEBUG=false OPENAI_API_KEY= -ANTHROPIC_API_KEY= \ No newline at end of file +ANTHROPIC_API_KEY= +GROQ_API_KEY= +CLOUDFLARE_API_KEY= +CLOUDFLARE_ACCOUNT_ID= +NVIDIA_NIM_API_KEY= +GITHUB_API_KEY= +VOYAGE_API_KEY= +GOOGLE_APPLICATION_CREDENTIALS= + +LITELLM_URL=http://litellm:4000 +POSTGRES_DB=litellm +POSTGRES_USER=llmproxy +POSTGRES_PASSWORD= +LITELLM_DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@litellm-db:5432/${POSTGRES_DB} +LITELLM_MASTER_KEY= +LITELLM_REDIS_HOST=litellm-redis +LITELLM_REDIS_PORT=6379 +LITELLM_REDIS_PASSWORD= +REDIS_ARGS="--requirepass ${LITELLM_REDIS_PASSWORD}" \ No newline at end of file diff --git a/.github/workflows/lint-and-format.yml b/.github/workflows/lint-and-format.yml index 88ae16211..a58ec8737 100644 --- a/.github/workflows/lint-and-format.yml +++ b/.github/workflows/lint-and-format.yml @@ -9,7 +9,7 @@ jobs: strategy: matrix: - directory: [agents-api, model-serving, sdks/python] + directory: [agents-api, sdks/python] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/push-to-hub.yml b/.github/workflows/push-to-hub.yml index 80e76601c..9e8b6a428 100644 --- a/.github/workflows/push-to-hub.yml +++ b/.github/workflows/push-to-hub.yml @@ -131,7 +131,6 @@ jobs: service-directory: - gateway - memory-store - # - model-serving steps: - uses: actions/checkout@v4 diff --git a/agents-api/.tool-versions b/agents-api/.tool-versions index 47cd22e3c..8aa451a5c 100644 --- a/agents-api/.tool-versions +++ b/agents-api/.tool-versions @@ -1 +1 @@ -python 3.10.13 +python 3.11.9 diff --git a/agents-api/agents_api/activities/__init__.py b/agents-api/agents_api/activities/__init__.py index a804127fc..c641ab7b9 100644 --- a/agents-api/agents_api/activities/__init__.py +++ b/agents-api/agents_api/activities/__init__.py @@ -1,14 +1,5 @@ """ -The `activities` module within the agents-api package is designed to facilitate various activities related to agent interactions. This includes handling memory management, generating insights from dialogues, summarizing relationships, and more. Each file within the module offers specific functionality: - -- `co_density.py`: Conducts cognitive density analysis to generate concise, entity-dense summaries. -- `demo.py`: Provides a simple demonstration of defining an activity with Temporal. -- `dialog_insights.py`: Extracts insights from dialogues, identifying details that participants might find interesting. -- `mem_mgmt.py`: Manages memory by updating and incorporating new personality information from dialogues. -- `mem_rating.py`: Rates memories based on their poignancy and importance. -- `relationship_summary.py`: Summarizes the relationship between individuals based on provided statements. -- `salient_questions.py`: Identifies salient questions from a set of statements. -- `summarization.py`: Summarizes dialogues and updates memory based on the conversation context. +The `activities` module within the agents-api package is designed to facilitate various activities related to agent interactions. This includes handling memory management, generating insights from dialogues, summarizing relationships, and more. This module plays a crucial role in enhancing the capabilities of agents by providing them with the tools to understand and process information more effectively. """ diff --git a/agents-api/agents_api/activities/co_density.py b/agents-api/agents_api/activities/co_density.py deleted file mode 100644 index 8d276b401..000000000 --- a/agents-api/agents_api/activities/co_density.py +++ /dev/null @@ -1,114 +0,0 @@ -from textwrap import dedent -from typing import Callable - -from temporalio import activity - -from ..clients.model import julep_client -from .types import MemoryDensityTaskArgs - - -def make_prompt(args: MemoryDensityTaskArgs): - # Unpack - memory = args.memory - - # Template - template = dedent( - """\ - [[Memory from a Dialog]] - {memory} - - [[Instruction]] - You will generate increasingly concise, entity-dense summaries of the above Memory. - - Repeat the following 2 steps 5 times. - - Step 1: Identify 1-3 informative Entities (";" delimited) from the Memory which are missing from the previously generated summary. - Step 2: Write a new, denser summary of identical length which covers every entity and detail from the previous summary plus the Missing Entities. - - A Missing Entity is: - - Relevant: to the main story. - - Specific: descriptive yet concise (5 words or fewer). - - Novel: not in the previous summary. - - Faithful: present in the Memory. - - Anywhere: located anywhere in the Memory. - - Guidelines: - - The first summary should be long (4-5 sentences, ~80 words) yet highly non-specific, containing little information beyond the entities marked as missing. Use overly verbose language and fillers (e.g., "this article discusses") to reach ~80 words. - - Make every word count: rewrite the previous summary to improve flow and make space for additional entities. - - Make space with fusion, compression, and removal of uninformative phrases like "the memory discusses." - - The summaries should become highly dense and concise yet self-contained, e.g., easily understood without the Memory. - - Missing entities can appear anywhere in the new summary. - - Never drop entities from the previous summary. If space cannot be made, add fewer new entities. - - Remember, use the exact same number of words for each summary. - - Answer in JSON. The JSON should be a list (length 5) of dictionaries whose keys are "Missing_Entities", "Denser_Summary" and "Density_Score" (between 1-10, higher is better). - - [[Result]] - ```json - """ - ).strip() - - prompt = template.format(memory=memory) - - return prompt - - -async def run_prompt( - memory: str, - model: str = "julep-ai/samantha-1-turbo", - max_tokens: int = 400, - temperature: float = 0.2, - parser: Callable[[str], str] = lambda x: x, -) -> str: - prompt = make_prompt(MemoryDensityTaskArgs(memory=memory)) - - response = await julep_client.chat.completions.create( - model=model, - messages=[ - { - "content": prompt, - "role": "user", - } - ], - max_tokens=max_tokens, - temperature=temperature, - stop=["<", "<|"], - stream=False, - ) - - content = response.choices[0].message.content - - return parser(content.strip() if content is not None else "") - - -@activity.defn -async def co_density(memory: str) -> None: - # session_id = UUID(session_id) - # entries = [ - # Entry(**row) - # for _, row in client.run( - # get_toplevel_entries_query(session_id=session_id) - # ).iterrows() - # ] - - # assert len(entries) > 0, "no need to summarize on empty entries list" - - await run_prompt(memory=memory) - - # new_entry = Entry( - # session_id=session_id, - # source="summarizer", - # role="system", - # name="information", - # content=response, - # timestamp=entries[-1].timestamp + 0.01, - # ) - - # client.run( - # entries_summarization_query( - # session_id=session_id, - # new_entry=new_entry, - # old_entry_ids=[e.id for e in entries], - # ) - # ) diff --git a/agents-api/agents_api/activities/demo.py b/agents-api/agents_api/activities/demo.py index a0edcde3c..f6d63f206 100644 --- a/agents-api/agents_api/activities/demo.py +++ b/agents-api/agents_api/activities/demo.py @@ -1,10 +1,19 @@ -#!/usr/bin/env python3 +from typing import Callable from temporalio import activity +from ..env import testing -@activity.defn -async def say_hello(name: str) -> str: - message = f"Hello, {name}!" - print(message) - return message + +async def demo_activity(a: int, b: int) -> int: + # Should throw an error if testing is not enabled + raise Exception("This should not be called in production") + + +async def mock_demo_activity(a: int, b: int) -> int: + return a + b + + +demo_activity: Callable[[int, int], int] = activity.defn(name="demo_activity")( + demo_activity if not testing else mock_demo_activity +) diff --git a/agents-api/agents_api/activities/dialog_insights.py b/agents-api/agents_api/activities/dialog_insights.py deleted file mode 100644 index d6b10ae01..000000000 --- a/agents-api/agents_api/activities/dialog_insights.py +++ /dev/null @@ -1,117 +0,0 @@ -from textwrap import dedent -from typing import Callable - -from temporalio import activity - -from ..clients.model import julep_client -from .types import ChatML, DialogInsightsTaskArgs - - -def make_prompt( - args: DialogInsightsTaskArgs, - max_turns: int = 20, -): - # Unpack - dialog = args.dialog - person1 = args.person1 - person2 = args.person2 - - # Template - template = dedent( - """\ - [[Conversation]] - {dialog_context} - - --- - - Write down if there are any details from the conversation above that {person1} might have found interesting from {person2}'s perspective, in a full sentence. Write down point by point only the most important points. Answer must be in third person. - - Answer: " - """ - ).strip() - - # Filter dialog (keep only user and assistant sections) - dialog = [entry for entry in dialog if entry.role != "system"] - - # Truncate to max_turns - dialog = dialog[-max_turns:] - - # Prepare dialog context - dialog_context = "\n".join( - [ - f'{e.name or ("User" if e.role == "user" else "Assistant")}: {e.content}' - for e in dialog - ] - ) - - prompt = template.format( - dialog_context=dialog_context, - person1=person1, - person2=person2, - ) - - return prompt - - -async def run_prompt( - dialog: list[ChatML], - person1: str, - person2: str, - model: str = "julep-ai/samantha-1-turbo", - max_tokens: int = 400, - temperature: float = 0.4, - parser: Callable[[str], str] = lambda x: x, -) -> str: - prompt = make_prompt( - DialogInsightsTaskArgs(dialog=dialog, person1=person1, person2=person2) - ) - - response = await julep_client.chat.completions.create( - model=model, - messages=[ - { - "content": prompt, - "role": "user", - } - ], - max_tokens=max_tokens, - temperature=temperature, - stop=["<", "<|"], - stream=False, - ) - - content = response.choices[0].message.content - - return parser(content.strip() if content is not None else "") - - -@activity.defn -async def dialog_insights(dialog: list[ChatML], person1: str, person2: str) -> None: - # session_id = UUID(session_id) - # entries = [ - # Entry(**row) - # for _, row in client.run( - # get_toplevel_entries_query(session_id=session_id) - # ).iterrows() - # ] - - # assert len(entries) > 0, "no need to summarize on empty entries list" - - await run_prompt(dialog, person1, person2) - - # new_entry = Entry( - # session_id=session_id, - # source="summarizer", - # role="system", - # name="information", - # content=response, - # timestamp=entries[-1].timestamp + 0.01, - # ) - - # client.run( - # entries_summarization_query( - # session_id=session_id, - # new_entry=new_entry, - # old_entry_ids=[e.id for e in entries], - # ) - # ) diff --git a/agents-api/agents_api/activities/embed_docs.py b/agents-api/agents_api/activities/embed_docs.py index e2316d59e..3222ff9e7 100644 --- a/agents-api/agents_api/activities/embed_docs.py +++ b/agents-api/agents_api/activities/embed_docs.py @@ -1,31 +1,42 @@ -from pydantic import UUID4 +from beartype import beartype from temporalio import activity -from agents_api.embed_models_registry import EmbeddingModel -from agents_api.env import embedding_model_id -from agents_api.models.docs.embed_docs import ( - embed_docs_snippets_query, -) +from ..clients import cozo +from ..clients import embed as embedder +from ..env import testing +from ..models.docs.embed_snippets import embed_snippets as embed_snippets_query +from .types import EmbedDocsPayload -snippet_embed_instruction = "Encode this passage for retrieval: " +@beartype +async def embed_docs(payload: EmbedDocsPayload, cozo_client=None) -> None: + indices, snippets = list(zip(*enumerate(payload.content))) + embed_instruction: str = payload.embed_instruction or "" + title: str = payload.title or "" -@activity.defn -async def embed_docs(doc_id: UUID4, title: str, content: list[str]) -> None: - indices, snippets = list(zip(*enumerate(content))) - model = EmbeddingModel.from_model_name(embedding_model_id) - embeddings = await model.embed( + embeddings = await embedder.embed( [ - { - "instruction": snippet_embed_instruction, - "text": title + "\n\n" + snippet, - } + ( + embed_instruction + (title + "\n\n" + snippet) if title else snippet + ).strip() for snippet in snippets ] ) - embed_docs_snippets_query( - doc_id=doc_id, + embed_snippets_query( + developer_id=payload.developer_id, + doc_id=payload.doc_id, snippet_indices=indices, embeddings=embeddings, + client=cozo_client or cozo.get_cozo_client(), ) + + +async def mock_embed_docs(payload: EmbedDocsPayload, cozo_client=None) -> None: + # Does nothing + return None + + +embed_docs = activity.defn(name="embed_docs")( + embed_docs if not testing else mock_embed_docs +) diff --git a/agents-api/agents_api/activities/logger.py b/agents-api/agents_api/activities/logger.py index f3c31fece..ed18019dc 100644 --- a/agents-api/agents_api/activities/logger.py +++ b/agents-api/agents_api/activities/logger.py @@ -1,7 +1,8 @@ import logging +from typing import TextIO -logger = logging.getLogger(__name__) -h = logging.StreamHandler() -fmt = logging.Formatter("[%(asctime)s/%(levelname)s] - %(message)s") +logger: logging.Logger = logging.getLogger(__name__) +h: logging.StreamHandler[TextIO] = logging.StreamHandler() +fmt: logging.Formatter = logging.Formatter("[%(asctime)s/%(levelname)s] - %(message)s") h.setFormatter(fmt) logger.addHandler(h) diff --git a/agents-api/agents_api/activities/mem_mgmt.py b/agents-api/agents_api/activities/mem_mgmt.py index 4f661ca46..7cd4a7d6b 100644 --- a/agents-api/agents_api/activities/mem_mgmt.py +++ b/agents-api/agents_api/activities/mem_mgmt.py @@ -2,10 +2,12 @@ from typing import Callable from uuid import UUID +from beartype import beartype from temporalio import activity -from ..clients.model import julep_client -from .types import ChatML, MemoryManagementTaskArgs +from ..autogen.openapi_model import InputChatMLMessage +from ..clients import litellm +from .types import MemoryManagementTaskArgs example_previous_memory = """ Speaker 1: Composes and listens to music. Likes to buy basketball shoes but doesn't wear them often. @@ -117,10 +119,10 @@ def make_prompt( async def run_prompt( - dialog: list[ChatML], + dialog: list[InputChatMLMessage], session_id: UUID, previous_memories: list[str] = [], - model: str = "julep-ai/samantha-1-turbo", + model: str = "gpt-4o", max_tokens: int = 400, temperature: float = 0.4, parser: Callable[[str], str] = lambda x: x, @@ -134,7 +136,7 @@ async def run_prompt( ) ) - response = await julep_client.chat.completions.create( + response = await litellm.acompletion( model=model, messages=[ { @@ -154,8 +156,11 @@ async def run_prompt( @activity.defn +@beartype async def mem_mgmt( - dialog: list[ChatML], session_id: UUID, previous_memories: list[str] = [] + dialog: list[InputChatMLMessage], + session_id: UUID, + previous_memories: list[str] = [], ) -> None: # session_id = UUID(session_id) # entries = [ diff --git a/agents-api/agents_api/activities/mem_rating.py b/agents-api/agents_api/activities/mem_rating.py index bc35ac82d..c681acbc3 100644 --- a/agents-api/agents_api/activities/mem_rating.py +++ b/agents-api/agents_api/activities/mem_rating.py @@ -1,9 +1,10 @@ from textwrap import dedent from typing import Callable +from beartype import beartype from temporalio import activity -from ..clients.model import julep_client +from ..clients import litellm from .types import MemoryRatingTaskArgs @@ -40,14 +41,14 @@ def make_prompt(args: MemoryRatingTaskArgs): async def run_prompt( memory: str, - model: str = "julep-ai/samantha-1-turbo", + model: str = "gpt-4o", max_tokens: int = 400, temperature: float = 0.1, parser: Callable[[str], str] = lambda x: x, ) -> str: prompt = make_prompt(MemoryRatingTaskArgs(memory=memory)) - response = await julep_client.chat.completions.create( + response = await litellm.acompletion( model=model, messages=[ { @@ -67,6 +68,7 @@ async def run_prompt( @activity.defn +@beartype async def mem_rating(memory: str) -> None: # session_id = UUID(session_id) # entries = [ diff --git a/agents-api/agents_api/activities/relationship_summary.py b/agents-api/agents_api/activities/relationship_summary.py deleted file mode 100644 index 5346040d3..000000000 --- a/agents-api/agents_api/activities/relationship_summary.py +++ /dev/null @@ -1,102 +0,0 @@ -from textwrap import dedent -from typing import Callable - -from temporalio import activity - -from ..clients.model import julep_client -from .types import RelationshipSummaryTaskArgs - - -def make_prompt(args: RelationshipSummaryTaskArgs): - # Unpack - statements = args.statements - person1 = args.person1 - person2 = args.person2 - - # Template - template = dedent( - """\ - Statements: - - {statements_joined} - - Based on the statements above, summarize {person1} and {person2}'s relationship in a 2-3 sentences. What do they feel or know about each other? - - Answer: " - """ - ).strip() - - prompt = template.format( - statements_joined="\n- ".join(statements), - person1=person1, - person2=person2, - ) - - return prompt - - -async def run_prompt( - statements: list[str], - person1: str, - person2: str, - model: str = "julep-ai/samantha-1-turbo", - max_tokens: int = 400, - temperature: float = 0.6, - parser: Callable[[str], str] = lambda x: x, -) -> str: - prompt = make_prompt( - RelationshipSummaryTaskArgs( - statements=statements, person1=person1, person2=person2 - ) - ) - - response = await julep_client.chat.completions.create( - model=model, - messages=[ - { - "content": prompt, - "role": "user", - } - ], - max_tokens=max_tokens, - temperature=temperature, - stop=["<", "<|"], - stream=False, - ) - - content = response.choices[0].message.content - - return parser(content.strip() if content is not None else "") - - -@activity.defn -async def relationship_summary( - statements: list[str], person1: str, person2: str -) -> None: - # session_id = UUID(session_id) - # entries = [ - # Entry(**row) - # for _, row in client.run( - # get_toplevel_entries_query(session_id=session_id) - # ).iterrows() - # ] - - # assert len(entries) > 0, "no need to summarize on empty entries list" - - await run_prompt(statements=statements, person1=person1, person2=person2) - - # new_entry = Entry( - # session_id=session_id, - # source="summarizer", - # role="system", - # name="information", - # content=response, - # timestamp=entries[-1].timestamp + 0.01, - # ) - - # client.run( - # entries_summarization_query( - # session_id=session_id, - # new_entry=new_entry, - # old_entry_ids=[e.id for e in entries], - # ) - # ) diff --git a/agents-api/agents_api/activities/salient_questions.py b/agents-api/agents_api/activities/salient_questions.py deleted file mode 100644 index 6a34409d6..000000000 --- a/agents-api/agents_api/activities/salient_questions.py +++ /dev/null @@ -1,91 +0,0 @@ -from textwrap import dedent -from typing import Callable - -from temporalio import activity - -from ..clients.model import julep_client -from .types import SalientQuestionsTaskArgs - - -def make_prompt(args: SalientQuestionsTaskArgs): - # Unpack - statements = args.statements - num = args.num - - # Template - template = dedent( - """\ - Statements: - - {statements_joined} - - Given only the information above, what are the {num} most salient high-level questions we can answer about the subjects grounded in the statements? - - """ - ).strip() - - prompt = template.format( - statements_joined="\n- ".join(statements), - num=num, - ) - - return prompt - - -async def run_prompt( - statements: list[str], - num: int = 3, - model: str = "julep-ai/samantha-1-turbo", - max_tokens: int = 400, - temperature: float = 0.6, - parser: Callable[[str], str] = lambda x: x, -) -> str: - prompt = make_prompt(SalientQuestionsTaskArgs(statements=statements, num=num)) - - response = await julep_client.chat.completions.create( - model=model, - messages=[ - { - "content": prompt, - "role": "user", - } - ], - max_tokens=max_tokens, - temperature=temperature, - stop=["<", "<|"], - stream=False, - ) - - content = response.choices[0].message.content - - return parser(content.strip() if content is not None else "") - - -@activity.defn -async def salient_questions(statements: list[str], num: int = 3) -> None: - # session_id = UUID(session_id) - # entries = [ - # Entry(**row) - # for _, row in client.run( - # get_toplevel_entries_query(session_id=session_id) - # ).iterrows() - # ] - - # assert len(entries) > 0, "no need to summarize on empty entries list" - - await run_prompt(statements=statements, num=num) - - # new_entry = Entry( - # session_id=session_id, - # source="summarizer", - # role="system", - # name="information", - # content=response, - # timestamp=entries[-1].timestamp + 0.01, - # ) - - # client.run( - # entries_summarization_query( - # session_id=session_id, - # new_entry=new_entry, - # old_entry_ids=[e.id for e in entries], - # ) - # ) diff --git a/agents-api/agents_api/activities/summarization.py b/agents-api/agents_api/activities/summarization.py index 81e694bd5..a02237bf7 100644 --- a/agents-api/agents_api/activities/summarization.py +++ b/agents-api/agents_api/activities/summarization.py @@ -1,221 +1,80 @@ #!/usr/bin/env python3 -import asyncio -from textwrap import dedent -from typing import Callable -from uuid import UUID -from litellm import acompletion +import pandas as pd +from beartype import beartype from temporalio import activity -from agents_api.common.protocol.entries import Entry -from agents_api.models.entry.entries_summarization import ( - entries_summarization_query, - get_toplevel_entries_query, -) -from agents_api.rec_sum.entities import get_entities -from agents_api.rec_sum.summarize import summarize_messages -from agents_api.rec_sum.trim import trim_messages +# from agents_api.models.entry.entries_summarization import ( +# entries_summarization_query, +# get_toplevel_entries_query, +# ) -from ..env import model_api_key, model_inference_url, summarization_model_name -from ..model_registry import LOCAL_MODELS -example_previous_memory = """ -Speaker 1: Composes and listens to music. Likes to buy basketball shoes but doesn't wear them often. -""".strip() +# TODO: remove stubs +def entries_summarization_query(*args, **kwargs) -> pd.DataFrame: + return pd.DataFrame() -example_dialog_context = """ -Speaker 1: Did you find a place to donate your shoes? -Speaker 2: I did! I was driving to the grocery store the other day, when I noticed a bin labeled "Donation for Shoes and Clothing." It was easier than I thought! How about you? Why do you have so many pairs of sandals? -Speaker 1: I don't understand myself! When I look them online I just have the urge to buy them, even when I know I don't need them. This addiction is getting worse and worse. -Speaker 2: I completely agree that buying shoes can become an addiction! Are there any ways you can make money from home while waiting for a job offer from a call center? -Speaker 1: Well I already got the job so I just need to learn using the software. When I was still searching for jobs, we actually do a yard sale to sell many of my random items that are never used and clearly aren't needed either. -Speaker 2: Congratulations on getting the job! I know it'll help you out so much. And of course, maybe I should turn to yard sales as well, for they can be a great way to make some extra cash! -Speaker 1: Do you have another job or do you compose music for a living? How does your shopping addiction go? -Speaker 2: As a matter of fact, I do have another job in addition to composing music. I'm actually a music teacher at a private school, and on the side, I compose music for friends and family. As far as my shopping addiction goes, it's getting better. I promised myself that I wouldn't buy myself any more shoes this year! -Speaker 1: Ah, I remember the time I promised myself the same thing on not buying random things anymore, never work so far. Good luck with yours! -Speaker 2: Thanks! I need the good luck wishes. I've been avoiding malls and shopping outlets. Maybe you can try the same! -Speaker 1: I can avoid them physically, but with my job enable me sitting in front of my computer for a long period of time, I already turn the shopping addiction into online-shopping addiction. lol. Wish me luck! -Speaker 2: Sure thing! You know, and speaking of spending time before a computer, I need to look up information about Precious Moments figurines. I'd still like to know what they are! -""".strip() -example_updated_memory = """ -Speaker 1: -- Enjoys composing and listening to music. -- Recently got a job that requires the use of specialized software. -- Displays a shopping addiction, particularly for shoes, that has transitioned to online-shopping due to job nature. -- Previously attempted to mitigate shopping addiction without success. -- Had organized a yard sale to sell unused items when job searching. - -Speaker 2: -- Also enjoys buying shoes and admits to it being addictive. -- Works as a music teacher at a private school in addition to composing music. -- Takes active measures to control his shopping addiction, including avoiding malls. -- Is interested in Precious Moments figurines. -""".strip() - - -def make_prompt( - dialog: list[Entry], - previous_memories: list[str], - max_turns: int = 10, - num_sentences: int = 10, -): - # Template - template = dedent( - """\ - **Instructions** - You are an advanced AI language model with the ability to store and update a memory to keep track of key personality information for people. You will receive a memory and a dialogue between two people. - - Your goal is to update the memory by incorporating the new personality information for both participants while ensuring that the memory does not exceed {num_sentences} sentences. - - To successfully update the memory, follow these steps: - - 1. Carefully analyze the existing memory and extract the key personality information of the participants from it. - 2. Consider the dialogue provided to identify any new or changed personality traits of either participant that need to be incorporated into the memory. - 3. Combine the old and new personality information to create an updated representation of the participants' traits. - 4. Structure the updated memory in a clear and concise manner, ensuring that it does not exceed {num_sentences} sentences. - 5. Pay attention to the relevance and importance of the personality information, focusing on capturing the most significant aspects while maintaining the overall coherence of the memory. - - Remember, the memory should serve as a reference point to maintain continuity in the dialogue and help accurately set context in future conversations based on the personality traits of the participants. - - **Test Example** - [[Previous Memory]] - {example_previous_memory} - - [[Dialogue Context]] - {example_dialog_context} - - [[Updated Memory]] - {example_updated_memory} - - **Actual Run** - [[Previous Memory]] - {previous_memory} - - [[Dialogue Context]] - {dialog_context} - - [[Updated Memory]] - """ - ).strip() - - # Filter dialog (keep only user and assistant sections) - dialog = [entry for entry in dialog if entry.role != "system"] - - # Truncate to max_turns - dialog = dialog[-max_turns:] - - # Prepare dialog context - dialog_context = "\n".join( - [ - f'{e.name or ("User" if e.role == "user" else "Assistant")}: {e.content}' - for e in dialog - ] - ) - - prompt = template.format( - dialog_context=dialog_context, - previous_memory="\n".join(previous_memories), - num_sentences=num_sentences, - example_dialog_context=example_dialog_context, - example_previous_memory=example_previous_memory, - example_updated_memory=example_updated_memory, - ) - - return prompt - - -async def run_prompt( - dialog: list[Entry], - previous_memories: list[str], - model: str = "julep-ai/samantha-1-turbo", - max_tokens: int = 400, - temperature: float = 0.1, - parser: Callable[[str], str] = lambda x: x, - **kwargs, -) -> str: - api_base = None - api_key = None - if model in LOCAL_MODELS: - api_base = model_inference_url - api_key = model_api_key - model = f"openai/{model}" - prompt = make_prompt(dialog, previous_memories, **kwargs) - response = await acompletion( - model=model, - messages=[ - { - "content": prompt, - "role": "user", - } - ], - max_tokens=max_tokens, - temperature=temperature, - stop=["<", "<|"], - stream=False, - api_base=api_base, - api_key=api_key, - ) - - content = response.choices[0].message.content - - return parser(content.strip() if content is not None else "") +def get_toplevel_entries_query(*args, **kwargs) -> pd.DataFrame: + return pd.DataFrame() @activity.defn +@beartype async def summarization(session_id: str) -> None: - session_id = UUID(session_id) - entries = [] - entities_entry_ids = [] - for _, row in get_toplevel_entries_query(session_id=session_id).iterrows(): - if row["role"] == "system" and row.get("name") == "entities": - entities_entry_ids.append(UUID(row["entry_id"], version=4)) - else: - entries.append(row) - - assert len(entries) > 0, "no need to summarize on empty entries list" - - summarized, entities = await asyncio.gather( - summarize_messages(entries, model=summarization_model_name), - get_entities(entries, model=summarization_model_name), - ) - trimmed_messages = await trim_messages(summarized, model=summarization_model_name) - ts_delta = (entries[1]["timestamp"] - entries[0]["timestamp"]) / 2 - new_entities_entry = Entry( - session_id=session_id, - source="summarizer", - role="system", - name="entities", - content=entities["content"], - timestamp=entries[0]["timestamp"] + ts_delta, - ) - - entries_summarization_query( - session_id=session_id, - new_entry=new_entities_entry, - old_entry_ids=entities_entry_ids, - ) - - trimmed_map = { - m["index"]: m["content"] for m in trimmed_messages if m.get("index") is not None - } - - for idx, msg in enumerate(summarized): - new_entry = Entry( - session_id=session_id, - source="summarizer", - role="system", - name="information", - content=trimmed_map.get(idx, msg["content"]), - timestamp=entries[-1]["timestamp"] + 0.01, - ) - - entries_summarization_query( - session_id=session_id, - new_entry=new_entry, - old_entry_ids=[ - UUID(entries[idx - 1]["entry_id"], version=4) - for idx in msg["summarizes"] - ], - ) + raise NotImplementedError() + # session_id = UUID(session_id) + # entries = [] + # entities_entry_ids = [] + # for _, row in get_toplevel_entries_query(session_id=session_id).iterrows(): + # if row["role"] == "system" and row.get("name") == "entities": + # entities_entry_ids.append(UUID(row["entry_id"], version=4)) + # else: + # entries.append(row) + + # assert len(entries) > 0, "no need to summarize on empty entries list" + + # summarized, entities = await asyncio.gather( + # summarize_messages(entries, model=summarization_model_name), + # get_entities(entries, model=summarization_model_name), + # ) + # trimmed_messages = await trim_messages(summarized, model=summarization_model_name) + # ts_delta = (entries[1]["timestamp"] - entries[0]["timestamp"]) / 2 + # new_entities_entry = Entry( + # session_id=session_id, + # source="summarizer", + # role="system", + # name="entities", + # content=entities["content"], + # timestamp=entries[0]["timestamp"] + ts_delta, + # ) + + # entries_summarization_query( + # session_id=session_id, + # new_entry=new_entities_entry, + # old_entry_ids=entities_entry_ids, + # ) + + # trimmed_map = { + # m["index"]: m["content"] for m in trimmed_messages if m.get("index") is not None + # } + + # for idx, msg in enumerate(summarized): + # new_entry = Entry( + # session_id=session_id, + # source="summarizer", + # role="system", + # name="information", + # content=trimmed_map.get(idx, msg["content"]), + # timestamp=entries[-1]["timestamp"] + 0.01, + # ) + + # entries_summarization_query( + # session_id=session_id, + # new_entry=new_entry, + # old_entry_ids=[ + # UUID(entries[idx - 1]["entry_id"], version=4) + # for idx in msg["summarizes"] + # ], + # ) diff --git a/agents-api/agents_api/activities/task_steps/__init__.py b/agents-api/agents_api/activities/task_steps/__init__.py index 1029e409f..9646152cf 100644 --- a/agents-api/agents_api/activities/task_steps/__init__.py +++ b/agents-api/agents_api/activities/task_steps/__init__.py @@ -1,147 +1,16 @@ -import asyncio -from uuid import uuid4 - -from openai.types.chat.chat_completion import ChatCompletion -from simpleeval import simple_eval -from temporalio import activity - -from ...autogen.openapi_model import ( - EvaluateStep, - # ErrorWorkflowStep, - IfElseWorkflowStep, - InputChatMLMessage, - PromptStep, - ToolCallStep, - YieldStep, -) -from ...clients.worker.types import ChatML -from ...common.protocol.tasks import ( - StepContext, - TransitionInfo, -) -from ...common.utils.template import render_template -from ...models.execution.create_execution_transition import ( - create_execution_transition as create_execution_transition_query, -) -from ...routers.sessions.protocol import Settings -from ...routers.sessions.session import llm_generate - - -@activity.defn -async def prompt_step(context: StepContext) -> dict: - assert isinstance(context.definition, PromptStep) - - # Get context data - context_data: dict = context.model_dump() - - # Render template messages - template_messages: list[InputChatMLMessage] = context.definition.prompt - messages = await asyncio.gather( - *[ - render_template(msg.content, context_data, skip_vars=["developer_id"]) - for msg in template_messages - ] - ) - - messages = [ - ChatML(role="user", content=m) if isinstance(m, str) else ChatML(**m) - for m in messages - ] - - # Get settings and run llm - response: ChatCompletion = await llm_generate( - messages, - Settings( - model=context.definition.settings.model or "gpt-4-turbo", - response_format=None, - ), - ) - - return response - - -@activity.defn -async def evaluate_step(context: StepContext) -> dict: - assert isinstance(context.definition, EvaluateStep) - - names = {} - for i in context.inputs: - names.update(i) - - return { - "result": { - k: simple_eval(v, names=names) - for k, v in context.definition.evaluate.items() - } - } - - -@activity.defn -async def yield_step(context: StepContext) -> dict: - if not isinstance(context.definition, YieldStep): - return {} - - # TODO: implement - - return {"test": "result"} - - -@activity.defn -async def tool_call_step(context: StepContext) -> dict: - assert isinstance(context.definition, ToolCallStep) - - context.definition.tool_id - context.definition.arguments - # get tool by id - # call tool - - -# @activity.defn -# async def error_step(context: StepContext) -> dict: -# if not isinstance(context.definition, ErrorWorkflowStep): -# return {} - -# return {"error": context.definition.error} - - -@activity.defn -async def if_else_step(context: StepContext) -> dict: - assert isinstance(context.definition, IfElseWorkflowStep) - - context_data: dict = context.model_dump() - next_workflow = ( - context.definition.then - if simple_eval(context.definition.if_, names=context_data) - else context.definition.else_ - ) - - return {"goto_workflow": next_workflow} - - -@activity.defn -async def transition_step( - context: StepContext, - transition_info: TransitionInfo, -) -> dict: - print("Running transition step") - # raise NotImplementedError() - - # Get transition info - transition_data = transition_info.model_dump(by_alias=False) - - # Get task token if it's a waiting step - if transition_info.type == "awaiting_input": - task_token = activity.info().task_token - transition_data["__task_token"] = task_token - - # Create transition - create_execution_transition_query( - developer_id=context.developer_id, - execution_id=context.execution.id, - transition_id=uuid4(), - **transition_data, - ) - - # Raise if it's a waiting step - if transition_info.type == "awaiting_input": - activity.raise_complete_async() +# ruff: noqa: F401, F403, F405 + +from .base_evaluate import base_evaluate +from .evaluate_step import evaluate_step +from .for_each_step import for_each_step +from .if_else_step import if_else_step +from .log_step import log_step +from .map_reduce_step import map_reduce_step +from .prompt_step import prompt_step +from .raise_complete_async import raise_complete_async +from .return_step import return_step +from .switch_step import switch_step +from .tool_call_step import tool_call_step +from .transition_step import transition_step +from .wait_for_input_step import wait_for_input_step +from .yield_step import yield_step diff --git a/agents-api/agents_api/activities/task_steps/base_evaluate.py b/agents-api/agents_api/activities/task_steps/base_evaluate.py new file mode 100644 index 000000000..e65f1fe66 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/base_evaluate.py @@ -0,0 +1,55 @@ +from typing import Any + +from beartype import beartype +from box import Box +from openai import BaseModel +from temporalio import activity + +from ...env import testing +from ..utils import get_evaluator + + +@beartype +async def base_evaluate( + exprs: str | list[str] | dict[str, str], + values: dict[str, Any] = {}, +) -> Any | list[Any] | dict[str, Any]: + input_len = 1 if isinstance(exprs, str) else len(exprs) + assert input_len > 0, "exprs must be a non-empty string, list or dict" + + # Turn the nested dict values from pydantic to dicts where possible + values = { + k: v.model_dump() if isinstance(v, BaseModel) else v for k, v in values.items() + } + + # TODO: We should make this frozen_box=True, but we need to make sure that + # we don't break anything + values = Box(values, frozen_box=False, conversion_box=False) + + evaluator = get_evaluator(names=values) + + try: + match exprs: + case str(): + return evaluator.eval(exprs) + + case list(): + return [evaluator.eval(expr) for expr in exprs] + + case dict(): + return {k: evaluator.eval(v) for k, v in exprs.items()} + + except BaseException as e: + if activity.in_activity(): + activity.logger.error(f"Error in base_evaluate: {e}") + + raise + + +# Note: This is here just for clarity. We could have just imported base_evaluate directly +# They do the same thing, so we dont need to mock the base_evaluate function +mock_base_evaluate = base_evaluate + +base_evaluate = activity.defn(name="base_evaluate")( + base_evaluate if not testing else mock_base_evaluate +) diff --git a/agents-api/agents_api/activities/task_steps/evaluate_step.py b/agents-api/agents_api/activities/task_steps/evaluate_step.py new file mode 100644 index 000000000..6b1650ff4 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/evaluate_step.py @@ -0,0 +1,41 @@ +from typing import Any + +from beartype import beartype +from temporalio import activity + +from ...activities.utils import simple_eval_dict +from ...common.protocol.tasks import StepContext, StepOutcome +from ...env import testing + + +@beartype +async def evaluate_step( + context: StepContext, + additional_values: dict[str, Any] = {}, + override_expr: dict[str, str] | None = None, +) -> StepOutcome: + try: + expr = ( + override_expr + if override_expr is not None + else context.current_step.evaluate + ) + + values = context.model_dump() | additional_values + output = simple_eval_dict(expr, values) + result = StepOutcome(output=output) + + return result + + except BaseException as e: + activity.logger.error(f"Error in evaluate_step: {e}") + return StepOutcome(error=str(e) or repr(e)) + + +# Note: This is here just for clarity. We could have just imported evaluate_step directly +# They do the same thing, so we dont need to mock the evaluate_step function +mock_evaluate_step = evaluate_step + +evaluate_step = activity.defn(name="evaluate_step")( + evaluate_step if not testing else mock_evaluate_step +) diff --git a/agents-api/agents_api/activities/task_steps/for_each_step.py b/agents-api/agents_api/activities/task_steps/for_each_step.py new file mode 100644 index 000000000..45f6d11dc --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/for_each_step.py @@ -0,0 +1,36 @@ +import logging + +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import ForeachStep +from ...common.protocol.tasks import ( + StepContext, + StepOutcome, +) +from ...env import testing +from .base_evaluate import base_evaluate + + +@beartype +async def for_each_step(context: StepContext) -> StepOutcome: + try: + assert isinstance(context.current_step, ForeachStep) + + output = await base_evaluate( + context.current_step.foreach.in_, context.model_dump() + ) + return StepOutcome(output=output) + + except BaseException as e: + logging.error(f"Error in for_each_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported if_else_step directly +# They do the same thing, so we dont need to mock the if_else_step function +mock_if_else_step = for_each_step + +for_each_step = activity.defn(name="for_each_step")( + for_each_step if not testing else mock_if_else_step +) diff --git a/agents-api/agents_api/activities/task_steps/if_else_step.py b/agents-api/agents_api/activities/task_steps/if_else_step.py new file mode 100644 index 000000000..ecb935ca6 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/if_else_step.py @@ -0,0 +1,38 @@ +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import IfElseWorkflowStep +from ...common.protocol.tasks import ( + StepContext, + StepOutcome, +) +from ...env import testing +from .base_evaluate import base_evaluate + + +@beartype +async def if_else_step(context: StepContext) -> StepOutcome: + # NOTE: This activity is only for logging, so we just evaluate the expression + # Hence, it's a local activity and SHOULD NOT fail + try: + assert isinstance(context.current_step, IfElseWorkflowStep) + + expr: str = context.current_step.if_ + output = await base_evaluate(expr, context.model_dump()) + output: bool = bool(output) + + result = StepOutcome(output=output) + return result + + except BaseException as e: + activity.logger.error(f"Error in if_else_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported if_else_step directly +# They do the same thing, so we dont need to mock the if_else_step function +mock_if_else_step = if_else_step + +if_else_step = activity.defn(name="if_else_step")( + if_else_step if not testing else mock_if_else_step +) diff --git a/agents-api/agents_api/activities/task_steps/log_step.py b/agents-api/agents_api/activities/task_steps/log_step.py new file mode 100644 index 000000000..b33409474 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/log_step.py @@ -0,0 +1,35 @@ +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import LogStep +from ...common.protocol.tasks import ( + StepContext, + StepOutcome, +) +from ...env import testing +from .base_evaluate import base_evaluate + + +@beartype +async def log_step(context: StepContext) -> StepOutcome: + # NOTE: This activity is only for logging, so we just evaluate the expression + # Hence, it's a local activity and SHOULD NOT fail + try: + assert isinstance(context.current_step, LogStep) + + expr: str = context.current_step.log + output = await base_evaluate(expr, context.model_dump()) + + result = StepOutcome(output=output) + return result + + except BaseException as e: + activity.logger.error(f"Error in log_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported log_step directly +# They do the same thing, so we dont need to mock the log_step function +mock_log_step = log_step + +log_step = activity.defn(name="log_step")(log_step if not testing else mock_log_step) diff --git a/agents-api/agents_api/activities/task_steps/map_reduce_step.py b/agents-api/agents_api/activities/task_steps/map_reduce_step.py new file mode 100644 index 000000000..97fd0c154 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/map_reduce_step.py @@ -0,0 +1,35 @@ +import logging + +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import MapReduceStep +from ...common.protocol.tasks import ( + StepContext, + StepOutcome, +) +from ...env import testing +from .base_evaluate import base_evaluate + + +@beartype +async def map_reduce_step(context: StepContext) -> StepOutcome: + try: + assert isinstance(context.current_step, MapReduceStep) + + output = await base_evaluate(context.current_step.over, context.model_dump()) + + return StepOutcome(output=output) + + except BaseException as e: + logging.error(f"Error in map_reduce_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported if_else_step directly +# They do the same thing, so we dont need to mock the if_else_step function +mock_if_else_step = map_reduce_step + +map_reduce_step = activity.defn(name="map_reduce_step")( + map_reduce_step if not testing else mock_if_else_step +) diff --git a/agents-api/agents_api/activities/task_steps/prompt_step.py b/agents-api/agents_api/activities/task_steps/prompt_step.py new file mode 100644 index 000000000..84a569ee1 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/prompt_step.py @@ -0,0 +1,105 @@ +import asyncio + +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import ( + ChatSettings, + Content, + ContentModel, + InputChatMLMessage, +) +from ...clients import ( + litellm, # We dont directly import `acompletion` so we can mock it +) +from ...common.protocol.tasks import StepContext, StepOutcome +from ...common.utils.template import render_template + + +def _content_to_dict( + content: str | list[str] | list[Content | ContentModel], role: str +) -> str | list[dict]: + if isinstance(content, str): + return content + + result = [] + for s in content: + if isinstance(s, str): + result.append({"content": {"type": "text", "text": s, "role": role}}) + elif isinstance(s, Content): + result.append({"content": {"type": s.type, "text": s.text, "role": role}}) + elif isinstance(s, ContentModel): + result.append( + { + "content": { + "type": s.type, + "image_url": {"url": s.image_url.url}, + "role": role, + } + } + ) + + return result + + +@activity.defn +@beartype +async def prompt_step(context: StepContext) -> StepOutcome: + # Get context data + context_data: dict = context.model_dump() + + # Render template messages + prompt = ( + [InputChatMLMessage(content=context.current_step.prompt)] + if isinstance(context.current_step.prompt, str) + else context.current_step.prompt + ) + + template_messages: list[InputChatMLMessage] = prompt + messages = await asyncio.gather( + *[ + render_template( + _content_to_dict(msg.content, msg.role), + context_data, + skip_vars=["developer_id"], + ) + for msg in template_messages + ] + ) + + result_messages = [] + for m in messages: + if isinstance(m, str): + msg = InputChatMLMessage(role="user", content=m) + else: + msg = [] + for d in m: + role = d["content"].get("role") + d["content"] = [d["content"]] + d["role"] = role + msg.append(InputChatMLMessage(**d)) + + result_messages.append(msg) + + # messages = [ + # ( + # InputChatMLMessage(role="user", content=m) + # if isinstance(m, str) + # else [InputChatMLMessage(**d) for d in m] + # ) + # for m in messages + # ] + + # Get settings and run llm + settings: ChatSettings = context.current_step.settings or ChatSettings() + settings_data: dict = settings.model_dump() + + response = await litellm.acompletion( + messages=result_messages, + **settings_data, + ) + + return StepOutcome( + output=response.model_dump(), + next=None, + ) diff --git a/agents-api/agents_api/activities/task_steps/raise_complete_async.py b/agents-api/agents_api/activities/task_steps/raise_complete_async.py new file mode 100644 index 000000000..b393ceda6 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/raise_complete_async.py @@ -0,0 +1,6 @@ +from temporalio import activity + + +@activity.defn +async def raise_complete_async() -> None: + activity.raise_complete_async() diff --git a/agents-api/agents_api/activities/task_steps/return_step.py b/agents-api/agents_api/activities/task_steps/return_step.py new file mode 100644 index 000000000..1e272bab2 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/return_step.py @@ -0,0 +1,35 @@ +from temporalio import activity + +from ...autogen.openapi_model import ReturnStep +from ...common.protocol.tasks import ( + StepContext, + StepOutcome, +) +from ...env import testing +from .base_evaluate import base_evaluate + + +async def return_step(context: StepContext) -> StepOutcome: + # NOTE: This activity is only for returning immediately, so we just evaluate the expression + # Hence, it's a local activity and SHOULD NOT fail + try: + assert isinstance(context.current_step, ReturnStep) + + exprs: dict[str, str] = context.current_step.return_ + output = await base_evaluate(exprs, context.model_dump()) + + result = StepOutcome(output=output) + return result + + except BaseException as e: + activity.logger.error(f"Error in log_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported return_step directly +# They do the same thing, so we dont need to mock the return_step function +mock_return_step = return_step + +return_step = activity.defn(name="return_step")( + return_step if not testing else mock_return_step +) diff --git a/agents-api/agents_api/activities/task_steps/switch_step.py b/agents-api/agents_api/activities/task_steps/switch_step.py new file mode 100644 index 000000000..b28150450 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/switch_step.py @@ -0,0 +1,45 @@ +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import SwitchStep +from ...common.protocol.tasks import ( + StepContext, + StepOutcome, +) +from ...env import testing +from ..utils import get_evaluator + + +@beartype +async def switch_step(context: StepContext) -> StepOutcome: + try: + assert isinstance(context.current_step, SwitchStep) + + # Assume that none of the cases evaluate to truthy + output: int = -1 + cases: list[str] = [c.case for c in context.current_step.switch] + + evaluator = get_evaluator(names=context.model_dump()) + + for i, case in enumerate(cases): + result = evaluator.eval(case) + + if result: + output = i + break + + result = StepOutcome(output=output) + return result + + except BaseException as e: + activity.logger.error(f"Error in switch_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported switch_step directly +# They do the same thing, so we dont need to mock the switch_step function +mock_switch_step = switch_step + +switch_step = activity.defn(name="switch_step")( + switch_step if not testing else mock_switch_step +) diff --git a/agents-api/agents_api/activities/task_steps/tool_call_step.py b/agents-api/agents_api/activities/task_steps/tool_call_step.py new file mode 100644 index 000000000..85a119deb --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/tool_call_step.py @@ -0,0 +1,20 @@ +from beartype import beartype +from temporalio import activity + +from ...common.protocol.tasks import ( + StepContext, +) + + +@activity.defn +@beartype +async def tool_call_step(context: StepContext) -> dict: + raise NotImplementedError() + # assert isinstance(context.current_step, ToolCallStep) + + # context.current_step.tool_id + # context.current_step.arguments + # # get tool by id + # # call tool + + # return {} diff --git a/agents-api/agents_api/activities/task_steps/transition_step.py b/agents-api/agents_api/activities/task_steps/transition_step.py new file mode 100644 index 000000000..f44503cb5 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/transition_step.py @@ -0,0 +1,44 @@ +from beartype import beartype +from temporalio import activity + +from ...autogen.openapi_model import CreateTransitionRequest +from ...common.protocol.tasks import StepContext +from ...env import testing +from ...models.execution.create_execution_transition import ( + create_execution_transition as create_execution_transition_query, +) + + +@beartype +async def transition_step( + context: StepContext, + transition_info: CreateTransitionRequest, +) -> None: + need_to_wait = transition_info.type == "wait" + + # Get task token if it's a waiting step + if need_to_wait: + task_token = activity.info().task_token + transition_info.task_token = task_token + + # Create transition + create_execution_transition_query( + developer_id=context.developer_id, + execution_id=context.execution.id, + task_id=context.task.id, + data=transition_info, + update_execution_status=True, + ) + + +async def mock_transition_step( + context: StepContext, + transition_info: CreateTransitionRequest, +) -> None: + # Does nothing + return None + + +transition_step = activity.defn(name="transition_step")( + transition_step if not testing else mock_transition_step +) diff --git a/agents-api/agents_api/activities/task_steps/wait_for_input_step.py b/agents-api/agents_api/activities/task_steps/wait_for_input_step.py new file mode 100644 index 000000000..c0666512b --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/wait_for_input_step.py @@ -0,0 +1,30 @@ +from temporalio import activity + +from ...autogen.openapi_model import WaitForInputStep +from ...common.protocol.tasks import StepContext, StepOutcome +from ...env import testing +from .base_evaluate import base_evaluate + + +async def wait_for_input_step(context: StepContext) -> StepOutcome: + try: + assert isinstance(context.current_step, WaitForInputStep) + + exprs = context.current_step.wait_for_input + output = await base_evaluate(exprs, context.model_dump()) + + result = StepOutcome(output=output) + return result + + except BaseException as e: + activity.logger.error(f"Error in wait_for_input_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported wait_for_input_step directly +# They do the same thing, so we dont need to mock the wait_for_input_step function +mock_wait_for_input_step = wait_for_input_step + +wait_for_input_step = activity.defn(name="wait_for_input_step")( + wait_for_input_step if not testing else mock_wait_for_input_step +) diff --git a/agents-api/agents_api/activities/task_steps/yield_step.py b/agents-api/agents_api/activities/task_steps/yield_step.py new file mode 100644 index 000000000..41fa2eb87 --- /dev/null +++ b/agents-api/agents_api/activities/task_steps/yield_step.py @@ -0,0 +1,50 @@ +from typing import Callable + +from beartype import beartype +from temporalio import activity + +from agents_api.autogen.openapi_model import TransitionTarget, YieldStep + +from ...common.protocol.tasks import StepContext, StepOutcome +from ...env import testing +from .base_evaluate import base_evaluate + + +@beartype +async def yield_step(context: StepContext) -> StepOutcome: + # NOTE: This activity is only for returning immediately, so we just evaluate the expression + # Hence, it's a local activity and SHOULD NOT fail + try: + assert isinstance(context.current_step, YieldStep) + + all_workflows = context.execution_input.task.workflows + workflow = context.current_step.workflow + exprs = context.current_step.arguments + + assert workflow in [ + wf.name for wf in all_workflows + ], f"Workflow {workflow} not found in task" + + # Evaluate the expressions in the arguments + arguments = await base_evaluate(exprs, context.model_dump()) + + # Transition to the first step of that workflow + transition_target = TransitionTarget( + workflow=workflow, + step=0, + ) + + return StepOutcome(output=arguments, transition_to=("step", transition_target)) + + except BaseException as e: + activity.logger.error(f"Error in yield_step: {e}") + return StepOutcome(error=str(e)) + + +# Note: This is here just for clarity. We could have just imported yield_step directly +# They do the same thing, so we dont need to mock the yield_step function +mock_yield_step: Callable[[StepContext], StepOutcome] = yield_step + +yield_step: Callable[[StepContext], StepOutcome] = activity.defn(name="yield_step")( + yield_step if not testing else mock_yield_step +) diff --git a/agents-api/agents_api/activities/truncation.py b/agents-api/agents_api/activities/truncation.py index 190190a79..06f33d8b6 100644 --- a/agents-api/agents_api/activities/truncation.py +++ b/agents-api/agents_api/activities/truncation.py @@ -1,53 +1,55 @@ from uuid import UUID +from beartype import beartype from temporalio import activity -from agents_api.autogen.openapi_model import Role -from agents_api.common.protocol.entries import Entry -from agents_api.models.entry.delete_entries import delete_entries -from agents_api.models.entry.entries_summarization import get_toplevel_entries_query +from agents_api.autogen.openapi_model import Entry + +# from agents_api.models.entry.entries_summarization import get_toplevel_entries_query def get_extra_entries(messages: list[Entry], token_count_threshold: int) -> list[UUID]: + raise NotImplementedError() + if not len(messages): return messages - result: list[UUID] = [] - token_cnt, offset = 0, 0 - if messages[0].role == Role.system: - token_cnt, offset = messages[0].token_count, 1 + _token_cnt, _offset = 0, 0 + # if messages[0].role == Role.system: + # token_cnt, offset = messages[0].token_count, 1 - for m in reversed(messages[offset:]): - token_cnt += m.token_count - if token_cnt < token_count_threshold: - continue - else: - result.append(m.id) + # for m in reversed(messages[offset:]): + # token_cnt += m.token_count + # if token_cnt < token_count_threshold: + # continue + # else: + # result.append(m.id) - return result + # return result @activity.defn +@beartype async def truncation(session_id: str, token_count_threshold: int) -> None: session_id = UUID(session_id) - delete_entries( - get_extra_entries( - [ - Entry( - entry_id=row["entry_id"], - session_id=session_id, - source=row["source"], - role=Role(row["role"]), - name=row["name"], - content=row["content"], - created_at=row["created_at"], - timestamp=row["timestamp"], - ) - for _, row in get_toplevel_entries_query( - session_id=session_id - ).iterrows() - ], - token_count_threshold, - ), - ) + # delete_entries( + # get_extra_entries( + # [ + # Entry( + # entry_id=row["entry_id"], + # session_id=session_id, + # source=row["source"], + # role=Role(row["role"]), + # name=row["name"], + # content=row["content"], + # created_at=row["created_at"], + # timestamp=row["timestamp"], + # ) + # for _, row in get_toplevel_entries_query( + # session_id=session_id + # ).iterrows() + # ], + # token_count_threshold, + # ), + # ) diff --git a/agents-api/agents_api/activities/types.py b/agents-api/agents_api/activities/types.py index 37fd8015d..c2af67936 100644 --- a/agents-api/agents_api/activities/types.py +++ b/agents-api/agents_api/activities/types.py @@ -1,111 +1,36 @@ -from typing import Any, Callable, Literal, Optional, Protocol, TypedDict +from typing import Literal from uuid import UUID from pydantic import BaseModel +from ..autogen.openapi_model import InputChatMLMessage -class PromptModule(Protocol): - stop: list[str] - temperature: float - parser: Callable[[str], str] - make_prompt: Callable[..., str] - -class ChatML(BaseModel): - role: Literal["system", "user", "assistant"] - content: str - - name: Optional[str] = None - entry_id: Optional[UUID] = None - - processed: bool = False - parent_id: Optional[UUID] = None - session_id: Optional[UUID] = None - timestamp: Optional[float] = None - token_count: Optional[int] = None - - -class BaseTask(BaseModel): ... - - -class BaseTaskArgs(BaseModel): ... - - -class AddPrinciplesTaskArgs(BaseTaskArgs): - scores: dict[str, Any] - full: bool = False - name: Optional[str] = None - user_id: Optional[UUID] = None - character_id: Optional[UUID] = None - - -class AddPrinciplesTask(BaseTask): - name: Literal["add_principles.v1"] - args: AddPrinciplesTaskArgs - - -class MemoryManagementTaskArgs(BaseTaskArgs): +class MemoryManagementTaskArgs(BaseModel): session_id: UUID model: str - dialog: list[ChatML] + dialog: list[InputChatMLMessage] previous_memories: list[str] = [] -class MemoryManagementTask(BaseTask): +class MemoryManagementTask(BaseModel): name: Literal["memory_management.v1"] args: MemoryManagementTaskArgs -class MemoryDensityTaskArgs(BaseTaskArgs): - memory: str - - -class MemoryDensityTask(BaseTask): - name: Literal["memory_density.v1"] - args: MemoryDensityTaskArgs - - -class MemoryRatingTaskArgs(BaseTaskArgs): +class MemoryRatingTaskArgs(BaseModel): memory: str -class MemoryRatingTask(BaseTask): +class MemoryRatingTask(BaseModel): name: Literal["memory_rating.v1"] args: MemoryRatingTaskArgs -class DialogInsightsTaskArgs(BaseTaskArgs): - dialog: list[ChatML] - person1: str - person2: str - - -class DialogInsightsTask(BaseTask): - name: Literal["dialog_insights.v1"] - args: DialogInsightsTaskArgs - - -class RelationshipSummaryTaskArgs(BaseTaskArgs): - statements: list[str] - person1: str - person2: str - - -class RelationshipSummaryTask(BaseTask): - name: Literal["relationship_summary.v1"] - args: RelationshipSummaryTaskArgs - - -class SalientQuestionsTaskArgs(BaseTaskArgs): - statements: list[str] - num: int = 3 - - -class SalientQuestionsTask(BaseTask): - name: Literal["salient_questions.v1"] - args: SalientQuestionsTaskArgs - - -class CombinedTask(TypedDict): - name: str - args: dict[Any, Any] +class EmbedDocsPayload(BaseModel): + developer_id: UUID + doc_id: UUID + content: list[str] + embed_instruction: str | None + title: str | None = None + include_title: bool = False # Need to be a separate parameter for the activity diff --git a/agents-api/agents_api/activities/utils.py b/agents-api/agents_api/activities/utils.py new file mode 100644 index 000000000..21c6b3675 --- /dev/null +++ b/agents-api/agents_api/activities/utils.py @@ -0,0 +1,30 @@ +import json +from typing import Any + +import re2 +import yaml +from beartype import beartype +from simpleeval import EvalWithCompoundTypes, SimpleEval +from yaml import CSafeLoader + +ALLOWED_FUNCTIONS = { + "zip": zip, + "len": len, + "load_yaml": lambda string: yaml.load(string, Loader=CSafeLoader), + "match_regex": lambda pattern, string: bool(re2.fullmatch(pattern, string)), + "search_regex": lambda pattern, string: re2.search(pattern, string), + "load_json": json.loads, +} + + +@beartype +def get_evaluator(names: dict[str, Any]) -> SimpleEval: + evaluator = EvalWithCompoundTypes(names=names, functions=ALLOWED_FUNCTIONS) + return evaluator + + +@beartype +def simple_eval_dict(exprs: dict[str, str], values: dict[str, Any]) -> dict[str, Any]: + evaluator = get_evaluator(names=values) + + return {k: evaluator.eval(v) for k, v in exprs.items()} diff --git a/agents-api/agents_api/autogen/Agents.py b/agents-api/agents_api/autogen/Agents.py index 71b53c8df..157ceb064 100644 --- a/agents-api/agents_api/autogen/Agents.py +++ b/agents-api/agents_api/autogen/Agents.py @@ -8,7 +8,7 @@ from pydantic import AwareDatetime, BaseModel, ConfigDict, Field -from .Chat import GenerationPresetSettings, OpenAISettings, VLLMSettings +from .Chat import DefaultChatSettings class Agent(BaseModel): @@ -29,6 +29,7 @@ class Agent(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -47,9 +48,7 @@ class Agent(BaseModel): """ Instructions for the agent """ - default_settings: ( - GenerationPresetSettings | OpenAISettings | VLLMSettings | None - ) = None + default_settings: DefaultChatSettings | None = None """ Default settings for all sessions created by this agent """ @@ -68,6 +67,7 @@ class CreateAgentRequest(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -86,9 +86,42 @@ class CreateAgentRequest(BaseModel): """ Instructions for the agent """ - default_settings: ( - GenerationPresetSettings | OpenAISettings | VLLMSettings | None - ) = None + default_settings: DefaultChatSettings | None = None + """ + Default settings for all sessions created by this agent + """ + + +class CreateOrUpdateAgentRequest(CreateAgentRequest): + model_config = ConfigDict( + populate_by_name=True, + ) + id: UUID + metadata: dict[str, Any] | None = None + name: Annotated[ + str, + Field( + "", + max_length=120, + pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", + ), + ] + """ + Name of the agent + """ + about: str = "" + """ + About the agent + """ + model: str = "" + """ + Model name to use (gpt-4-turbo, gemini-nano etc) + """ + instructions: str | list[str] = "" + """ + Instructions for the agent + """ + default_settings: DefaultChatSettings | None = None """ Default settings for all sessions created by this agent """ @@ -107,6 +140,7 @@ class PatchAgentRequest(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -125,9 +159,7 @@ class PatchAgentRequest(BaseModel): """ Instructions for the agent """ - default_settings: ( - GenerationPresetSettings | OpenAISettings | VLLMSettings | None - ) = None + default_settings: DefaultChatSettings | None = None """ Default settings for all sessions created by this agent """ @@ -146,6 +178,7 @@ class UpdateAgentRequest(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -164,9 +197,7 @@ class UpdateAgentRequest(BaseModel): """ Instructions for the agent """ - default_settings: ( - GenerationPresetSettings | OpenAISettings | VLLMSettings | None - ) = None + default_settings: DefaultChatSettings | None = None """ Default settings for all sessions created by this agent """ diff --git a/agents-api/agents_api/autogen/Chat.py b/agents-api/agents_api/autogen/Chat.py index c7c25c7ad..da214ec77 100644 --- a/agents-api/agents_api/autogen/Chat.py +++ b/agents-api/agents_api/autogen/Chat.py @@ -6,10 +6,11 @@ from typing import Annotated, Literal from uuid import UUID -from pydantic import AwareDatetime, BaseModel, ConfigDict, Field +from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, StrictBool +from .Common import LogitBias from .Docs import DocReference -from .Entries import ChatMLMessage +from .Tools import FunctionTool, NamedToolChoice class BaseChatOutput(BaseModel): @@ -21,7 +22,7 @@ class BaseChatOutput(BaseModel): """ The reason the model stopped generating tokens """ - logprobs: Annotated[LogProbResponse | None, Field(...)] + logprobs: LogProbResponse | None = None """ The log probabilities of tokens """ @@ -31,7 +32,7 @@ class BaseChatResponse(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - usage: Annotated[CompetionUsage | None, Field(...)] + usage: CompetionUsage | None = None """ Usage statistics for the completion request """ @@ -59,7 +60,25 @@ class BaseTokenLogProb(BaseModel): """ The log probability of the token """ - bytes: Annotated[list[int] | None, Field(...)] + bytes: list[int] | None = None + + +class ChatInputData(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + messages: Annotated[list[Message], Field(min_length=1)] + """ + A list of new input messages comprising the conversation so far. + """ + tools: Annotated[list[FunctionTool] | None, Field(None, min_length=1)] + """ + (Advanced) List of tools that are provided in addition to agent's default set of tools. + """ + tool_choice: Literal["auto", "none"] | NamedToolChoice | None = None + """ + Can be one of existing tools given to the agent earlier or the ones provided in this request. + """ class ChatOutputChunk(BaseChatOutput): @@ -70,7 +89,7 @@ class ChatOutputChunk(BaseChatOutput): model_config = ConfigDict( populate_by_name=True, ) - delta: ChatMLMessage + delta: Delta """ The message generated by the model """ @@ -94,15 +113,21 @@ class CompetionUsage(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - completion_tokens: Annotated[int, Field(json_schema_extra={"readOnly": True})] + completion_tokens: Annotated[ + int | None, Field(None, json_schema_extra={"readOnly": True}) + ] """ Number of tokens in the generated completion """ - prompt_tokens: Annotated[int, Field(json_schema_extra={"readOnly": True})] + prompt_tokens: Annotated[ + int | None, Field(None, json_schema_extra={"readOnly": True}) + ] """ Number of tokens in the prompt """ - total_tokens: Annotated[int, Field(json_schema_extra={"readOnly": True})] + total_tokens: Annotated[ + int | None, Field(None, json_schema_extra={"readOnly": True}) + ] """ Total number of tokens used in the request (prompt + completion) """ @@ -118,26 +143,80 @@ class CompletionResponseFormat(BaseModel): """ -class GenerationPresetSettings(BaseModel): +class Content(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + text: str + type: Literal["text"] = "text" + """ + The type (fixed to 'text') + """ + + +class ContentModel(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + image_url: ImageUrl + """ + The image URL + """ + type: Literal["image_url"] = "image_url" + """ + The type (fixed to 'image_url') + """ + + +class Delta(BaseModel): + """ + The message generated by the model + """ + + model_config = ConfigDict( + populate_by_name=True, + ) + role: Literal[ + "user", + "assistant", + "system", + "function", + "function_response", + "function_call", + "auto", + ] + """ + The role of the message + """ + content: str | list[str] | list[Content | ContentModel] + """ + The content parts of the message + """ + name: str | None = None + """ + Name + """ + continue_: Annotated[StrictBool | None, Field(None, alias="continue")] + """ + Whether to continue this message or return a new one + """ + + +class ImageUrl(BaseModel): + """ + The image URL + """ + model_config = ConfigDict( populate_by_name=True, ) - preset: ( - Literal[ - "problem_solving", - "conversational", - "fun", - "prose", - "creative", - "business", - "deterministic", - "code", - "multilingual", - ] - | None - ) = None + url: str + """ + Image URL or base64 data url (e.g. `data:image/jpeg;base64,`) + """ + detail: Literal["low", "high", "auto"] = "auto" """ - Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) + The detail level of the image """ @@ -151,8 +230,44 @@ class LogProbResponse(BaseModel): """ -class MessageChatResponse(ChunkChatResponse): - pass +class Message(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + role: Literal[ + "user", + "assistant", + "system", + "function", + "function_response", + "function_call", + "auto", + ] + """ + The role of the message + """ + content: str | list[str] | list[Content | ContentModel] + """ + The content parts of the message + """ + name: str | None = None + """ + Name + """ + continue_: Annotated[StrictBool | None, Field(None, alias="continue")] + """ + Whether to continue this message or return a new one + """ + + +class MessageChatResponse(BaseChatResponse): + model_config = ConfigDict( + populate_by_name=True, + ) + choices: list[SingleChatOutput | MultipleChatOutput] + """ + The deltas generated by the model + """ class MultipleChatOutput(BaseChatOutput): @@ -163,7 +278,7 @@ class MultipleChatOutput(BaseChatOutput): model_config = ConfigDict( populate_by_name=True, ) - messages: list[ChatMLMessage] + messages: list[Message] class OpenAISettings(BaseModel): @@ -196,7 +311,7 @@ class SingleChatOutput(BaseChatOutput): model_config = ConfigDict( populate_by_name=True, ) - message: ChatMLMessage + message: Message class TokenLogProb(BaseTokenLogProb): @@ -206,10 +321,61 @@ class TokenLogProb(BaseTokenLogProb): top_logprobs: list[BaseTokenLogProb] -class VLLMSettings(BaseModel): +class ChatInput(ChatInputData): model_config = ConfigDict( populate_by_name=True, ) + remember: Annotated[StrictBool, Field(False, json_schema_extra={"readOnly": True})] + """ + DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release) + """ + recall: StrictBool = True + """ + Whether previous memories and docs should be recalled or not + """ + save: StrictBool = True + """ + Whether this interaction should be stored in the session history or not + """ + model: Annotated[ + str | None, + Field( + None, + max_length=120, + pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", + ), + ] + """ + Identifier of the model to be used + """ + stream: StrictBool = False + """ + Indicates if the server should stream the response as it's generated + """ + stop: Annotated[list[str] | None, Field(None, max_length=4, min_length=1)] + """ + Up to 4 sequences where the API will stop generating further tokens. + """ + seed: Annotated[int | None, Field(None, ge=-1, le=1000)] + """ + If specified, the system will make a best effort to sample deterministically for that particular seed value + """ + max_tokens: Annotated[int | None, Field(None, ge=1)] + """ + The maximum number of tokens to generate in the chat completion + """ + logit_bias: dict[str, LogitBias] | None = None + """ + Modify the likelihood of specified tokens appearing in the completion + """ + response_format: CompletionResponseFormat | None = None + """ + Response format (set to `json_object` to restrict output to JSON) + """ + agent: UUID | None = None + """ + Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + """ repetition_penalty: Annotated[float | None, Field(None, ge=0.0, le=2.0)] """ Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. @@ -218,6 +384,18 @@ class VLLMSettings(BaseModel): """ Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. """ + min_p: Annotated[float | None, Field(None, ge=0.0, le=1.0)] + """ + Minimum probability compared to leading token to be considered + """ + frequency_penalty: Annotated[float | None, Field(None, ge=-2.0, le=2.0)] + """ + Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + """ + presence_penalty: Annotated[float | None, Field(None, ge=-2.0, le=2.0)] + """ + Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + """ temperature: Annotated[float | None, Field(None, ge=0.0, le=5.0)] """ What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. @@ -226,7 +404,70 @@ class VLLMSettings(BaseModel): """ Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. """ + + +class DefaultChatSettings(OpenAISettings): + """ + Default settings for the chat session (also used by the agent) + """ + + model_config = ConfigDict( + populate_by_name=True, + ) + repetition_penalty: Annotated[float | None, Field(None, ge=0.0, le=2.0)] + """ + Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + """ + length_penalty: Annotated[float | None, Field(None, ge=0.0, le=2.0)] + """ + Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. + """ min_p: Annotated[float | None, Field(None, ge=0.0, le=1.0)] """ Minimum probability compared to leading token to be considered """ + + +class ChatSettings(DefaultChatSettings): + model_config = ConfigDict( + populate_by_name=True, + ) + model: Annotated[ + str | None, + Field( + None, + max_length=120, + pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", + ), + ] + """ + Identifier of the model to be used + """ + stream: StrictBool = False + """ + Indicates if the server should stream the response as it's generated + """ + stop: Annotated[list[str] | None, Field(None, max_length=4, min_length=1)] + """ + Up to 4 sequences where the API will stop generating further tokens. + """ + seed: Annotated[int | None, Field(None, ge=-1, le=1000)] + """ + If specified, the system will make a best effort to sample deterministically for that particular seed value + """ + max_tokens: Annotated[int | None, Field(None, ge=1)] + """ + The maximum number of tokens to generate in the chat completion + """ + logit_bias: dict[str, LogitBias] | None = None + """ + Modify the likelihood of specified tokens appearing in the completion + """ + response_format: CompletionResponseFormat | None = None + """ + Response format (set to `json_object` to restrict output to JSON) + """ + agent: UUID | None = None + """ + Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + """ diff --git a/agents-api/agents_api/autogen/Common.py b/agents-api/agents_api/autogen/Common.py index aab88621d..4bb28d400 100644 --- a/agents-api/agents_api/autogen/Common.py +++ b/agents-api/agents_api/autogen/Common.py @@ -36,6 +36,16 @@ class Offset(RootModel[int]): """ +class PyExpression(RootModel[str]): + model_config = ConfigDict( + populate_by_name=True, + ) + root: str + """ + A simple python expression compatible with SimpleEval. + """ + + class ResourceCreatedResponse(BaseModel): model_config = ConfigDict( populate_by_name=True, diff --git a/agents-api/agents_api/autogen/Docs.py b/agents-api/agents_api/autogen/Docs.py index 7c4c3b446..a7023ddfc 100644 --- a/agents-api/agents_api/autogen/Docs.py +++ b/agents-api/agents_api/autogen/Docs.py @@ -13,21 +13,29 @@ class BaseDocSearchRequest(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - confidence: Annotated[float, Field(0.5, ge=0.0, le=1.0)] + limit: Annotated[int, Field(10, ge=1, le=100)] + lang: Literal["en-US"] = "en-US" """ - The confidence cutoff level + The language to be used for text-only search. Support for other languages coming soon. """ - alpha: Annotated[float, Field(0.75, ge=0.0, le=1.0)] + + +class CreateDocRequest(BaseModel): """ - The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; + Payload for creating a doc """ - mmr: bool = False + + model_config = ConfigDict( + populate_by_name=True, + ) + metadata: dict[str, Any] | None = None + title: Annotated[str, Field(max_length=800)] """ - Whether to include the MMR algorithm in the search. Optimizes for diversity in search results. + Title describing what this document contains """ - lang: Literal["en-US"] = "en-US" + content: str | list[str] """ - The language to be used for text-only search. Support for other languages coming soon. + Contents of the document """ @@ -41,12 +49,7 @@ class Doc(BaseModel): """ When this resource was created as UTC date-time """ - title: Annotated[ - str, - Field( - pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$" - ), - ] + title: Annotated[str, Field(max_length=800)] """ Title describing what this document contains """ @@ -76,12 +79,23 @@ class DocReference(BaseModel): """ ID of the document """ - snippet_index: list[int] + title: str | None = None + snippets: Annotated[list[Snippet], Field(min_length=1)] + distance: float | None = None + + +class DocSearchResponse(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + docs: list[DocReference] + """ + The documents that were found """ - Snippets referred to of the document + time: Annotated[float, Field(gt=0.0)] + """ + The time taken to search in seconds """ - title: str | None = None - snippet: str | None = None class EmbedQueryRequest(BaseModel): @@ -108,23 +122,39 @@ class HybridDocSearchRequest(BaseDocSearchRequest): model_config = ConfigDict( populate_by_name=True, ) - text: str | list[str] + confidence: Annotated[float, Field(0.5, ge=0.0, le=1.0)] + """ + The confidence cutoff level + """ + alpha: Annotated[float, Field(0.75, ge=0.0, le=1.0)] + """ + The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; """ - Text or texts to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. + text: str """ - vector: list[float] | list[list[float]] + Text to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. """ - Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + vector: list[float] """ + Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + """ + + +class Snippet(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + index: int + content: str class TextOnlyDocSearchRequest(BaseDocSearchRequest): model_config = ConfigDict( populate_by_name=True, ) - text: str | list[str] + text: str """ - Text or texts to use in the search. + Text to use in the search. """ @@ -132,7 +162,11 @@ class VectorDocSearchRequest(BaseDocSearchRequest): model_config = ConfigDict( populate_by_name=True, ) - vector: list[float] | list[list[float]] + confidence: Annotated[float, Field(0.5, ge=0.0, le=1.0)] + """ + The confidence cutoff level + """ + vector: list[float] """ - Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. """ diff --git a/agents-api/agents_api/autogen/Entries.py b/agents-api/agents_api/autogen/Entries.py index 6c8bf9ca4..1f4286eb9 100644 --- a/agents-api/agents_api/autogen/Entries.py +++ b/agents-api/agents_api/autogen/Entries.py @@ -6,7 +6,7 @@ from typing import Annotated, Literal from uuid import UUID -from pydantic import AnyUrl, AwareDatetime, BaseModel, ConfigDict, Field +from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, RootModel from .Tools import ChosenToolCall, Tool, ToolResponse @@ -17,7 +17,7 @@ class BaseEntry(BaseModel): ) role: Literal[ "user", - "agent", + "assistant", "system", "function", "function_response", @@ -29,51 +29,45 @@ class BaseEntry(BaseModel): """ name: str | None = None content: ( - list[ChatMLTextContentPart | ChatMLImageContentPart] + list[Content | ContentModel] | Tool | ChosenToolCall | str | ToolResponse | list[ - list[ChatMLTextContentPart | ChatMLImageContentPart] - | Tool - | ChosenToolCall - | str - | ToolResponse + list[Content | ContentModel] | Tool | ChosenToolCall | str | ToolResponse ] ) source: Literal[ "api_request", "api_response", "tool_response", "internal", "summarizer", "meta" ] - tokenizer: str | None = None - token_count: int | None = None + tokenizer: str + token_count: int timestamp: Annotated[float, Field(ge=0.0)] """ This is the time that this event refers to. """ -class ChatMLImageContentPart(BaseModel): - model_config = ConfigDict( - populate_by_name=True, - ) - image_url: ImageURL - """ - The image URL - """ - type: Literal["image_url"] = "image_url" - """ - The type (fixed to 'image_url') - """ - - -class ChatMLMessage(BaseModel): +class ChatMLRole( + RootModel[ + Literal[ + "user", + "assistant", + "system", + "function", + "function_response", + "function_call", + "auto", + ] + ] +): model_config = ConfigDict( populate_by_name=True, ) - role: Literal[ + root: Literal[ "user", - "agent", + "assistant", "system", "function", "function_response", @@ -81,30 +75,11 @@ class ChatMLMessage(BaseModel): "auto", ] """ - The role of the message - """ - content: str | list[str] | list[ChatMLTextContentPart | ChatMLImageContentPart] - """ - The content parts of the message - """ - name: str | None = None - """ - Name - """ - tool_calls: Annotated[ - list[ChosenToolCall], Field([], json_schema_extra={"readOnly": True}) - ] - """ - Tool calls generated by the model. - """ - created_at: Annotated[AwareDatetime, Field(json_schema_extra={"readOnly": True})] - """ - When this resource was created as UTC date-time + ChatML role (system|assistant|user|function_call|function|function_response|auto) """ - id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})] -class ChatMLTextContentPart(BaseModel): +class Content(BaseModel): model_config = ConfigDict( populate_by_name=True, ) @@ -115,6 +90,20 @@ class ChatMLTextContentPart(BaseModel): """ +class ContentModel(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + image_url: ImageUrl + """ + The image URL + """ + type: Literal["image_url"] = "image_url" + """ + The type (fixed to 'image_url') + """ + + class Entry(BaseEntry): model_config = ConfigDict( populate_by_name=True, @@ -130,7 +119,7 @@ class History(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - entries: list[BaseEntry] + entries: list[Entry] relations: list[Relation] session_id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})] created_at: Annotated[AwareDatetime, Field(json_schema_extra={"readOnly": True})] @@ -139,47 +128,31 @@ class History(BaseModel): """ -class ImageURL(BaseModel): +class ImageDetail(RootModel[Literal["low", "high", "auto"]]): model_config = ConfigDict( populate_by_name=True, ) - url: AnyUrl + root: Literal["low", "high", "auto"] """ - Image URL or base64 data url (e.g. `data:image/jpeg;base64,`) + Image detail level """ - detail: Literal["low", "high", "auto"] = "auto" + + +class ImageUrl(BaseModel): """ - The detail level of the image + The image URL """ - -class InputChatMLMessage(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - role: Literal[ - "user", - "agent", - "system", - "function", - "function_response", - "function_call", - "auto", - ] + url: str """ - The role of the message - """ - content: str | list[str] | list[ChatMLTextContentPart | ChatMLImageContentPart] - """ - The content parts of the message - """ - name: str | None = None - """ - Name + Image URL or base64 data url (e.g. `data:image/jpeg;base64,`) """ - continue_: Annotated[bool | None, Field(None, alias="continue")] + detail: Literal["low", "high", "auto"] = "auto" """ - Whether to continue this message or return a new one + The detail level of the image """ diff --git a/agents-api/agents_api/autogen/Executions.py b/agents-api/agents_api/autogen/Executions.py index d1cb40f0f..73dab5d65 100644 --- a/agents-api/agents_api/autogen/Executions.py +++ b/agents-api/agents_api/autogen/Executions.py @@ -68,10 +68,6 @@ class TaskTokenResumeExecutionRequest(BaseModel): populate_by_name=True, ) status: Literal["running"] = "running" - task_token: str - """ - A Task Token is a unique identifier for a specific Task Execution. - """ input: dict[str, Any] | None = None """ The input to resume the execution with @@ -87,9 +83,11 @@ class Transition(BaseModel): Field(json_schema_extra={"readOnly": True}), ] execution_id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})] - output: Annotated[dict[str, Any], Field(json_schema_extra={"readOnly": True})] - current: Annotated[list, Field(json_schema_extra={"readOnly": True})] - next: Annotated[list | None, Field(json_schema_extra={"readOnly": True})] + output: Annotated[Any, Field(json_schema_extra={"readOnly": True})] + current: Annotated[TransitionTarget, Field(json_schema_extra={"readOnly": True})] + next: Annotated[ + TransitionTarget | None, Field(json_schema_extra={"readOnly": True}) + ] id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})] metadata: dict[str, Any] | None = None created_at: Annotated[AwareDatetime, Field(json_schema_extra={"readOnly": True})] @@ -102,6 +100,25 @@ class Transition(BaseModel): """ +class TransitionTarget(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + workflow: Annotated[ + str, + Field( + max_length=120, + pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", + ), + ] + """ + For Unicode character safety + See: https://unicode.org/reports/tr31/ + See: https://www.unicode.org/reports/tr39/#Identifier_Characters + """ + step: int + + class UpdateExecutionRequest(BaseModel): model_config = ConfigDict( populate_by_name=True, diff --git a/agents-api/agents_api/autogen/Jobs.py b/agents-api/agents_api/autogen/Jobs.py index 92acce9ad..568b7a09c 100644 --- a/agents-api/agents_api/autogen/Jobs.py +++ b/agents-api/agents_api/autogen/Jobs.py @@ -6,7 +6,7 @@ from typing import Annotated, Literal from uuid import UUID -from pydantic import AwareDatetime, BaseModel, ConfigDict, Field +from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, StrictBool class JobStatus(BaseModel): @@ -26,6 +26,7 @@ class JobStatus(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -36,7 +37,7 @@ class JobStatus(BaseModel): """ Reason for the current state of the job """ - has_progress: bool = False + has_progress: StrictBool = False """ Whether this Job supports progress updates """ diff --git a/agents-api/agents_api/autogen/Sessions.py b/agents-api/agents_api/autogen/Sessions.py index 89f5f590d..4380dac02 100644 --- a/agents-api/agents_api/autogen/Sessions.py +++ b/agents-api/agents_api/autogen/Sessions.py @@ -6,7 +6,7 @@ from typing import Annotated, Any, Literal from uuid import UUID -from pydantic import AwareDatetime, BaseModel, ConfigDict, Field +from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, StrictBool class CreateSessionRequest(BaseModel): @@ -31,7 +31,7 @@ class CreateSessionRequest(BaseModel): """ A specific situation that sets the background for this session """ - render_templates: bool = False + render_templates: StrictBool = True """ Render system and assistant message content as jinja templates """ @@ -58,7 +58,7 @@ class PatchSessionRequest(BaseModel): """ A specific situation that sets the background for this session """ - render_templates: bool = False + render_templates: StrictBool = True """ Render system and assistant message content as jinja templates """ @@ -85,7 +85,7 @@ class Session(BaseModel): """ Summary (null at the beginning) - generated automatically after every interaction """ - render_templates: bool = False + render_templates: StrictBool = True """ Render system and assistant message content as jinja templates """ @@ -148,7 +148,41 @@ class UpdateSessionRequest(BaseModel): """ A specific situation that sets the background for this session """ - render_templates: bool = False + render_templates: StrictBool = True + """ + Render system and assistant message content as jinja templates + """ + token_budget: int | None = None + """ + Threshold value for the adaptive context functionality + """ + context_overflow: Literal["truncate", "adaptive"] | None = None + """ + Action to start on context window overflow + """ + metadata: dict[str, Any] | None = None + + +class CreateOrUpdateSessionRequest(CreateSessionRequest): + model_config = ConfigDict( + populate_by_name=True, + ) + id: UUID + user: UUID | None = None + """ + User ID of user associated with this session + """ + users: list[UUID] | None = None + agent: UUID | None = None + """ + Agent ID of agent associated with this session + """ + agents: list[UUID] | None = None + situation: str = '{%- if agent.name -%}\nYou are {{agent.name}}.{{" "}}\n{%- endif -%}\n\n{%- if agent.about -%}\nAbout you: {{agent.name}}.{{" "}}\n{%- endif -%}\n\n{%- if user -%}\nYou are talking to a user\n {%- if user.name -%}{{" "}} and their name is {{user.name}}\n {%- if user.about -%}. About the user: {{user.about}}.{%- else -%}.{%- endif -%}\n {%- endif -%}\n{%- endif -%}\n\n{{"\n\n"}}\n\n{%- if agent.instructions -%}\nInstructions:{{"\n"}}\n {%- if agent.instructions is string -%}\n {{agent.instructions}}{{"\n"}}\n {%- else -%}\n {%- for instruction in agent.instructions -%}\n - {{instruction}}{{"\n"}}\n {%- endfor -%}\n {%- endif -%}\n {{"\n"}}\n{%- endif -%}\n\n{%- if tools -%}\nTools:{{"\n"}}\n {%- for tool in tools -%}\n {%- if tool.type == "function" -%}\n - {{tool.function.name}}\n {%- if tool.function.description -%}: {{tool.function.description}}{%- endif -%}{{"\n"}}\n {%- else -%}\n - {{ 0/0 }} {# Error: Other tool types aren\'t supported yet. #}\n {%- endif -%}\n {%- endfor -%}\n{{"\n\n"}}\n{%- endif -%}\n\n{%- if docs -%}\nRelevant documents:{{"\n"}}\n {%- for doc in docs -%}\n {{doc.title}}{{"\n"}}\n {%- if doc.content is string -%}\n {{doc.content}}{{"\n"}}\n {%- else -%}\n {%- for snippet in doc.content -%}\n {{snippet}}{{"\n"}}\n {%- endfor -%}\n {%- endif -%}\n {{"---"}}\n {%- endfor -%}\n{%- endif -%}' + """ + A specific situation that sets the background for this session + """ + render_templates: StrictBool = True """ Render system and assistant message content as jinja templates """ diff --git a/agents-api/agents_api/autogen/Tasks.py b/agents-api/agents_api/autogen/Tasks.py index 87f5a5263..ed935426b 100644 --- a/agents-api/agents_api/autogen/Tasks.py +++ b/agents-api/agents_api/autogen/Tasks.py @@ -6,23 +6,71 @@ from typing import Annotated, Any, Literal from uuid import UUID -from pydantic import AwareDatetime, BaseModel, ConfigDict, Field +from pydantic import AwareDatetime, BaseModel, ConfigDict, Field, StrictBool -from .Chat import CompletionResponseFormat -from .Common import LogitBias -from .Entries import InputChatMLMessage -from .Tools import FunctionDef +from .Chat import ChatSettings +from .Docs import ( + EmbedQueryRequest, + HybridDocSearchRequest, + TextOnlyDocSearchRequest, + VectorDocSearchRequest, +) +from .Tools import CreateToolRequest -class BaseWorkflowStep(BaseModel): +class CaseThen(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal[ - "tool_call", "yield", "prompt", "evaluate", "if_else", "wait_for_input", "error" - ] + case: Literal["_"] | str """ - The kind of step + The condition to evaluate + """ + then: ( + EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep + | ErrorWorkflowStep + | YieldStep + | WaitForInputStep + ) + """ + The steps to run if the condition is true + """ + + +class Content(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + text: str + """ + A valid jinja template. + """ + type: Literal["text"] = "text" + """ + The type (fixed to 'text') + """ + + +class ContentModel(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + image_url: ImageUrl + """ + The image URL + """ + type: Literal["image_url"] = "image_url" + """ + The type (fixed to 'image_url') """ @@ -39,11 +87,22 @@ class CreateTaskRequest(BaseModel): main: list[ EvaluateStep | ToolCallStep - | YieldStep | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep | ErrorWorkflowStep + | YieldStep | WaitForInputStep | IfElseWorkflowStep + | SwitchStep + | ForeachStep + | ParallelStep + | Main ] """ The entrypoint of the task. @@ -56,61 +115,160 @@ class CreateTaskRequest(BaseModel): """ Tools defined specifically for this task not included in the Agent itself. """ - inherit_tools: bool = True + inherit_tools: StrictBool = True """ Whether to inherit tools from the parent agent or not. Defaults to true. """ metadata: dict[str, Any] | None = None -class ErrorWorkflowStep(BaseWorkflowStep): +class EmbedStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["error"] = "error" + kind_: Annotated[ + Literal["embed"], Field("embed", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ + embed: EmbedQueryRequest + """ + The text to embed + """ + + +class ErrorWorkflowStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["error"], Field("error", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ error: str """ The error message """ -class EvaluateStep(BaseWorkflowStep): +class EvaluateStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["evaluate"] = "evaluate" + kind_: Annotated[ + Literal["evaluate"], Field("evaluate", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ evaluate: dict[str, str] """ The expression to evaluate """ -class IfElseWorkflowStep(BaseWorkflowStep): +class ForeachDo(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + in_: Annotated[str, Field(alias="in")] + """ + The variable to iterate over. + VALIDATION: Should NOT return more than 1000 elements. + """ + do: ( + EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + ) + """ + The steps to run for each iteration + """ + + +class ForeachStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["foreach"], Field("foreach", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ + foreach: ForeachDo + """ + The steps to run for each iteration + """ + + +class GetStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[Literal["get"], Field("get", json_schema_extra={"readOnly": True})] + """ + The kind of step + """ + get: str + """ + The key to get + """ + + +class IfElseWorkflowStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["if_else"] = "if_else" + kind_: Annotated[ + Literal["if_else"], Field("if_else", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ if_: Annotated[str, Field(alias="if")] """ The condition to evaluate """ then: ( - Any + EvaluateStep | ToolCallStep - | YieldStep | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep | ErrorWorkflowStep + | YieldStep | WaitForInputStep ) """ The steps to run if the condition is true """ else_: Annotated[ - Any + EvaluateStep | ToolCallStep - | YieldStep | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep | ErrorWorkflowStep + | YieldStep | WaitForInputStep, Field(alias="else"), ] @@ -119,254 +277,347 @@ class IfElseWorkflowStep(BaseWorkflowStep): """ -class PatchTaskRequest(BaseModel): +class ImageUrl(BaseModel): """ - Payload for patching a task + The image URL """ model_config = ConfigDict( populate_by_name=True, ) - description: str = "" - main: ( - list[ - EvaluateStep - | ToolCallStep - | YieldStep - | PromptStep - | ErrorWorkflowStep - | WaitForInputStep - | IfElseWorkflowStep - ] - | None - ) = None - """ - The entrypoint of the task. - """ - input_schema: dict[str, Any] | None = None - """ - The schema for the input to the task. `null` means all inputs are valid. - """ - tools: list[TaskTool] = [] + url: str """ - Tools defined specifically for this task not included in the Agent itself. + Image URL or base64 data url (e.g. `data:image/jpeg;base64,`) """ - inherit_tools: bool = True + detail: Literal["low", "high", "auto"] = "auto" """ - Whether to inherit tools from the parent agent or not. Defaults to true. + The detail level of the image """ - metadata: dict[str, Any] | None = None -class PromptStep(BaseWorkflowStep): +class LogStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["prompt"] = "prompt" - prompt: str | list[InputChatMLMessage] + kind_: Annotated[Literal["log"], Field("log", json_schema_extra={"readOnly": True})] """ - The prompt to run + The kind of step """ - settings: Settings | SettingsModel | SettingsModel1 + log: str """ - Settings for the prompt + The value to log """ -class Settings(BaseModel): - """ - Settings for the prompt - """ - +class Main(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - model: Annotated[ - str | None, - Field( - None, - pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", - ), + kind_: Annotated[ + Literal["map_reduce"], Field("map_reduce", json_schema_extra={"readOnly": True}) ] """ - Identifier of the model to be used + The kind of step + """ + over: str """ - stream: bool = False + The variable to iterate over + """ + map: ( + EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + ) """ - Indicates if the server should stream the response as it's generated + The steps to run for each iteration """ - stop: Annotated[list[str] | None, Field(None, max_length=4, min_length=1)] + reduce: str | None = None """ - Up to 4 sequences where the API will stop generating further tokens. + The expression to reduce the results. + If not provided, the results are collected and returned as a list. + A special parameter named `results` is the accumulator and `_` is the current value. """ - seed: Annotated[int | None, Field(None, ge=-1, le=1000)] + initial: Any = [] + + +class MainModel(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: str | None = None """ - If specified, the system will make a best effort to sample deterministically for that particular seed value + Discriminator property for BaseWorkflowStep. """ - max_tokens: Annotated[int | None, Field(None, ge=1)] + over: str """ - The maximum number of tokens to generate in the chat completion + The variable to iterate over """ - logit_bias: dict[str, LogitBias] | None = None + map: ( + EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + ) """ - Modify the likelihood of specified tokens appearing in the completion + The steps to run for each iteration """ - response_format: CompletionResponseFormat | None = None + reduce: str | None = None """ - Response format (set to `json_object` to restrict output to JSON) + The expression to reduce the results. + If not provided, the results are collected and returned as a list. + A special parameter named `results` is the accumulator and `_` is the current value. """ - agent: UUID | None = None + initial: Any = [] + + +class ParallelStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["parallel"], Field("parallel", json_schema_extra={"readOnly": True}) + ] """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + The kind of step """ - preset: ( - Literal[ - "problem_solving", - "conversational", - "fun", - "prose", - "creative", - "business", - "deterministic", - "code", - "multilingual", - ] - | None - ) = None + parallel: Annotated[ + list[ + EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + ], + Field(max_length=100), + ] """ - Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) + The steps to run in parallel. Max concurrency will depend on the platform. """ -class SettingsModel(BaseModel): +class PatchTaskRequest(BaseModel): """ - Settings for the prompt + Payload for patching a task """ model_config = ConfigDict( populate_by_name=True, ) - model: Annotated[ - str | None, - Field( - None, - pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", - ), - ] - """ - Identifier of the model to be used - """ - stream: bool = False + description: str = "" + main: ( + list[ + EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep + | ErrorWorkflowStep + | YieldStep + | WaitForInputStep + | IfElseWorkflowStep + | SwitchStep + | ForeachStep + | ParallelStep + | MainModel + ] + | None + ) = None """ - Indicates if the server should stream the response as it's generated + The entrypoint of the task. """ - stop: Annotated[list[str] | None, Field(None, max_length=4, min_length=1)] + input_schema: dict[str, Any] | None = None """ - Up to 4 sequences where the API will stop generating further tokens. + The schema for the input to the task. `null` means all inputs are valid. """ - seed: Annotated[int | None, Field(None, ge=-1, le=1000)] + tools: list[TaskTool] = [] """ - If specified, the system will make a best effort to sample deterministically for that particular seed value + Tools defined specifically for this task not included in the Agent itself. """ - max_tokens: Annotated[int | None, Field(None, ge=1)] + inherit_tools: StrictBool = True """ - The maximum number of tokens to generate in the chat completion + Whether to inherit tools from the parent agent or not. Defaults to true. """ - logit_bias: dict[str, LogitBias] | None = None + metadata: dict[str, Any] | None = None + + +class PromptItem(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + role: Literal[ + "user", + "assistant", + "system", + "function", + "function_response", + "function_call", + "auto", + ] """ - Modify the likelihood of specified tokens appearing in the completion + The role of the message """ - response_format: CompletionResponseFormat | None = None + content: list[str] | list[Content | ContentModel] | str """ - Response format (set to `json_object` to restrict output to JSON) + The content parts of the message """ - agent: UUID | None = None + name: str | None = None """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + Name """ - frequency_penalty: Annotated[float | None, Field(None, ge=-2.0, le=2.0)] + continue_: Annotated[StrictBool | None, Field(None, alias="continue")] """ - Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + Whether to continue this message or return a new one """ - presence_penalty: Annotated[float | None, Field(None, ge=-2.0, le=2.0)] + + +class PromptStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["prompt"], Field("prompt", json_schema_extra={"readOnly": True}) + ] """ - Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + The kind of step """ - temperature: Annotated[float | None, Field(None, ge=0.0, le=5.0)] + prompt: list[PromptItem] | str """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. + The prompt to run """ - top_p: Annotated[float | None, Field(None, ge=0.0, le=1.0)] + settings: ChatSettings | None = None """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. + Settings for the prompt """ -class SettingsModel1(BaseModel): +class ReturnStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["return"], Field("return", json_schema_extra={"readOnly": True}) + ] """ - Settings for the prompt + The kind of step + """ + return_: Annotated[dict[str, str], Field(alias="return")] """ + The value to return + """ + +class SearchStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - model: Annotated[ - str | None, - Field( - None, - pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", - ), + kind_: Annotated[ + Literal["search"], Field("search", json_schema_extra={"readOnly": True}) ] """ - Identifier of the model to be used + The kind of step """ - stream: bool = False + search: VectorDocSearchRequest | TextOnlyDocSearchRequest | HybridDocSearchRequest """ - Indicates if the server should stream the response as it's generated + The search query """ - stop: Annotated[list[str] | None, Field(None, max_length=4, min_length=1)] + + +class SetKey(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + key: str """ - Up to 4 sequences where the API will stop generating further tokens. + The key to set """ - seed: Annotated[int | None, Field(None, ge=-1, le=1000)] + value: str + """ + The value to set + """ + + +class SetStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[Literal["set"], Field("set", json_schema_extra={"readOnly": True})] """ - If specified, the system will make a best effort to sample deterministically for that particular seed value + The kind of step """ - max_tokens: Annotated[int | None, Field(None, ge=1)] + set: SetKey """ - The maximum number of tokens to generate in the chat completion + The value to set """ - logit_bias: dict[str, LogitBias] | None = None + + +class SleepFor(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + seconds: Annotated[int, Field(0, ge=0, le=60)] """ - Modify the likelihood of specified tokens appearing in the completion + The number of seconds to sleep for """ - response_format: CompletionResponseFormat | None = None + minutes: Annotated[int, Field(0, ge=0, le=60)] """ - Response format (set to `json_object` to restrict output to JSON) + The number of minutes to sleep for """ - agent: UUID | None = None + hours: Annotated[int, Field(0, ge=0, le=24)] """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + The number of hours to sleep for """ - repetition_penalty: Annotated[float | None, Field(None, ge=0.0, le=2.0)] + days: Annotated[int, Field(0, ge=0, le=30)] """ - Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + The number of days to sleep for """ - length_penalty: Annotated[float | None, Field(None, ge=0.0, le=2.0)] + + +class SleepStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["sleep"], Field("sleep", json_schema_extra={"readOnly": True}) + ] """ - Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. + The kind of step """ - temperature: Annotated[float | None, Field(None, ge=0.0, le=5.0)] + sleep: SleepFor """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. + The duration to sleep for (max 31 days) """ - top_p: Annotated[float | None, Field(None, ge=0.0, le=1.0)] + + +class SwitchStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["switch"], Field("switch", json_schema_extra={"readOnly": True}) + ] """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. + The kind of step """ - min_p: Annotated[float | None, Field(None, ge=0.0, le=1.0)] + switch: Annotated[list[CaseThen], Field(min_length=1)] """ - Minimum probability compared to leading token to be considered + The cond tree """ @@ -383,11 +634,22 @@ class Task(BaseModel): main: list[ EvaluateStep | ToolCallStep - | YieldStep | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep | ErrorWorkflowStep + | YieldStep | WaitForInputStep | IfElseWorkflowStep + | SwitchStep + | ForeachStep + | ParallelStep + | Main ] """ The entrypoint of the task. @@ -400,7 +662,7 @@ class Task(BaseModel): """ Tools defined specifically for this task not included in the Agent itself. """ - inherit_tools: bool = True + inherit_tools: StrictBool = True """ Whether to inherit tools from the parent agent or not. Defaults to true. """ @@ -416,42 +678,35 @@ class Task(BaseModel): metadata: dict[str, Any] | None = None -class TaskTool(BaseModel): +class TaskTool(CreateToolRequest): model_config = ConfigDict( populate_by_name=True, ) - inherited: Annotated[bool, Field(False, json_schema_extra={"readOnly": True})] + inherited: Annotated[StrictBool, Field(False, json_schema_extra={"readOnly": True})] """ Read-only: Whether the tool was inherited or not. Only applies within tasks. """ - type: Literal["function", "integration", "system", "api_call"] - """ - Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) - """ - name: Annotated[str, Field(pattern="^[^\\W0-9]\\w*$")] - """ - Name of the tool (must be unique for this agent and a valid python identifier string ) - """ - function: FunctionDef | None = None - integration: Any | None = None - system: Any | None = None - api_call: Any | None = None -class ToolCallStep(BaseWorkflowStep): +class ToolCallStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["tool_call"] = "tool_call" + kind_: Annotated[ + Literal["tool_call"], Field("tool_call", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ tool: Annotated[ str, Field(pattern="^(function|integration|system|api_call)\\.(\\w+)$") ] """ The tool to run """ - arguments: dict[str, Any] + arguments: dict[str, str] | Literal["_"] = "_" """ - The input parameters for the tool + The input parameters for the tool (defaults to last step output) """ @@ -467,11 +722,22 @@ class UpdateTaskRequest(BaseModel): main: list[ EvaluateStep | ToolCallStep - | YieldStep | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep + | ReturnStep + | SleepStep | ErrorWorkflowStep + | YieldStep | WaitForInputStep | IfElseWorkflowStep + | SwitchStep + | ForeachStep + | ParallelStep + | Main ] """ The entrypoint of the task. @@ -484,34 +750,56 @@ class UpdateTaskRequest(BaseModel): """ Tools defined specifically for this task not included in the Agent itself. """ - inherit_tools: bool = True + inherit_tools: StrictBool = True """ Whether to inherit tools from the parent agent or not. Defaults to true. """ metadata: dict[str, Any] | None = None -class WaitForInputStep(BaseWorkflowStep): +class WaitForInputInfo(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["wait_for_input"] = "wait_for_input" - info: str | dict[str, Any] + info: dict[str, str] """ Any additional info or data """ -class YieldStep(BaseWorkflowStep): +class WaitForInputStep(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - kind_: Literal["yield"] = "yield" + kind_: Annotated[ + Literal["wait_for_input"], + Field("wait_for_input", json_schema_extra={"readOnly": True}), + ] + """ + The kind of step + """ + wait_for_input: WaitForInputInfo + """ + Any additional info or data + """ + + +class YieldStep(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + ) + kind_: Annotated[ + Literal["yield"], Field("yield", json_schema_extra={"readOnly": True}) + ] + """ + The kind of step + """ workflow: str """ - The subworkflow to run + The subworkflow to run. + VALIDATION: Should resolve to a defined subworkflow. """ - arguments: dict[str, str] + arguments: dict[str, str] | Literal["_"] = "_" """ - The input parameters for the subworkflow + The input parameters for the subworkflow (defaults to last step output) """ diff --git a/agents-api/agents_api/autogen/Tools.py b/agents-api/agents_api/autogen/Tools.py index 16fbaeb7b..b28334041 100644 --- a/agents-api/agents_api/autogen/Tools.py +++ b/agents-api/agents_api/autogen/Tools.py @@ -28,45 +28,39 @@ class ChosenToolCall(BaseModel): id: Annotated[UUID, Field(json_schema_extra={"readOnly": True})] -class FunctionCallOption(BaseModel): +class CreateToolRequest(BaseModel): + """ + Payload for creating a tool + """ + model_config = ConfigDict( populate_by_name=True, ) - name: str + type: Literal["function", "integration", "system", "api_call"] """ - The name of the function + Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) """ - - -class FunctionDef(BaseModel): + name: Annotated[str, Field(max_length=40, pattern="^[^\\W0-9]\\w*$")] """ - Function definition + Name of the tool (must be unique for this agent and a valid python identifier string ) """ + function: FunctionDef | None = None + integration: Any | None = None + system: Any | None = None + api_call: Any | None = None + +class FunctionCallOption(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - name: Annotated[str, Field("overriden", pattern="^[^\\W0-9]\\w*$")] - """ - DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. - """ - description: Annotated[ - str | None, - Field( - None, - pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", - ), - ] - """ - Description of the function - """ - parameters: dict[str, Any] + name: str """ - The parameters the function accepts + The name of the function """ -class FunctionDefUpdate(BaseModel): +class FunctionDef(BaseModel): """ Function definition """ @@ -74,7 +68,7 @@ class FunctionDefUpdate(BaseModel): model_config = ConfigDict( populate_by_name=True, ) - name: Annotated[str, Field("overriden", pattern="^[^\\W0-9]\\w*$")] + name: Any | None = None """ DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. """ @@ -82,6 +76,7 @@ class FunctionDefUpdate(BaseModel): str | None, Field( None, + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -120,11 +115,11 @@ class PatchToolRequest(BaseModel): """ Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) """ - name: Annotated[str | None, Field(None, pattern="^[^\\W0-9]\\w*$")] + name: Annotated[str | None, Field(None, max_length=40, pattern="^[^\\W0-9]\\w*$")] """ Name of the tool (must be unique for this agent and a valid python identifier string ) """ - function: FunctionDefUpdate | None = None + function: FunctionDef | None = None integration: Any | None = None system: Any | None = None api_call: Any | None = None @@ -138,7 +133,7 @@ class Tool(BaseModel): """ Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) """ - name: Annotated[str, Field(pattern="^[^\\W0-9]\\w*$")] + name: Annotated[str, Field(max_length=40, pattern="^[^\\W0-9]\\w*$")] """ Name of the tool (must be unique for this agent and a valid python identifier string ) """ @@ -180,7 +175,7 @@ class UpdateToolRequest(BaseModel): """ Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) """ - name: Annotated[str, Field(pattern="^[^\\W0-9]\\w*$")] + name: Annotated[str, Field(max_length=40, pattern="^[^\\W0-9]\\w*$")] """ Name of the tool (must be unique for this agent and a valid python identifier string ) """ diff --git a/agents-api/agents_api/autogen/Users.py b/agents-api/agents_api/autogen/Users.py index 4e99f6923..6eeb1783d 100644 --- a/agents-api/agents_api/autogen/Users.py +++ b/agents-api/agents_api/autogen/Users.py @@ -22,6 +22,7 @@ class CreateUserRequest(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -47,6 +48,7 @@ class PatchUserRequest(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -72,6 +74,7 @@ class UpdateUserRequest(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -102,6 +105,7 @@ class User(BaseModel): str, Field( "", + max_length=120, pattern="^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", ), ] @@ -112,3 +116,10 @@ class User(BaseModel): """ About the user """ + + +class CreateOrUpdateUserRequest(CreateUserRequest): + model_config = ConfigDict( + populate_by_name=True, + ) + id: UUID diff --git a/agents-api/agents_api/autogen/openapi_model.py b/agents-api/agents_api/autogen/openapi_model.py index 3d5d3afaa..272b5dae0 100644 --- a/agents-api/agents_api/autogen/openapi_model.py +++ b/agents-api/agents_api/autogen/openapi_model.py @@ -1,9 +1,10 @@ # ruff: noqa: F401, F403, F405 -from typing import Annotated +from typing import Annotated, Any, Generic, Literal, Self, Type, TypeVar from uuid import UUID +from litellm.utils import _select_tokenizer as select_tokenizer +from litellm.utils import token_counter from pydantic import AwareDatetime, Field -from pydantic_partial import create_partial_model from ..common.utils.datetime import utcnow from .Agents import * @@ -18,23 +19,97 @@ from .Tools import * from .Users import * +# Generic models +# -------------- + +DataT = TypeVar("DataT", bound=BaseModel) + + +class ListResponse(BaseModel, Generic[DataT]): + items: list[DataT] + + +# Aliases +# ------- + CreateToolRequest = UpdateToolRequest CreateOrUpdateAgentRequest = UpdateAgentRequest CreateOrUpdateUserRequest = UpdateUserRequest CreateOrUpdateSessionRequest = CreateSessionRequest -CreateOrUpdateTaskRequest = CreateTaskRequest - -CreateTransitionRequest = create_partial_model( - Transition, - # The following fields are optional - "id", - "execution_id", - "created_at", - "updated_at", - "metadata", +ChatResponse = ChunkChatResponse | MessageChatResponse + +# TODO: Figure out wtf... 🤷‍♂️ +MapReduceStep = Main +ChatMLTextContentPart = Content +ChatMLImageContentPart = ContentModel +InputChatMLMessage = Message + + +# Custom types (not generated correctly) +# -------------------------------------- + +# TODO: Remove these when auto-population is fixed + +ChatMLContent = ( + list[ChatMLTextContentPart | ChatMLImageContentPart] + | Tool + | ChosenToolCall + | str + | ToolResponse + | list[ + list[ChatMLTextContentPart | ChatMLImageContentPart] + | Tool + | ChosenToolCall + | str + | ToolResponse + ] ) -ChatMLRole = BaseEntry.model_fields["role"].annotation +ChatMLRole = Literal[ + "user", + "assistant", + "system", + "function", + "function_response", + "function_call", + "auto", +] +assert BaseEntry.model_fields["role"].annotation == ChatMLRole + +ChatMLSource = Literal[ + "api_request", "api_response", "tool_response", "internal", "summarizer", "meta" +] +assert BaseEntry.model_fields["source"].annotation == ChatMLSource + + +ExecutionStatus = Literal[ + "queued", + "starting", + "running", + "awaiting_input", + "succeeded", + "failed", + "cancelled", +] +assert Execution.model_fields["status"].annotation == ExecutionStatus + + +TransitionType = Literal["finish", "wait", "resume", "error", "step", "cancelled"] +assert Transition.model_fields["type"].annotation == TransitionType + + +# Create models +# ------------- + + +class CreateTransitionRequest(Transition): + # The following fields are optional in this + + id: UUID | None = None + execution_id: UUID | None = None + created_at: AwareDatetime | None = None + updated_at: AwareDatetime | None = None + metadata: dict[str, Any] | None = None class CreateEntryRequest(BaseEntry): @@ -42,36 +117,35 @@ class CreateEntryRequest(BaseEntry): float, Field(ge=0.0, default_factory=lambda: utcnow().timestamp()) ] - -def make_session( - *, - agents: list[UUID], - users: list[UUID], - **data: dict, -) -> Session: - """ - Create a new session object. - """ - cls, participants = None, {} - - match (len(agents), len(users)): - case (0, _): - raise ValueError("At least one agent must be provided.") - case (1, 0): - cls = SingleAgentNoUserSession - participants = {"agent": agents[0]} - case (1, 1): - cls = SingleAgentSingleUserSession - participants = {"agent": agents[0], "user": users[0]} - case (1, u) if u > 1: - cls = SingleAgentMultiUserSession - participants = {"agent": agents[0], "users": users} - case _: - cls = MultiAgentMultiUserSession - participants = {"agents": agents, "users": users} - - return cls(**{**data, **participants}) - + @classmethod + def from_model_input( + cls: Type[Self], + model: str, + *, + role: ChatMLRole, + content: ChatMLContent, + name: str | None = None, + source: ChatMLSource, + **kwargs: dict, + ) -> Self: + tokenizer: dict = select_tokenizer(model=model) + token_count = token_counter( + model=model, messages=[{"role": role, "content": content, "name": name}] + ) + + return cls( + role=role, + content=content, + name=name, + source=source, + tokenizer=tokenizer["type"], + token_count=token_count, + **kwargs, + ) + + +# Task related models +# ------------------- WorkflowStep = ( PromptStep @@ -80,6 +154,18 @@ def make_session( | ToolCallStep | ErrorWorkflowStep | IfElseWorkflowStep + | ReturnStep + | SleepStep + | WaitForInputStep + | LogStep + | EmbedStep + | SearchStep + | SetStep + | GetStep + | ForeachStep + | ParallelStep + | SwitchStep + | MapReduceStep ) @@ -102,7 +188,9 @@ class TaskSpec(_Task): model_config = ConfigDict(extra="ignore") workflows: list[Workflow] - main: list[WorkflowStep] | None = None + + # Remove main field from the model + main: None = None class TaskSpecDef(TaskSpec): @@ -136,6 +224,8 @@ class CreateTaskRequest(_CreateTaskRequest): ) +CreateOrUpdateTaskRequest = CreateTaskRequest + _PatchTaskRequest = PatchTaskRequest diff --git a/agents-api/agents_api/clients/cozo.py b/agents-api/agents_api/clients/cozo.py index c9fafe710..e2991c9d8 100644 --- a/agents-api/agents_api/clients/cozo.py +++ b/agents-api/agents_api/clients/cozo.py @@ -1,9 +1,18 @@ +from typing import Any, Dict + from pycozo.client import Client from ..env import cozo_auth, cozo_host +from ..web import app -options = {"host": cozo_host} +options: Dict[str, str] = {"host": cozo_host} if cozo_auth: options.update({"auth": cozo_auth}) -client = Client("http", options=options) + +def get_cozo_client() -> Any: + client = getattr(app.state, "cozo_client", Client("http", options=options)) + if not hasattr(app.state, "cozo_client"): + app.state.cozo_client = client + + return client diff --git a/agents-api/agents_api/clients/embed.py b/agents-api/agents_api/clients/embed.py index 1176585a8..b9412f485 100644 --- a/agents-api/agents_api/clients/embed.py +++ b/agents-api/agents_api/clients/embed.py @@ -1,12 +1,12 @@ import httpx -from ..env import embedding_model_id, embedding_service_url, truncate_embed_text +from ..env import embedding_model_id, embedding_service_base, truncate_embed_text async def embed( inputs: list[str], join_inputs=False, - embedding_service_url: str = embedding_service_url, + embedding_service_url: str = embedding_service_base + "/embed", embedding_model_name: str = embedding_model_id, ) -> list[list[float]]: async with httpx.AsyncClient(timeout=30) as client: @@ -17,7 +17,7 @@ async def embed( }, json={ "inputs": "\n\n".join(inputs) if join_inputs else inputs, - "normalize": True, + # # FIXME: We should control the truncation ourselves and truncate before sending "truncate": truncate_embed_text, "model_id": embedding_model_name, diff --git a/agents-api/agents_api/clients/litellm.py b/agents-api/agents_api/clients/litellm.py new file mode 100644 index 000000000..4c78e2876 --- /dev/null +++ b/agents-api/agents_api/clients/litellm.py @@ -0,0 +1,21 @@ +from functools import wraps +from typing import List, TypeVar + +from litellm import acompletion as _acompletion +from litellm.utils import CustomStreamWrapper, ModelResponse + +from ..env import litellm_master_key, litellm_url + +_RWrapped = TypeVar("_RWrapped") + +__all__: List[str] = ["acompletion"] + + +@wraps(_acompletion) +async def acompletion(*, model: str, **kwargs) -> ModelResponse | CustomStreamWrapper: + return await _acompletion( + model=f"openai/{model}", # This is here because litellm proxy expects this format + **kwargs, + api_base=litellm_url, + api_key=litellm_master_key, + ) diff --git a/agents-api/agents_api/clients/model.py b/agents-api/agents_api/clients/model.py deleted file mode 100644 index 648cc8d57..000000000 --- a/agents-api/agents_api/clients/model.py +++ /dev/null @@ -1,10 +0,0 @@ -from openai import AsyncOpenAI - -from ..env import model_api_key, model_inference_url, openai_api_key - -openai_client = AsyncOpenAI(api_key=openai_api_key) - -julep_client = AsyncOpenAI( - base_url=model_inference_url, - api_key=model_api_key, -) diff --git a/agents-api/agents_api/clients/temporal.py b/agents-api/agents_api/clients/temporal.py index ad9205056..f271509b0 100644 --- a/agents-api/agents_api/clients/temporal.py +++ b/agents-api/agents_api/clients/temporal.py @@ -1,19 +1,25 @@ +from datetime import timedelta from uuid import UUID from temporalio.client import Client, TLSConfig -from agents_api.env import ( +from ..autogen.openapi_model import TransitionTarget +from ..common.protocol.tasks import ExecutionInput +from ..env import ( temporal_client_cert, temporal_namespace, temporal_private_key, + temporal_task_queue, temporal_worker_url, ) - -from ..common.protocol.tasks import ExecutionInput from ..worker.codec import pydantic_data_converter -async def get_client(): +async def get_client( + worker_url: str = temporal_worker_url, + namespace: str = temporal_namespace, + data_converter=pydantic_data_converter, +): tls_config = False if temporal_private_key and temporal_client_cert: @@ -23,61 +29,29 @@ async def get_client(): ) return await Client.connect( - temporal_worker_url, - namespace=temporal_namespace, + worker_url, + namespace=namespace, tls=tls_config, - data_converter=pydantic_data_converter, - ) - - -async def run_summarization_task(session_id: UUID, job_id: UUID): - client = await get_client() - - await client.execute_workflow( - "SummarizationWorkflow", - args=[str(session_id)], - task_queue="memory-task-queue", - id=str(job_id), - ) - - -async def run_embed_docs_task( - doc_id: UUID, title: str, content: list[str], job_id: UUID -): - client = await get_client() - - await client.execute_workflow( - "EmbedDocsWorkflow", - args=[str(doc_id), title, content], - task_queue="memory-task-queue", - id=str(job_id), - ) - - -async def run_truncation_task( - token_count_threshold: int, session_id: UUID, job_id: UUID -): - client = await get_client() - - await client.execute_workflow( - "TruncationWorkflow", - args=[str(session_id), token_count_threshold], - task_queue="memory-task-queue", - id=str(job_id), + data_converter=data_converter, ) async def run_task_execution_workflow( + *, execution_input: ExecutionInput, job_id: UUID, - start: tuple[str, int] = ("main", 0), + start: TransitionTarget = TransitionTarget(workflow="main", step=0), previous_inputs: list[dict] = [], + client: Client | None = None, ): - client = await get_client() + from ..workflows.task_execution import TaskExecutionWorkflow + + client = client or (await get_client()) - await client.execute_workflow( - "TaskExecutionWorkflow", + return await client.start_workflow( + TaskExecutionWorkflow.run, args=[execution_input, start, previous_inputs], - task_queue="memory-task-queue", + task_queue=temporal_task_queue, id=str(job_id), + run_timeout=timedelta(days=31), ) diff --git a/agents-api/agents_api/clients/worker/types.py b/agents-api/agents_api/clients/worker/types.py index 02b6add6c..3bf063083 100644 --- a/agents-api/agents_api/clients/worker/types.py +++ b/agents-api/agents_api/clients/worker/types.py @@ -1,108 +1,41 @@ -from typing import Callable, Literal, Optional, Protocol +from typing import Literal from uuid import UUID from pydantic import BaseModel from agents_api.autogen.openapi_model import ( - ChatMLImageContentPart, - ChatMLTextContentPart, + InputChatMLMessage, ) -class PromptModule(Protocol): - stop: list[str] - temperature: float - parser: Callable[[str], str] - make_prompt: Callable[..., str] - - -class ChatML(BaseModel): - role: Literal["system", "user", "assistant", "function_call"] - content: str | dict | list[ChatMLTextContentPart] | list[ChatMLImageContentPart] - - name: Optional[str] = None - entry_id: Optional[UUID] = None - - processed: bool = False - parent_id: Optional[UUID] = None - session_id: Optional[UUID] = None - timestamp: Optional[float] = None - token_count: Optional[int] = None - - -class BaseTask(BaseModel): ... - - -class BaseTaskArgs(BaseModel): ... - - -class MemoryManagementTaskArgs(BaseTaskArgs): +class MemoryManagementTaskArgs(BaseModel): session_id: UUID model: str - dialog: list[ChatML] + dialog: list[InputChatMLMessage] previous_memories: list[str] = [] -class MemoryManagementTask(BaseTask): +class MemoryManagementTask(BaseModel): name: Literal["memory_management.v1"] args: MemoryManagementTaskArgs -class MemoryDensityTaskArgs(BaseTaskArgs): +class MemoryDensityTaskArgs(BaseModel): memory: str -class MemoryDensityTask(BaseTask): +class MemoryDensityTask(BaseModel): name: Literal["memory_density.v1"] args: MemoryDensityTaskArgs -class MemoryRatingTaskArgs(BaseTaskArgs): +class MemoryRatingTaskArgs(BaseModel): memory: str -class MemoryRatingTask(BaseTask): +class MemoryRatingTask(BaseModel): name: Literal["memory_rating.v1"] args: MemoryRatingTaskArgs -class DialogInsightsTaskArgs(BaseTaskArgs): - dialog: list[ChatML] - person1: str - person2: str - - -class DialogInsightsTask(BaseTask): - name: Literal["dialog_insights.v1"] - args: DialogInsightsTaskArgs - - -class RelationshipSummaryTaskArgs(BaseTaskArgs): - statements: list[str] - person1: str - person2: str - - -class RelationshipSummaryTask(BaseTask): - name: Literal["relationship_summary.v1"] - args: RelationshipSummaryTaskArgs - - -class SalientQuestionsTaskArgs(BaseTaskArgs): - statements: list[str] - num: int = 3 - - -class SalientQuestionsTask(BaseTask): - name: Literal["salient_questions.v1"] - args: SalientQuestionsTaskArgs - - -CombinedTask = ( - MemoryManagementTask - | MemoryDensityTask - | MemoryRatingTask - | DialogInsightsTask - | RelationshipSummaryTask - | SalientQuestionsTask -) +CombinedTask = MemoryManagementTask | MemoryDensityTask | MemoryRatingTask diff --git a/agents-api/agents_api/clients/worker/worker.py b/agents-api/agents_api/clients/worker/worker.py index d7bcacc2b..1deb8d1c3 100644 --- a/agents-api/agents_api/clients/worker/worker.py +++ b/agents-api/agents_api/clients/worker/worker.py @@ -1,6 +1,6 @@ import httpx -from agents_api.env import worker_url +from agents_api.env import temporal_worker_url from .types import ( MemoryManagementTask, @@ -16,7 +16,7 @@ async def add_summarization_task(data: MemoryManagementTaskArgs): ) await client.post( - f"{worker_url}/task", + f"{temporal_worker_url}/task", headers={"Content-Type": "json"}, data=data.model_dump_json(), ) diff --git a/agents-api/agents_api/common/exceptions/__init__.py b/agents-api/agents_api/common/exceptions/__init__.py index fa0016b4e..a38a546a2 100644 --- a/agents-api/agents_api/common/exceptions/__init__.py +++ b/agents-api/agents_api/common/exceptions/__init__.py @@ -11,6 +11,6 @@ class BaseCommonException(Exception): - def __init__(self, msg: str, http_code: int): + def __init__(self, msg: str, http_code: int) -> None: super().__init__(msg) self.http_code = http_code diff --git a/agents-api/agents_api/common/protocol/developers.py b/agents-api/agents_api/common/protocol/developers.py new file mode 100644 index 000000000..4c1cfdac4 --- /dev/null +++ b/agents-api/agents_api/common/protocol/developers.py @@ -0,0 +1,22 @@ +""" +This module defines session-related data structures and settings used across the agents API. +It includes definitions for session settings and session data models. +""" + +from uuid import UUID + +from pydantic import AwareDatetime, BaseModel, EmailStr + + +class Developer(BaseModel): + """ + Represents the data associated with a developer + """ + + id: UUID + email: EmailStr + active: bool + tags: list[str] + settings: dict + created_at: AwareDatetime + created_at: AwareDatetime diff --git a/agents-api/agents_api/common/protocol/entries.py b/agents-api/agents_api/common/protocol/entries.py deleted file mode 100644 index 6ef7f70f2..000000000 --- a/agents-api/agents_api/common/protocol/entries.py +++ /dev/null @@ -1,54 +0,0 @@ -import json -from typing import Literal -from uuid import UUID - -from pydantic import Field, computed_field - -from ...autogen.openapi_model import ( - ChatMLImageContentPart, - ChatMLTextContentPart, -) -from ...autogen.openapi_model import ( - Entry as BaseEntry, -) - -EntrySource = Literal["api_request", "api_response", "internal", "summarizer"] -Tokenizer = Literal["character_count"] - - -LOW_IMAGE_TOKEN_COUNT = 85 -HIGH_IMAGE_TOKEN_COUNT = 85 + 4 * 170 - - -class Entry(BaseEntry): - """Represents an entry in the system, encapsulating all necessary details such as ID, session ID, source, role, and content among others.""" - - session_id: UUID - token_count: int - tokenizer: str = Field(default="character_count") - - @computed_field - @property - def token_count(self) -> int: - """Calculates the token count based on the content's character count. The tokenizer 'character_count' divides the length of the content by 3.5 to estimate the token count. Raises NotImplementedError for unknown tokenizers.""" - if self.tokenizer == "character_count": - content_length = 0 - if isinstance(self.content, str): - content_length = len(self.content) - elif isinstance(self.content, dict): - content_length = len(json.dumps(self.content)) - elif isinstance(self.content, list): - for part in self.content: - if isinstance(part, ChatMLTextContentPart): - content_length += len(part.text) - elif isinstance(part, ChatMLImageContentPart): - content_length += ( - LOW_IMAGE_TOKEN_COUNT - if part.image_url.detail == "low" - else HIGH_IMAGE_TOKEN_COUNT - ) - - # Divide the content length by 3.5 to estimate token count based on character count. - return int(content_length // 3.5) - - raise NotImplementedError(f"Unknown tokenizer: {self.tokenizer}") diff --git a/agents-api/agents_api/common/protocol/sessions.py b/agents-api/agents_api/common/protocol/sessions.py index 6973d61ad..1e98f7f12 100644 --- a/agents-api/agents_api/common/protocol/sessions.py +++ b/agents-api/agents_api/common/protocol/sessions.py @@ -3,19 +3,21 @@ It includes definitions for session settings and session data models. """ -from typing import Optional +from uuid import UUID from pydantic import BaseModel from ...autogen.openapi_model import ( Agent, - Entry, - GenerationPresetSettings, - OpenAISettings, + ChatInput, + ChatSettings, + MultiAgentMultiUserSession, Session, + SingleAgentMultiUserSession, + SingleAgentNoUserSession, + SingleAgentSingleUserSession, Tool, User, - VLLMSettings, ) from .agents import AgentDefaultSettings @@ -37,7 +39,12 @@ class SessionData(BaseModel): session: Session agents: list[Agent] users: list[User] = [] - settings: Optional[GenerationPresetSettings | OpenAISettings | VLLMSettings] = None + settings: ChatSettings | None = None + + +class Toolset(BaseModel): + agent_id: UUID + tools: list[Tool] class ChatContext(SessionData): @@ -45,5 +52,98 @@ class ChatContext(SessionData): Represents the data associated with a context, including for agents, and users. """ - entries: list[Entry] - tools: list[Tool] + toolsets: list[Toolset] + + def get_active_agent(self) -> Agent: + """ + Get the active agent from the session data. + """ + requested_agent: UUID | None = self.settings and self.settings.agent + + if requested_agent: + assert requested_agent in [agent.id for agent in self.agents], ( + f"Agent {requested_agent} not found in session agents: " + f"{[agent.id for agent in self.agents]}" + ) + + return next(agent for agent in self.agents if agent.id == requested_agent) + + return self.agents[0] + + def merge_settings(self, chat_input: ChatInput) -> ChatSettings: + request_settings = chat_input.model_dump(exclude_unset=True) + active_agent = self.get_active_agent() + + default_settings: AgentDefaultSettings | None = active_agent.default_settings + default_settings: dict = ( + default_settings and default_settings.model_dump() or {} + ) + + self.settings = settings = ChatSettings( + **{ + "model": active_agent.model, + **default_settings, + **request_settings, + } + ) + + return settings + + def get_active_tools(self) -> list[Tool]: + """ + Get the active toolset from the session data. + """ + active_agent = self.get_active_agent() + active_toolset = next( + toolset for toolset in self.toolsets if toolset.agent_id == active_agent.id + ) + + return active_toolset.tools + + def get_chat_environment(self) -> dict[str, dict | list[dict]]: + """ + Get the chat environment from the session data. + """ + current_agent = self.get_active_agent() + tools = self.get_active_tools() + settings: ChatSettings | None = self.settings + settings: dict = settings and settings.model_dump() or {} + + return { + "session": self.session.model_dump(), + "agents": [agent.model_dump() for agent in self.agents], + "current_agent": current_agent.model_dump(), + "users": [user.model_dump() for user in self.users], + "settings": settings, + "tools": [tool.model_dump() for tool in tools], + } + + +def make_session( + *, + agents: list[UUID], + users: list[UUID], + **data: dict, +) -> Session: + """ + Create a new session object. + """ + cls, participants = None, {} + + match (len(agents), len(users)): + case (0, _): + raise ValueError("At least one agent must be provided.") + case (1, 0): + cls = SingleAgentNoUserSession + participants = {"agent": agents[0]} + case (1, 1): + cls = SingleAgentSingleUserSession + participants = {"agent": agents[0], "user": users[0]} + case (1, u) if u > 1: + cls = SingleAgentMultiUserSession + participants = {"agent": agents[0], "users": users} + case _: + cls = MultiAgentMultiUserSession + participants = {"agents": agents, "users": users} + + return cls(**{**data, **participants}) diff --git a/agents-api/agents_api/common/protocol/tasks.py b/agents-api/agents_api/common/protocol/tasks.py index 850aa700a..d0c335e95 100644 --- a/agents-api/agents_api/common/protocol/tasks.py +++ b/agents-api/agents_api/common/protocol/tasks.py @@ -1,11 +1,13 @@ -from typing import Annotated, Any, List, Tuple +from typing import Annotated, Any, Type from uuid import UUID -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, computed_field +from pydantic_partial import create_partial_model from ...autogen.openapi_model import ( Agent, CreateTaskRequest, + CreateTransitionRequest, Execution, PartialTaskSpecDef, PatchTaskRequest, @@ -15,42 +17,103 @@ TaskSpecDef, TaskToolDef, Tool, + TransitionTarget, + TransitionType, UpdateTaskRequest, User, Workflow, WorkflowStep, ) +### NOTE: Here, "init" is NOT a real state, but a placeholder for the start state of the state machine +valid_transitions = { + # Start state + "init": ["wait", "error", "step", "cancelled"], + # End states + "finish": [], + "error": [], + "cancelled": [], + # Intermediate states + "wait": ["resume", "error", "cancelled"], + "resume": ["wait", "error", "step", "finish", "cancelled"], + "step": ["wait", "error", "step", "finish", "cancelled"], +} + +valid_previous_statuses = { + "running": ["queued", "starting", "awaiting_input"], + "cancelled": ["queued", "starting", "awaiting_input", "running"], +} + +transition_to_execution_status = { + "init": "queued", + "wait": "awaiting_input", + "resume": "running", + "step": "running", + "finish": "succeeded", + "error": "failed", + "cancelled": "cancelled", +} + + +PendingTransition: Type[BaseModel] = create_partial_model(CreateTransitionRequest) + class ExecutionInput(BaseModel): developer_id: UUID execution: Execution - task: TaskSpec + task: TaskSpecDef agent: Agent tools: list[Tool] arguments: dict[str, Any] + + # Not used at the moment user: User | None = None session: Session | None = None -class StepContext(ExecutionInput): - definition: WorkflowStep - inputs: list[dict[str, Any]] +class StepContext(BaseModel): + execution_input: ExecutionInput + inputs: list[Any] + cursor: TransitionTarget + + @computed_field + @property + def outputs(self) -> Annotated[list[dict[str, Any]], Field(exclude=True)]: + return self.inputs[1:] + + @computed_field + @property + def current_input(self) -> Annotated[dict[str, Any], Field(exclude=True)]: + return self.inputs[-1] + + @computed_field + @property + def current_workflow(self) -> Annotated[Workflow, Field(exclude=True)]: + workflows: list[Workflow] = self.execution_input.task.workflows + return next(wf for wf in workflows if wf.name == self.cursor.workflow) + + @computed_field + @property + def current_step(self) -> Annotated[WorkflowStep, Field(exclude=True)]: + step = self.current_workflow.steps[self.cursor.step] + return step + + @computed_field + @property + def is_last_step(self) -> Annotated[bool, Field(exclude=True)]: + return (self.cursor.step + 1) == len(self.current_workflow.steps) def model_dump(self, *args, **kwargs) -> dict[str, Any]: dump = super().model_dump(*args, **kwargs) - - dump["_"] = self.inputs[-1] - dump["outputs"] = self.inputs[1:] + dump["_"] = self.current_input return dump -class TransitionInfo(BaseModel): - from_: Tuple[str, int] - to: List[str | int] | None = None - type: Annotated[str, Field(pattern="^(finish|wait|error|step)$")] - outputs: dict[str, Any] | None = None +class StepOutcome(BaseModel): + error: str | None = None + output: Any = None + transition_to: tuple[TransitionType, TransitionTarget] | None = None def task_to_spec( diff --git a/agents-api/agents_api/common/utils/cozo.py b/agents-api/agents_api/common/utils/cozo.py index db8f336f0..a8195a1ba 100644 --- a/agents-api/agents_api/common/utils/cozo.py +++ b/agents-api/agents_api/common/utils/cozo.py @@ -8,7 +8,7 @@ from pycozo import Client # Define a mock client for testing purposes, simulating Cozo API client behavior. -_fake_client = SimpleNamespace() +_fake_client: SimpleNamespace = SimpleNamespace() # Lambda function to process and mutate data dictionaries using the Cozo client's internal method. This is a workaround to access protected member functions for testing. _fake_client._process_mutate_data_dict = lambda data: ( Client._process_mutate_data_dict(_fake_client, data) @@ -20,5 +20,5 @@ ) -def uuid_int_list_to_uuid4(data): +def uuid_int_list_to_uuid4(data) -> UUID: return UUID(bytes=b"".join([i.to_bytes(1, "big") for i in data])) diff --git a/agents-api/agents_api/common/utils/debug.py b/agents-api/agents_api/common/utils/debug.py index c6e73f263..0be9eabb4 100644 --- a/agents-api/agents_api/common/utils/debug.py +++ b/agents-api/agents_api/common/utils/debug.py @@ -6,7 +6,9 @@ def pdb_on_exception(fn): def wrapper(*args, **kwargs): try: return fn(*args, **kwargs) - except Exception: + except Exception as exc: + print(repr(getattr(exc, "__cause__", exc))) + import pdb import traceback diff --git a/agents-api/agents_api/common/utils/json.py b/agents-api/agents_api/common/utils/json.py index 9beff6049..3157af9c8 100644 --- a/agents-api/agents_api/common/utils/json.py +++ b/agents-api/agents_api/common/utils/json.py @@ -10,7 +10,7 @@ class CustomJSONEncoder(json.JSONEncoder): """A custom JSON encoder subclass that handles None values and UUIDs for JSON serialization. It allows specifying a default value for None objects during initialization.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Initializes the custom JSON encoder. Parameters: *args: Variable length argument list. @@ -19,7 +19,7 @@ def __init__(self, *args, **kwargs): self._default_empty_value = kwargs.pop("default_empty_value") super().__init__(*args, **kwargs) - def encode(self, o): + def encode(self, o) -> str: """Encodes the given object into a JSON formatted string. Parameters: o: The object to encode. @@ -27,7 +27,7 @@ def encode(self, o): # Use the overridden default method for serialization before encoding return super().encode(self.default(o)) - def default(self, obj): + def default(self, obj) -> Any: """Provides a default serialization for objects that the standard JSON encoder cannot serialize. Parameters: obj: The object to serialize. diff --git a/agents-api/agents_api/common/utils/template.py b/agents-api/agents_api/common/utils/template.py index 0a1a29c73..35ae2c350 100644 --- a/agents-api/agents_api/common/utils/template.py +++ b/agents-api/agents_api/common/utils/template.py @@ -1,14 +1,16 @@ +from typing import List + import arrow from jinja2.sandbox import ImmutableSandboxedEnvironment from jinja2schema import infer, to_json_schema from jsonschema import validate -__all__ = [ +__all__: List[str] = [ "render_template", ] # jinja environment -jinja_env = ImmutableSandboxedEnvironment( +jinja_env: ImmutableSandboxedEnvironment = ImmutableSandboxedEnvironment( autoescape=False, trim_blocks=True, lstrip_blocks=True, @@ -40,13 +42,39 @@ async def render_template_string( return rendered +async def render_template_chatml( + messages: list[dict], variables: dict, check: bool = False +) -> list[dict]: + # Parse template + # FIXME: should template_strings contain a list of ChatMLTextContentPart? Should we handle it somehow? + templates = [jinja_env.from_string(msg["content"]) for msg in messages] + + # If check is required, get required vars from template and validate variables + if check: + for template in templates: + schema = to_json_schema(infer(template)) + validate(instance=variables, schema=schema) + + # Render + rendered = [ + ({**msg, "content": await template.render_async(**variables)}) + for template, msg in zip(templates, messages) + ] + + return rendered + + async def render_template_parts( template_strings: list[dict], variables: dict, check: bool = False ) -> list[dict]: # Parse template # FIXME: should template_strings contain a list of ChatMLTextContentPart? Should we handle it somehow? templates = [ - (jinja_env.from_string(msg["text"]) if msg["type"] == "text" else None) + ( + jinja_env.from_string(msg["content"]["text"]) + if msg["content"]["type"] == "text" + else None + ) for msg in template_strings ] @@ -73,7 +101,7 @@ async def render_template_parts( async def render_template( - template_string: str | list[dict], + input: str | list[dict], variables: dict, check: bool = False, skip_vars: list[str] | None = None, @@ -83,11 +111,15 @@ async def render_template( for name, val in variables.items() if not (skip_vars is not None and isinstance(name, str) and name in skip_vars) } - if isinstance(template_string, str): - return await render_template_string(template_string, variables, check) - elif isinstance(template_string, list): - return await render_template_parts(template_string, variables, check) + match input: + case str(): + future = render_template_string(input, variables, check) + + case [{"content": str()}, *_]: + future = render_template_chatml(input, variables, check) + + case _: + future = render_template_parts(input, variables, check) - else: - raise ValueError("template_string should be str or list[dict]") + return await future diff --git a/agents-api/agents_api/common/utils/types.py b/agents-api/agents_api/common/utils/types.py new file mode 100644 index 000000000..6bf9cd502 --- /dev/null +++ b/agents-api/agents_api/common/utils/types.py @@ -0,0 +1,22 @@ +from typing import Type + +from beartype.vale import Is +from beartype.vale._core._valecore import BeartypeValidator +from pydantic import BaseModel + + +def dict_like(pydantic_model_class: Type[BaseModel]) -> BeartypeValidator: + required_fields_set: set[str] = set( + [ + field + for field, info in pydantic_model_class.model_fields.items() + if info.is_required() + ] + ) + + validator = Is[ + lambda x: isinstance(x, pydantic_model_class) + or required_fields_set.issubset(set(x.keys())) + ] + + return validator diff --git a/agents-api/agents_api/dependencies/auth.py b/agents-api/agents_api/dependencies/auth.py index 0054cb1cc..e5e22995b 100644 --- a/agents-api/agents_api/dependencies/auth.py +++ b/agents-api/agents_api/dependencies/auth.py @@ -1,13 +1,18 @@ +from typing import Any + from fastapi import HTTPException, Security from fastapi.security.api_key import APIKeyHeader from starlette.status import HTTP_403_FORBIDDEN from ..env import api_key, api_key_header_name -api_key_header = APIKeyHeader(name=api_key_header_name, auto_error=False) +api_key_header: Any = APIKeyHeader(name=api_key_header_name, auto_error=False) -async def get_api_key(user_api_key: str = Security(api_key_header)): +async def get_api_key( + user_api_key: str = Security(api_key_header), +) -> str: + user_api_key = str(user_api_key) user_api_key = (user_api_key or "").replace("Bearer ", "").strip() if user_api_key != api_key: diff --git a/agents-api/agents_api/dependencies/developer_id.py b/agents-api/agents_api/dependencies/developer_id.py index a088389b5..af4aa04de 100644 --- a/agents-api/agents_api/dependencies/developer_id.py +++ b/agents-api/agents_api/dependencies/developer_id.py @@ -1,44 +1,49 @@ -import uuid from typing import Annotated +from uuid import UUID from fastapi import Header -from pydantic import validate_email -from pydantic_core import PydanticCustomError +from ..common.protocol.developers import Developer from ..env import skip_check_developer_headers +from ..models.developer.get_developer import get_developer, verify_developer from .exceptions import InvalidHeaderFormat async def get_developer_id( - x_developer_id: Annotated[uuid.UUID | None, Header()] = None, -): + x_developer_id: Annotated[UUID | None, Header()] = None, +) -> UUID: if skip_check_developer_headers: - return x_developer_id or uuid.UUID("00000000-0000-0000-0000-000000000000") + return x_developer_id or UUID("00000000-0000-0000-0000-000000000000") if not x_developer_id: - raise InvalidHeaderFormat("X-Developer-Id header invalid") + raise InvalidHeaderFormat("X-Developer-Id header required") if isinstance(x_developer_id, str): try: - x_developer_id = uuid.UUID(x_developer_id, version=4) - except ValueError: - raise InvalidHeaderFormat("X-Developer-Id must be a valid UUID") + x_developer_id = UUID(x_developer_id, version=4) + except ValueError as e: + raise InvalidHeaderFormat("X-Developer-Id must be a valid UUID") from e + + verify_developer(developer_id=x_developer_id) return x_developer_id -async def get_developer_email( - x_developer_email: Annotated[str | None, Header()] = None, -): +async def get_developer_data( + x_developer_id: Annotated[UUID | None, Header()] = None, +) -> Developer: if skip_check_developer_headers: - return x_developer_email or "unknown_user@mail.com" + x_developer_id = x_developer_id or UUID("00000000-0000-0000-0000-000000000000") + + if not x_developer_id: + raise InvalidHeaderFormat("X-Developer-Id header required") - if not x_developer_email: - raise InvalidHeaderFormat("X-Developer-Email header invalid") + if isinstance(x_developer_id, str): + try: + x_developer_id = UUID(x_developer_id, version=4) + except ValueError as e: + raise InvalidHeaderFormat("X-Developer-Id must be a valid UUID") from e - try: - validate_email(x_developer_email) - except PydanticCustomError: - raise InvalidHeaderFormat("X-Developer-Email header invalid") + developer = get_developer(developer_id=x_developer_id) - return x_developer_email + return developer diff --git a/agents-api/agents_api/embed_models_registry.py b/agents-api/agents_api/embed_models_registry.py deleted file mode 100644 index fe125fefb..000000000 --- a/agents-api/agents_api/embed_models_registry.py +++ /dev/null @@ -1,161 +0,0 @@ -from dataclasses import dataclass -from typing import Any, TypedDict - -import numpy as np -import tiktoken -from tokenizers import Tokenizer - -from agents_api.clients.embed import embed -from agents_api.clients.model import openai_client -from agents_api.env import embedding_service_url -from agents_api.exceptions import ( - ModelNotSupportedError, - PromptTooBigError, - UnknownTokenizerError, -) - - -def normalize_l2(x): - x = np.array(x) - if x.ndim == 1: - norm = np.linalg.norm(x) - if norm == 0: - return x - return x / norm - else: - norm = np.linalg.norm(x, 2, axis=1, keepdims=True) - return np.where(norm == 0, x, x / norm) - - -class EmbeddingInput(TypedDict): - instruction: str | None - text: str - - -@dataclass -class EmbeddingModel: - embedding_service_url: str | None - embedding_provider: str - embedding_model_name: str - original_embedding_dimensions: int - output_embedding_dimensions: int - context_window: int - tokenizer: Any - - @classmethod - def from_model_name(cls, model_name: str): - try: - return _embedding_model_registry[model_name] - except KeyError: - raise ModelNotSupportedError(model_name) - - def _token_count(self, text: str) -> int: - tokenize = getattr(self.tokenizer, "tokenize", None) - if tokenize: - return len(tokenize(text)) - - encode = getattr(self.tokenizer, "encode", None) - if encode: - return len(encode(text)) - - raise UnknownTokenizerError - - def preprocess(self, inputs: list[EmbeddingInput]) -> list[str]: - """Maybe use this function from embed() to truncate (if needed) or raise an error""" - result: list[str] = [] - - for i in inputs: - instruction = i.get("instruction", "") - sep = " " if len(instruction) else "" - result.append(f"{instruction}{sep}{i['text']}") - - token_count = self._token_count(" ".join(result)) - if token_count > self.context_window: - raise PromptTooBigError(token_count, self.context_window) - - return result - - async def embed( - self, inputs: list[EmbeddingInput] - ) -> list[np.ndarray | list[float]]: - input = self.preprocess(inputs) - embeddings: list[np.ndarray | list[float]] = [] - - if self.embedding_provider == "julep": - embeddings = await embed( - input, - embedding_service_url=self.embedding_service_url - or embedding_service_url, - embedding_model_name=self.embedding_model_name, - ) - elif self.embedding_provider == "openai": - embeddings = ( - await openai_client.embeddings.create( - input=input, model=self.embedding_model_name - ) - .data[0] - .embedding - ) - - return self.normalize(embeddings) - - def normalize( - self, embeddings: list[np.ndarray | list[float]] - ) -> list[np.ndarray | list[float]]: - return [ - ( - e - if len(e) <= self.original_embedding_dimensions - else normalize_l2(e[: self.original_embedding_dimensions]) - ) - for e in embeddings - ] - - -_embedding_model_registry = { - "text-embedding-3-small": EmbeddingModel( - embedding_service_url=None, - embedding_provider="openai", - embedding_model_name="text-embedding-3-small", - original_embedding_dimensions=1024, - output_embedding_dimensions=1024, - context_window=8192, - tokenizer=tiktoken.encoding_for_model("text-embedding-3-small"), - ), - "text-embedding-3-large": EmbeddingModel( - embedding_service_url=None, - embedding_provider="openai", - embedding_model_name="text-embedding-3-large", - original_embedding_dimensions=1024, - output_embedding_dimensions=1024, - context_window=8192, - tokenizer=tiktoken.encoding_for_model("text-embedding-3-large"), - ), - "Alibaba-NLP/gte-large-en-v1.5": EmbeddingModel( - embedding_service_url=embedding_service_url, - embedding_provider="julep", - embedding_model_name="Alibaba-NLP/gte-large-en-v1.5", - original_embedding_dimensions=1024, - output_embedding_dimensions=1024, - context_window=8192, - tokenizer=Tokenizer.from_pretrained("Alibaba-NLP/gte-large-en-v1.5"), - ), - "BAAI/bge-m3": EmbeddingModel( - embedding_service_url=embedding_service_url, - embedding_provider="julep", - embedding_model_name="BAAI/bge-m3", - original_embedding_dimensions=1024, - output_embedding_dimensions=1024, - context_window=8192, - tokenizer=Tokenizer.from_pretrained("BAAI/bge-m3"), - ), - "BAAI/llm-embedder": EmbeddingModel( - embedding_service_url=embedding_service_url, - embedding_provider="julep", - embedding_model_name="BAAI/llm-embedder", - original_embedding_dimensions=1024, - output_embedding_dimensions=1024, - context_window=8192, - tokenizer=Tokenizer.from_pretrained("BAAI/llm-embedder"), - ), -} diff --git a/agents-api/agents_api/env.py b/agents-api/agents_api/env.py index 7fa4fe94c..64d9082ef 100644 --- a/agents-api/agents_api/env.py +++ b/agents-api/agents_api/env.py @@ -3,82 +3,101 @@ It utilizes the environs library for environment variable parsing. """ +import random from pprint import pprint +from typing import Any, Dict from environs import Env # Initialize the Env object for environment variable parsing. -env = Env() -env.read_env() +env: Any = Env() -# Debug mode + +# Debug +# ----- debug: bool = env.bool("AGENTS_API_DEBUG", default=False) +testing: bool = env.bool("AGENTS_API_TESTING", default=False) +sentry_dsn: str = env.str("SENTRY_DSN", default=None) -# Base URL for the COZO service. Defaults to the local development URL if not specified. + +# Cozo +# ---- cozo_host: str = env.str("COZO_HOST", default="http://127.0.0.1:9070") cozo_auth: str = env.str("COZO_AUTH_TOKEN", default=None) -model_api_key: str = env.str("MODEL_API_KEY", default=None) -model_inference_url: str = env.str("MODEL_INFERENCE_URL", default=None) -openai_api_key: str = env.str("OPENAI_API_KEY", default="") summarization_model_name: str = env.str( "SUMMARIZATION_MODEL_NAME", default="gpt-4-turbo" ) -worker_url: str = env.str("WORKER_URL", default=None) -sentry_dsn: str = env.str("SENTRY_DSN", default=None) -# Temporal -temporal_endpoint = env.str("TEMPORAL_ENDPOINT", default="localhost:7233") -temporal_task_queue = env.str("TEMPORAL_TASK_QUEUE", default="memory-task-queue") +# Auth +# ---- +_random_generated_key: str = "".join(str(random.randint(0, 9)) for _ in range(32)) +api_key: str = env.str("AGENTS_API_KEY", _random_generated_key) + +if api_key == _random_generated_key: + print(f"Generated API key since not set in the environment: {api_key}") -# auth -api_key: str = env.str("AGENTS_API_KEY") api_key_header_name: str = env.str("AGENTS_API_KEY_HEADER_NAME", default="X-Auth-Key") skip_check_developer_headers: bool = env.bool( "SKIP_CHECK_DEVELOPER_HEADERS", default=False ) -embedding_service_url: str = env.str( - "EMBEDDING_SERVICE_URL", default="http://0.0.0.0:8083/embed" -) +# Litellm API +# ----------- +litellm_url: str = env.str("LITELLM_URL", default="http://0.0.0.0:4000") +litellm_master_key: str = env.str("LITELLM_MASTER_KEY", default="") -embedding_model_id: str = env.str("EMBEDDING_MODEL_ID", default="BAAI/bge-m3") +# Embedding service +# ----------------- +embedding_service_base: str = env.str( + "EMBEDDING_SERVICE_BASE", default="http://0.0.0.0:8082" +) +embedding_model_id: str = env.str( + "EMBEDDING_MODEL_ID", default="Alibaba-NLP/gte-large-en-v1.5" +) truncate_embed_text: bool = env.bool("TRUNCATE_EMBED_TEXT", default=False) + # Temporal +# -------- temporal_worker_url: str = env.str("TEMPORAL_WORKER_URL", default="localhost:7233") temporal_namespace: str = env.str("TEMPORAL_NAMESPACE", default="default") temporal_client_cert: str = env.str("TEMPORAL_CLIENT_CERT", default=None) temporal_private_key: str = env.str("TEMPORAL_PRIVATE_KEY", default=None) +temporal_endpoint: Any = env.str("TEMPORAL_ENDPOINT", default="localhost:7233") +temporal_task_queue: Any = env.str("TEMPORAL_TASK_QUEUE", default="julep-task-queue") -# Consolidate environment variables into a dictionary for easy access and debugging. -environment = dict( + +# Consolidate environment variables +environment: Dict[str, Any] = dict( debug=debug, cozo_host=cozo_host, cozo_auth=cozo_auth, - worker_url=worker_url, sentry_dsn=sentry_dsn, temporal_endpoint=temporal_endpoint, temporal_task_queue=temporal_task_queue, api_key=api_key, api_key_header_name=api_key_header_name, skip_check_developer_headers=skip_check_developer_headers, - embedding_service_url=embedding_service_url, + embedding_service_base=embedding_service_base, truncate_embed_text=truncate_embed_text, temporal_worker_url=temporal_worker_url, temporal_namespace=temporal_namespace, - openai_api_key=openai_api_key, - docs_embedding_service_url=embedding_service_url, embedding_model_id=embedding_model_id, + testing=testing, ) -if openai_api_key == "": - print("OpenAI API key not found. OpenAI API will not be enabled.") - -if debug: +if debug or testing: # Print the loaded environment variables for debugging purposes. print("Environment variables:") pprint(environment) print() + + # Yell if testing is enabled + print("@" * 80) + print( + f"@@@ Running in {'testing' if testing else 'debug'} mode. This should not be enabled in production. @@@" + ) + print("@" * 80) diff --git a/agents-api/agents_api/exceptions.py b/agents-api/agents_api/exceptions.py index 2ccc5a67f..fbb8f00f8 100644 --- a/agents-api/agents_api/exceptions.py +++ b/agents-api/agents_api/exceptions.py @@ -3,17 +3,17 @@ class AgentsBaseException(Exception): class ModelNotSupportedError(AgentsBaseException): - def __init__(self, model_name): + def __init__(self, model_name) -> None: super().__init__(f"model {model_name} is not supported") class PromptTooBigError(AgentsBaseException): - def __init__(self, token_count, max_tokens): + def __init__(self, token_count, max_tokens) -> None: super().__init__( f"prompt is too big, {token_count} tokens provided, exceeds maximum of {max_tokens}" ) class UnknownTokenizerError(AgentsBaseException): - def __init__(self): + def __init__(self) -> None: super().__init__("unknown tokenizer") diff --git a/agents-api/agents_api/model_registry.py b/agents-api/agents_api/model_registry.py index a7bf13e39..0120cc205 100644 --- a/agents-api/agents_api/model_registry.py +++ b/agents-api/agents_api/model_registry.py @@ -2,20 +2,7 @@ Model Registry maintains a list of supported models and their configs. """ -import ast -import json -import xml.etree.ElementTree as ET -from typing import Dict, Literal, Optional - -import litellm -from litellm.utils import get_valid_models -from pydantic import BaseModel - -from agents_api.clients.worker.types import ChatML -from agents_api.common.exceptions.agents import ( - AgentModelNotValid, - MissingAgentModelAPIKeyError, -) +from typing import Dict GPT4_MODELS: Dict[str, int] = { # stable model names: @@ -79,7 +66,7 @@ } -DISCONTINUED_MODELS = { +DISCONTINUED_MODELS: Dict[str, int] = { "code-davinci-002": 8001, "code-davinci-001": 8001, "code-cushman-002": 2048, @@ -97,164 +84,29 @@ "claude-3-haiku-20240307": 180000, } -OPENAI_MODELS = {**GPT4_MODELS, **TURBO_MODELS, **GPT3_5_MODELS, **GPT3_MODELS} +OPENAI_MODELS: Dict[str, int] = { + **GPT4_MODELS, + **TURBO_MODELS, + **GPT3_5_MODELS, + **GPT3_MODELS, +} -LOCAL_MODELS = { - "julep-ai/samantha-1-turbo": 32768, - "julep-ai/samantha-1-turbo-awq": 32768, +LOCAL_MODELS: Dict[str, int] = { + "gpt-4o": 32768, + "gpt-4o-awq": 32768, "TinyLlama/TinyLlama_v1.1": 2048, "casperhansen/llama-3-8b-instruct-awq": 8192, "julep-ai/Hermes-2-Theta-Llama-3-8B": 8192, "OpenPipe/Hermes-2-Theta-Llama-3-8B-32k": 32768, } -LOCAL_MODELS_WITH_TOOL_CALLS = { +LOCAL_MODELS_WITH_TOOL_CALLS: Dict[str, int] = { "OpenPipe/Hermes-2-Theta-Llama-3-8B-32k": 32768, "julep-ai/Hermes-2-Theta-Llama-3-8B": 8192, } -OLLAMA_MODELS = { +OLLAMA_MODELS: Dict[str, int] = { "llama2": 4096, } -CHAT_MODELS = {**GPT4_MODELS, **TURBO_MODELS, **CLAUDE_MODELS} - -ALL_AVAILABLE_MODELS = litellm.model_list + list(LOCAL_MODELS.keys()) -VALID_MODELS = get_valid_models() + list(LOCAL_MODELS.keys()) - - -class FunctionCall(BaseModel): - arguments: dict - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: str - """The name of the function to call.""" - - -class FunctionDefinition(BaseModel): - name: str - description: Optional[str] = None - parameters: Optional[Dict[str, object]] = None - - -class FunctionSignature(BaseModel): - function: FunctionDefinition - type: Literal["function"] - - -class PromptSchema(BaseModel): - Role: str - Objective: str - Tools: str - Schema: str - Instructions: str - - -def validate_configuration(model: str): - """ - Validates the model specified in the request - """ - if model not in ALL_AVAILABLE_MODELS: - raise AgentModelNotValid(model, ALL_AVAILABLE_MODELS) - elif model not in VALID_MODELS: - raise MissingAgentModelAPIKeyError(model) - - -def load_context(init_context: list[ChatML], model: str): - """ - Converts the message history into a format supported by the model. - """ - if model in litellm.utils.get_valid_models(): - init_context = [ - { - "role": "assistant" if msg.role == "function_call" else msg.role, - "content": msg.content, - } - for msg in init_context - ] - elif model in LOCAL_MODELS: - init_context = [ - {"name": msg.name, "role": msg.role, "content": msg.content} - for msg in init_context - ] - else: - raise AgentModelNotValid(model, ALL_AVAILABLE_MODELS) - return init_context - - -def validate_and_extract_tool_calls(assistant_content): - validation_result = False - tool_calls = [] - error_message = None - - try: - # wrap content in root element - xml_root_element = f"{assistant_content}" - root = ET.fromstring(xml_root_element) - - # extract JSON data - for element in root.findall(".//tool_call"): - json_data = None - try: - if element.text is None: - continue - - json_text = element.text.strip() - - try: - # Prioritize json.loads for better error handling - json_data = json.loads(json_text) - except json.JSONDecodeError as json_err: - try: - # Fallback to ast.literal_eval if json.loads fails - json_data = ast.literal_eval(json_text) - except (SyntaxError, ValueError) as eval_err: - error_message = ( - f"JSON parsing failed with both json.loads and ast.literal_eval:\n" - f"- JSON Decode Error: {json_err}\n" - f"- Fallback Syntax/Value Error: {eval_err}\n" - f"- Problematic JSON text: {json_text}" - ) - continue - except BaseException as e: - error_message = f"Cannot strip text: {e}" - - if json_data is not None: - tool_calls.append(json_data) - validation_result = True - - except ET.ParseError as err: - error_message = f"XML Parse Error: {err}" - - # Return default values if no valid data is extracted - return validation_result, tool_calls, error_message - - -def get_extra_settings(settings): - extra_settings = ( - dict( - repetition_penalty=settings.repetition_penalty, - best_of=1, - top_k=1, - length_penalty=settings.length_penalty, - logit_bias=settings.logit_bias, - preset=settings.preset.name if settings.preset else None, - ) - if settings.model in LOCAL_MODELS - else {} - ) - - return extra_settings - - -# TODO: implement and use this to work with the response from different model formats -def parse_response(): - """ - method that converts the response from the provider back into the openai format - """ - pass +CHAT_MODELS: Dict[str, int] = {**GPT4_MODELS, **TURBO_MODELS, **CLAUDE_MODELS} diff --git a/agents-api/agents_api/models/__init__.py b/agents-api/agents_api/models/__init__.py index d90013a48..b1f918ee7 100644 --- a/agents-api/agents_api/models/__init__.py +++ b/agents-api/agents_api/models/__init__.py @@ -5,3 +5,15 @@ This module also integrates with the `common` module for exception handling and utility functions, ensuring robust error management and providing reusable components for data processing and query construction. """ + +# ruff: noqa: F401, F403, F405 + +import agents_api.models.agent as agent +import agents_api.models.developer as developer +import agents_api.models.docs as docs +import agents_api.models.entry as entry +import agents_api.models.execution as execution +import agents_api.models.session as session +import agents_api.models.task as task +import agents_api.models.tools as tools +import agents_api.models.user as user diff --git a/agents-api/agents_api/models/agent/__init__.py b/agents-api/agents_api/models/agent/__init__.py index 41d808c70..2beaf8166 100644 --- a/agents-api/agents_api/models/agent/__init__.py +++ b/agents-api/agents_api/models/agent/__init__.py @@ -10,3 +10,13 @@ This module serves as the backbone for agent management within the CozoDB ecosystem, facilitating a wide range of operations necessary for the effective handling of agent data. """ + +# ruff: noqa: F401, F403, F405 + +from .create_agent import create_agent +from .create_or_update_agent import create_or_update_agent +from .delete_agent import delete_agent +from .get_agent import get_agent +from .list_agents import list_agents +from .patch_agent import patch_agent +from .update_agent import update_agent diff --git a/agents-api/agents_api/models/agent/create_agent.py b/agents-api/agents_api/models/agent/create_agent.py index 0e86c6634..73be4ec77 100644 --- a/agents-api/agents_api/models/agent/create_agent.py +++ b/agents-api/agents_api/models/agent/create_agent.py @@ -3,6 +3,7 @@ It includes functions to construct and execute datalog queries for inserting new agent records. """ +from typing import Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -20,16 +21,27 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { + lambda e: isinstance(e, QueryException) + and "asserted to return some results, but returned none" + in str(e): lambda *_: HTTPException( + detail="developer not found", status_code=403 + ), QueryException: partialclass(HTTPException, status_code=400), ValidationError: partialclass(HTTPException, status_code=400), TypeError: partialclass(HTTPException, status_code=400), } ) @wrap_in_class( - Agent, one=True, transform=lambda d: {"id": UUID(d.pop("agent_id")), **d} + Agent, + one=True, + transform=lambda d: {"id": UUID(d.pop("agent_id")), **d}, + _kind="inserted", ) @cozo_query @beartype @@ -43,32 +55,27 @@ def create_agent( Constructs and executes a datalog query to create a new agent in the database. Parameters: - - agent_id (UUID): The unique identifier for the agent. + - agent_id (UUID | None): The unique identifier for the agent. - developer_id (UUID): The unique identifier for the developer creating the agent. - - name (str): The name of the agent. - - about (str): A description of the agent. - - instructions (list[str], optional): A list of instructions for using the agent. Defaults to an empty list. - - model (str, optional): The model identifier for the agent. Defaults to "julep-ai/samantha-1-turbo". - - metadata (dict, optional): A dictionary of metadata for the agent. Defaults to an empty dict. - - default_settings (dict, optional): A dictionary of default settings for the agent. Defaults to an empty dict. - - client (CozoClient, optional): The CozoDB client instance to use for the query. Defaults to a preconfigured client instance. + - data (CreateAgentRequest): The data for the new agent. Returns: - Agent: The newly created agent record. + - Agent: The newly created agent record. """ agent_id = agent_id or uuid4() # Extract the agent data from the payload data.metadata = data.metadata or {} + data.default_settings = data.default_settings or {} + data.instructions = ( data.instructions if isinstance(data.instructions, list) else [data.instructions] ) - data.default_settings = data.default_settings or {} - agent_data = data.model_dump() + agent_data = data.model_dump(exclude_unset=True) default_settings = agent_data.pop("default_settings") settings_cols, settings_vals = cozo_process_mutate_data( diff --git a/agents-api/agents_api/models/agent/create_or_update_agent.py b/agents-api/agents_api/models/agent/create_or_update_agent.py index 2c05529f2..64b008d44 100644 --- a/agents-api/agents_api/models/agent/create_or_update_agent.py +++ b/agents-api/agents_api/models/agent/create_or_update_agent.py @@ -3,6 +3,7 @@ It includes functions to construct and execute datalog queries for inserting new agent records. """ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -20,6 +21,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -38,7 +42,7 @@ def create_or_update_agent( developer_id: UUID, agent_id: UUID, data: CreateOrUpdateAgentRequest, -) -> tuple[list[str], dict]: +) -> tuple[list[str | None], dict]: """ Constructs and executes a datalog query to create a new agent in the database. @@ -48,7 +52,7 @@ def create_or_update_agent( - name (str): The name of the agent. - about (str): A description of the agent. - instructions (list[str], optional): A list of instructions for using the agent. Defaults to an empty list. - - model (str, optional): The model identifier for the agent. Defaults to "julep-ai/samantha-1-turbo". + - model (str, optional): The model identifier for the agent. Defaults to "gpt-4o". - metadata (dict, optional): A dictionary of metadata for the agent. Defaults to an empty dict. - default_settings (dict, optional): A dictionary of default settings for the agent. Defaults to an empty dict. - client (CozoClient, optional): The CozoDB client instance to use for the query. Defaults to a preconfigured client instance. @@ -123,7 +127,7 @@ def create_or_update_agent( queries = [ verify_developer_id_query(developer_id), - default_settings and default_settings_query, + default_settings_query if default_settings else None, agent_query, ] diff --git a/agents-api/agents_api/models/agent/delete_agent.py b/agents-api/agents_api/models/agent/delete_agent.py index a9af4e2bc..409c755d3 100644 --- a/agents-api/agents_api/models/agent/delete_agent.py +++ b/agents-api/agents_api/models/agent/delete_agent.py @@ -2,6 +2,7 @@ This module contains the implementation of the delete_agent_query function, which is responsible for deleting an agent and its related default settings from the CozoDB database. """ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -20,9 +21,17 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { + lambda e: isinstance(e, QueryException) + and "asserted to return some results, but returned none" + in str(e): lambda *_: HTTPException( + detail="developer not found or doesnt own resource", status_code=404 + ), QueryException: partialclass(HTTPException, status_code=400), ValidationError: partialclass(HTTPException, status_code=400), TypeError: partialclass(HTTPException, status_code=400), @@ -36,6 +45,7 @@ "deleted_at": utcnow(), "jobs": [], }, + _kind="deleted", ) @cozo_query @beartype @@ -57,14 +67,18 @@ def delete_agent(*, developer_id: UUID, agent_id: UUID) -> tuple[list[str], dict verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id), """ # Delete docs - ?[agent_id, doc_id] := - *agent_docs{ - agent_id, + ?[owner_id, owner_type, doc_id] := + *docs{ + owner_type, + owner_id, doc_id, - }, agent_id = to_uuid($agent_id) + }, + owner_id = to_uuid($agent_id), + owner_type = "agent" - :delete agent_docs { - agent_id, + :delete docs { + owner_type, + owner_id, doc_id } :returning diff --git a/agents-api/agents_api/models/agent/get_agent.py b/agents-api/agents_api/models/agent/get_agent.py index a57c14a31..c977fa614 100644 --- a/agents-api/agents_api/models/agent/get_agent.py +++ b/agents-api/agents_api/models/agent/get_agent.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -15,9 +16,21 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { + lambda e: isinstance(e, QueryException) + and "Developer not found" in str(e): lambda *_: HTTPException( + detail="developer does not exist", status_code=403 + ), + lambda e: isinstance(e, QueryException) + and "asserted to return some results, but returned none" + in str(e): lambda *_: HTTPException( + detail="developer doesnt own resource", status_code=404 + ), QueryException: partialclass(HTTPException, status_code=400), ValidationError: partialclass(HTTPException, status_code=400), TypeError: partialclass(HTTPException, status_code=400), diff --git a/agents-api/agents_api/models/agent/list_agents.py b/agents-api/agents_api/models/agent/list_agents.py index 5d291f654..5266659d7 100644 --- a/agents-api/agents_api/models/agent/list_agents.py +++ b/agents-api/agents_api/models/agent/list_agents.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/agent/patch_agent.py b/agents-api/agents_api/models/agent/patch_agent.py index 3a374f91a..72fdc7811 100644 --- a/agents-api/agents_api/models/agent/patch_agent.py +++ b/agents-api/agents_api/models/agent/patch_agent.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -17,6 +18,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -29,6 +33,7 @@ ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["agent_id"], "jobs": [], **d}, + _kind="inserted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/agent/test_agent_queries.py b/agents-api/agents_api/models/agent/test_agent_queries.py deleted file mode 100644 index b416b3628..000000000 --- a/agents-api/agents_api/models/agent/test_agent_queries.py +++ /dev/null @@ -1,167 +0,0 @@ -# # Tests for agent queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test - -# from .create_agent import create_agent_query -# from .delete_agent import delete_agent_query -# from .get_agent import get_agent_query -# from .list_agents import list_agents_query -# from .update_agent import update_agent_query - -# MODEL = "julep-ai/samantha-1-turbo" - - -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create agent") -# def _(): -# client = cozo_client() -# agent_id = uuid4() -# developer_id = uuid4() - -# create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# name="test agent", -# about="test agent about", -# client=client, -# ) - - -# @test("model: create agent with instructions") -# def _(): -# client = cozo_client() -# agent_id = uuid4() -# developer_id = uuid4() - -# create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# name="test agent", -# about="test agent about", -# instructions=[ -# "test instruction", -# ], -# client=client, -# ) - - -# @test("model: get agent not exists") -# def _(): -# client = cozo_client() -# agent_id = uuid4() -# developer_id = uuid4() - -# result = get_agent_query( -# agent_id=agent_id, developer_id=developer_id, client=client -# ) - -# assert len(result["id"]) == 0 - - -# @test("model: get agent exists") -# def _(): -# client = cozo_client() -# agent_id = uuid4() -# developer_id = uuid4() - -# result = create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# name="test agent", -# about="test agent about", -# default_settings={"temperature": 1.5}, -# client=client, -# ) - -# result = get_agent_query( -# agent_id=agent_id, developer_id=developer_id, client=client -# ) - -# assert len(result["id"]) == 1 -# assert "temperature" in result["default_settings"][0] -# assert result["default_settings"][0]["temperature"] == 1.5 - - -# @test("model: delete agent") -# def _(): -# client = cozo_client() -# agent_id = uuid4() -# developer_id = uuid4() - -# # Create the agent -# result = create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# name="test agent", -# about="test agent about", -# client=client, -# ) - -# # Delete the agent -# result = delete_agent_query( -# agent_id=agent_id, developer_id=developer_id, client=client -# ) - -# # Check that the agent is deleted -# result = get_agent_query( -# agent_id=agent_id, developer_id=developer_id, client=client -# ) - -# assert len(result["id"]) == 0 - - -# @test("model: update agent") -# def _(): -# client = cozo_client() -# agent_id = uuid4() -# developer_id = uuid4() - -# create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# name="test agent", -# about="test agent about", -# client=client, -# ) - -# result = update_agent_query( -# agent_id=agent_id, -# developer_id=developer_id, -# name="updated agent", -# about="updated agent about", -# default_settings={"temperature": 1.5}, -# client=client, -# ) - -# data = result.iloc[0].to_dict() - -# assert data["updated_at"] > data["created_at"] - - -# @test("model: list agents") -# def _(): -# """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved.""" -# client = cozo_client() -# developer_id = uuid4() - -# result = list_agents_query(developer_id=developer_id, client=client) - -# assert len(result["id"]) == 0 diff --git a/agents-api/agents_api/models/agent/update_agent.py b/agents-api/agents_api/models/agent/update_agent.py index f699be248..be7e9ea21 100644 --- a/agents-api/agents_api/models/agent/update_agent.py +++ b/agents-api/agents_api/models/agent/update_agent.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +17,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -28,6 +32,7 @@ ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["agent_id"], "jobs": [], **d}, + _kind="inserted", ) @cozo_query @beartype @@ -49,7 +54,11 @@ def update_agent( Returns: ResourceUpdatedResponse: The updated agent data. """ - default_settings = data.default_settings.model_dump(exclude_none=True) + default_settings = ( + data.default_settings.model_dump(exclude_none=True) + if data.default_settings + else {} + ) update_data = data.model_dump() # Remove default settings from the agent update data diff --git a/agents-api/agents_api/models/chat/__init__.py b/agents-api/agents_api/models/chat/__init__.py new file mode 100644 index 000000000..428b72572 --- /dev/null +++ b/agents-api/agents_api/models/chat/__init__.py @@ -0,0 +1,22 @@ +""" +Module: agents_api/models/docs + +This module is responsible for managing document-related operations within the application, particularly for agents and possibly other entities. It serves as a core component of the document management system, enabling features such as document creation, listing, deletion, and embedding of snippets for enhanced search and retrieval capabilities. + +Main functionalities include: +- Creating new documents and associating them with agents or users. +- Listing documents based on various criteria, including ownership and metadata filters. +- Deleting documents by their unique identifiers. +- Embedding document snippets for retrieval purposes. + +The module interacts with other parts of the application, such as the agents and users modules, to provide a comprehensive document management system. Its role is crucial in enabling document search, retrieval, and management features within the context of agents and users. + +This documentation aims to provide clear, concise, and sufficient context for new developers or contributors to understand the module's role without needing to dive deep into the code immediately. +""" + +# ruff: noqa: F401, F403, F405 + +from .gather_messages import gather_messages +from .get_cached_response import get_cached_response +from .prepare_chat_context import prepare_chat_context +from .set_cached_response import set_cached_response diff --git a/agents-api/agents_api/models/chat/gather_messages.py b/agents-api/agents_api/models/chat/gather_messages.py new file mode 100644 index 000000000..f8e08632d --- /dev/null +++ b/agents-api/agents_api/models/chat/gather_messages.py @@ -0,0 +1,85 @@ +from typing import TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from agents_api.autogen.Chat import ChatInput + +from ...autogen.openapi_model import DocReference, History +from ...clients import embed +from ...common.protocol.developers import Developer +from ...common.protocol.sessions import ChatContext +from ..docs.search_docs_hybrid import search_docs_hybrid +from ..entry.get_history import get_history +from ..utils import ( + partialclass, + rewrap_exceptions, +) + +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@beartype +async def gather_messages( + *, + developer: Developer, + session_id: UUID, + chat_context: ChatContext, + chat_input: ChatInput, +): + new_raw_messages = [msg.model_dump() for msg in chat_input.messages] + recall = chat_input.recall + + assert len(new_raw_messages) > 0 + + # Get the session history + history: History = get_history( + developer_id=developer.id, + session_id=session_id, + allowed_sources=["api_request", "api_response", "tool_response", "summarizer"], + ) + + # Keep leaf nodes only + relations = history.relations + past_messages = [ + entry.model_dump() + for entry in history.entries + if entry.id not in {r.head for r in relations} + ] + + if not recall: + return past_messages, [] + + # Search matching docs + [query_embedding, *_] = await embed.embed( + inputs=[ + f"{msg.get('name') or msg['role']}: {msg['content']}" + for msg in new_raw_messages + ], + join_inputs=True, + ) + query_text = new_raw_messages[-1]["content"] + + # List all the applicable owners to search docs from + active_agent_id = chat_context.get_active_agent().id + user_ids = [user.id for user in chat_context.users] + owners = [("user", user_id) for user_id in user_ids] + [("agent", active_agent_id)] + + doc_references: list[DocReference] = search_docs_hybrid( + developer_id=developer.id, + owners=owners, + query=query_text, + query_embedding=query_embedding, + ) + + return past_messages, doc_references diff --git a/agents-api/agents_api/models/session/get_cached_response.py b/agents-api/agents_api/models/chat/get_cached_response.py similarity index 100% rename from agents-api/agents_api/models/session/get_cached_response.py rename to agents-api/agents_api/models/chat/get_cached_response.py diff --git a/agents-api/agents_api/models/chat/prepare_chat_context.py b/agents-api/agents_api/models/chat/prepare_chat_context.py new file mode 100644 index 000000000..742038535 --- /dev/null +++ b/agents-api/agents_api/models/chat/prepare_chat_context.py @@ -0,0 +1,131 @@ +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...common.protocol.sessions import ChatContext, make_session +from ..session.prepare_session_data import prepare_session_data +from ..utils import ( + cozo_query, + fix_uuid_if_present, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + ChatContext, + one=True, + transform=lambda d: { + **d, + "session": make_session( + agents=[a["id"] for a in d["agents"]], + users=[u["id"] for u in d["users"]], + **d["session"], + ), + "toolsets": [ + {**ts, "tools": [*map(fix_uuid_if_present, ts["tools"])]} + for ts in d["toolsets"] + ], + }, +) +@cozo_query +@beartype +def prepare_chat_context( + *, + developer_id: UUID, + session_id: UUID, +) -> tuple[list[str], dict]: + """ + Executes a complex query to retrieve memory context based on session ID. + """ + + [*_, session_data_query], sd_vars = prepare_session_data.__wrapped__( + developer_id=developer_id, session_id=session_id + ) + + session_data_fields = ("session", "agents", "users") + + session_data_query += """ + :create _session_data_json { + agents: [Json], + users: [Json], + session: Json, + } + """ + + toolsets_query = """ + input[session_id] <- [[to_uuid($session_id)]] + + tools_by_agent[agent_id, collect(tool)] := + input[session_id], + *session_lookup{ + session_id, + participant_id: agent_id, + participant_type: "agent", + }, + + *tools { agent_id, tool_id, name, type, spec, updated_at, created_at }, + tool = { + "id": tool_id, + "name": name, + "type": type, + "spec": spec, + "updated_at": updated_at, + "created_at": created_at, + } + + agent_toolsets[collect(toolset)] := + tools_by_agent[agent_id, tools], + toolset = { + "agent_id": agent_id, + "tools": tools, + } + + ?[toolsets] := + agent_toolsets[toolsets] + + :create _toolsets_json { + toolsets: [Json], + } + """ + + combine_query = f""" + ?[{', '.join(session_data_fields)}, toolsets] := + *_session_data_json {{ {', '.join(session_data_fields)} }}, + *_toolsets_json {{ toolsets }} + """ + + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query( + developer_id, "sessions", session_id=session_id + ), + session_data_query, + toolsets_query, + combine_query, + ] + + return ( + queries, + { + "session_id": str(session_id), + **sd_vars, + }, + ) diff --git a/agents-api/agents_api/models/session/set_cached_response.py b/agents-api/agents_api/models/chat/set_cached_response.py similarity index 100% rename from agents-api/agents_api/models/session/set_cached_response.py rename to agents-api/agents_api/models/chat/set_cached_response.py diff --git a/agents-api/agents_api/models/developer/__init__.py b/agents-api/agents_api/models/developer/__init__.py new file mode 100644 index 000000000..a7117c06b --- /dev/null +++ b/agents-api/agents_api/models/developer/__init__.py @@ -0,0 +1,19 @@ +""" +Module: agents_api/models/docs + +This module is responsible for managing document-related operations within the application, particularly for agents and possibly other entities. It serves as a core component of the document management system, enabling features such as document creation, listing, deletion, and embedding of snippets for enhanced search and retrieval capabilities. + +Main functionalities include: +- Creating new documents and associating them with agents or users. +- Listing documents based on various criteria, including ownership and metadata filters. +- Deleting documents by their unique identifiers. +- Embedding document snippets for retrieval purposes. + +The module interacts with other parts of the application, such as the agents and users modules, to provide a comprehensive document management system. Its role is crucial in enabling document search, retrieval, and management features within the context of agents and users. + +This documentation aims to provide clear, concise, and sufficient context for new developers or contributors to understand the module's role without needing to dive deep into the code immediately. +""" + +# ruff: noqa: F401, F403, F405 + +from .get_developer import get_developer, verify_developer diff --git a/agents-api/agents_api/models/developer/get_developer.py b/agents-api/agents_api/models/developer/get_developer.py new file mode 100644 index 000000000..31ade5334 --- /dev/null +++ b/agents-api/agents_api/models/developer/get_developer.py @@ -0,0 +1,72 @@ +"""Module for retrieving document snippets from the CozoDB based on document IDs.""" + +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...common.protocol.developers import Developer +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions({QueryException: partialclass(HTTPException, status_code=401)}) +@cozo_query +@beartype +def verify_developer( + *, + developer_id: UUID, +) -> tuple[str, dict]: + return (verify_developer_id_query(developer_id), {}) + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=403), + ValidationError: partialclass(HTTPException, status_code=500), + } +) +@wrap_in_class(Developer, one=True, transform=lambda d: {**d, "id": d["developer_id"]}) +@cozo_query +@beartype +def get_developer( + *, + developer_id: UUID, +) -> tuple[str, dict]: + developer_id = str(developer_id) + + query = """ + input[developer_id] <- [[to_uuid($developer_id)]] + ?[ + developer_id, + email, + active, + tags, + settings, + created_at, + updated_at, + ] := + input[developer_id], + *developers { + developer_id, + email, + active, + tags, + settings, + created_at, + updated_at, + } + """ + + return (query, {"developer_id": developer_id}) diff --git a/agents-api/agents_api/models/docs/__init__.py b/agents-api/agents_api/models/docs/__init__.py index 4cda7a210..0ba3db0d4 100644 --- a/agents-api/agents_api/models/docs/__init__.py +++ b/agents-api/agents_api/models/docs/__init__.py @@ -13,3 +13,13 @@ This documentation aims to provide clear, concise, and sufficient context for new developers or contributors to understand the module's role without needing to dive deep into the code immediately. """ + +# ruff: noqa: F401, F403, F405 + +from .create_doc import create_doc +from .delete_doc import delete_doc +from .embed_snippets import embed_snippets +from .get_doc import get_doc +from .list_docs import list_docs +from .search_docs_by_embedding import search_docs_by_embedding +from .search_docs_by_text import search_docs_by_text diff --git a/agents-api/agents_api/models/docs/create_doc.py b/agents-api/agents_api/models/docs/create_doc.py new file mode 100644 index 000000000..ee26df484 --- /dev/null +++ b/agents-api/agents_api/models/docs/create_doc.py @@ -0,0 +1,137 @@ +from typing import Any, Literal, TypeVar +from uuid import UUID, uuid4 + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import CreateDocRequest, Doc +from ...common.utils.cozo import cozo_process_mutate_data +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + Doc, + one=True, + transform=lambda d: { + "id": UUID(d["doc_id"]), + **d, + }, +) +@cozo_query +@beartype +def create_doc( + *, + developer_id: UUID, + owner_type: Literal["user", "agent"], + owner_id: UUID, + doc_id: UUID | None = None, + data: CreateDocRequest, +) -> tuple[list[str], dict]: + """ + Constructs and executes a datalog query to create a new document and its associated snippets in the 'cozodb' database. + + Parameters: + - owner_type (Literal["user", "agent"]): The type of the owner of the document. + - owner_id (UUID): The UUID of the document owner. + - id (UUID): The UUID of the document to be created. + - data (CreateDocRequest): The content of the document. + """ + + doc_id = str(doc_id or uuid4()) + owner_id = str(owner_id) + + if isinstance(data.content, str): + data.content = [data.content] + + data.metadata = data.metadata or {} + + doc_data = data.model_dump() + content = doc_data.pop("content") + + doc_data["owner_type"] = owner_type + doc_data["owner_id"] = owner_id + doc_data["doc_id"] = doc_id + + doc_cols, doc_rows = cozo_process_mutate_data(doc_data) + + snippet_cols, snippet_rows = "", [] + + # Process each content snippet and prepare data for the datalog query. + for snippet_idx, snippet in enumerate(content): + snippet_cols, new_snippet_rows = cozo_process_mutate_data( + dict( + doc_id=doc_id, + index=snippet_idx, + content=snippet, + ) + ) + + snippet_rows += new_snippet_rows + + create_snippets_query = f""" + ?[{snippet_cols}] <- $snippet_rows + + :create _snippets {{ {snippet_cols} }} + }} {{ + ?[{snippet_cols}] <- $snippet_rows + :insert snippets {{ {snippet_cols} }} + :returning + """ + + # Construct the datalog query for creating the document and its snippets. + create_doc_query = f""" + ?[{doc_cols}] <- $doc_rows + + :create _docs {{ {doc_cols} }} + }} {{ + ?[{doc_cols}] <- $doc_rows + :insert docs {{ {doc_cols} }} + :returning + }} {{ + snippet_rows[collect(content)] := + *_snippets {{ + content + }} + + ?[{doc_cols}, content, created_at] := + *_docs {{ {doc_cols} }}, + snippet_rows[content], + created_at = now() + """ + + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query( + developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id} + ), + create_snippets_query, + create_doc_query, + ] + + # Execute the constructed datalog query and return the results as a DataFrame. + return ( + queries, + { + "doc_rows": doc_rows, + "snippet_rows": snippet_rows, + }, + ) diff --git a/agents-api/agents_api/models/docs/create_docs.py b/agents-api/agents_api/models/docs/create_docs.py deleted file mode 100644 index fd2edc645..000000000 --- a/agents-api/agents_api/models/docs/create_docs.py +++ /dev/null @@ -1,97 +0,0 @@ -from typing import Literal -from uuid import UUID - -from beartype import beartype - -from ...common.utils.cozo import cozo_process_mutate_data -from ...common.utils.datetime import utcnow -from ..utils import cozo_query - - -@cozo_query -@beartype -def create_docs_query( - owner_type: Literal["user", "agent"], - owner_id: UUID, - id: UUID, - title: str, - content: list[str] | str, - metadata: dict = {}, -): - """ - Constructs and executes a datalog query to create a new document and its associated snippets in the 'cozodb' database. - - Parameters: - - owner_type (Literal["user", "agent"]): The type of the owner of the document. - - owner_id (UUID): The UUID of the document owner. - - id (UUID): The UUID of the document to be created. - - title (str): The title of the document. - - content (str): The content of the document, which will be split into snippets. - - metadata (dict): Metadata associated with the document. Defaults to an empty dictionary. - - Returns: - pd.DataFrame: A DataFrame containing the results of the query execution. - """ - - if isinstance(content, str): - content = [content] - - created_at: float = utcnow().timestamp() - snippet_cols, snippet_rows = "", [] - - # Process each content snippet and prepare data for the datalog query. - for snippet_idx, snippet in enumerate(content): - snippet_cols, new_snippet_rows = cozo_process_mutate_data( - dict( - doc_id=str(id), - snippet_idx=snippet_idx, - title=title, - snippet=snippet, - ) - ) - - snippet_rows += new_snippet_rows - - # Construct the datalog query for creating the document and its snippets. - query = f""" - {{ - # This query creates a new document and its associated snippets in the database. - # Section to create the document in the database - ?[{owner_type}_id, doc_id, created_at, metadata] <- [[ - to_uuid($owner_id), - to_uuid($id), - $created_at, - $metadata, - ]] - - :insert {owner_type}_docs {{ - {owner_type}_id, doc_id, created_at, metadata, - }} - }} {{ - # Section to create and associate snippets with the document - ?[{snippet_cols}] <- $snippet_rows - - :insert information_snippets {{ - {snippet_cols} - }} - }} {{ - # Section to return the created document and its snippets - ?[{owner_type}_id, doc_id, created_at, metadata] <- [[ - to_uuid($owner_id), - to_uuid($id), - $created_at, - $metadata, - ]] - }}""" - - # Execute the constructed datalog query and return the results as a DataFrame. - return ( - query, - { - "owner_id": str(owner_id), - "id": str(id), - "created_at": created_at, - "metadata": metadata, - "snippet_rows": snippet_rows, - }, - ) diff --git a/agents-api/agents_api/models/docs/delete_doc.py b/agents-api/agents_api/models/docs/delete_doc.py new file mode 100644 index 000000000..c02705756 --- /dev/null +++ b/agents-api/agents_api/models/docs/delete_doc.py @@ -0,0 +1,102 @@ +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import ResourceDeletedResponse +from ...common.utils.datetime import utcnow +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + ResourceDeletedResponse, + one=True, + transform=lambda d: { + "id": UUID(d.pop("doc_id")), + "deleted_at": utcnow(), + "jobs": [], + }, + _kind="deleted", +) +@cozo_query +@beartype +def delete_doc( + *, + developer_id: UUID, + owner_id: UUID, + owner_type: str, + doc_id: UUID, +) -> tuple[list[str], dict]: + """Constructs and returns a datalog query for deleting documents and associated information snippets. + + This function targets the 'cozodb' database, allowing for the removal of documents and their related information snippets based on the provided document ID and owner (user or agent). + + Parameters: + doc_id (UUID): The UUID of the document to be deleted. + client (CozoClient): An instance of the CozoClient to execute the query. + + Returns: + pd.DataFrame: The result of the executed datalog query. + """ + # Convert UUID parameters to string format for use in the datalog query + doc_id = str(doc_id) + owner_id = str(owner_id) + + # The following query is divided into two main parts: + # 1. Deleting information snippets associated with the document + # 2. Deleting the document itself + delete_snippets_query = """ + # This section constructs the subquery for identifying and deleting all information snippets associated with the given document ID. + # Delete snippets + input[doc_id] <- [[to_uuid($doc_id)]] + ?[doc_id, index] := + input[doc_id], + *snippets { + doc_id, + index, + } + + :delete snippets { + doc_id, + index + } + """ + + delete_doc_query = """ + # Delete the docs + ?[doc_id, owner_type, owner_id] <- [[ to_uuid($doc_id), $owner_type, to_uuid($owner_id) ]] + + :delete docs { doc_id, owner_type, owner_id } + :returning + """ + + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query( + developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id} + ), + delete_snippets_query, + delete_doc_query, + ] + + return (queries, {"doc_id": doc_id, "owner_type": owner_type, "owner_id": owner_id}) diff --git a/agents-api/agents_api/models/docs/delete_docs.py b/agents-api/agents_api/models/docs/delete_docs.py deleted file mode 100644 index 8be5d243b..000000000 --- a/agents-api/agents_api/models/docs/delete_docs.py +++ /dev/null @@ -1,67 +0,0 @@ -from typing import Literal -from uuid import UUID - -from beartype import beartype - -from ..utils import cozo_query - - -@cozo_query -@beartype -def delete_docs_by_id_query( - owner_type: Literal["user", "agent"], - owner_id: UUID, - doc_id: UUID, -) -> tuple[str, dict]: - """Constructs and returns a datalog query for deleting documents and associated information snippets. - - This function targets the 'cozodb' database, allowing for the removal of documents and their related information snippets based on the provided document ID and owner (user or agent). - - Parameters: - owner_type (Literal["user", "agent"]): The type of the owner, either 'user' or 'agent'. - owner_id (UUID): The UUID of the owner. - doc_id (UUID): The UUID of the document to be deleted. - client (CozoClient): An instance of the CozoClient to execute the query. - - Returns: - pd.DataFrame: The result of the executed datalog query. - """ - # Convert UUID parameters to string format for use in the datalog query - owner_id = str(owner_id) - doc_id = str(doc_id) - - # The following query is divided into two main parts: - # 1. Deleting information snippets associated with the document - # 2. Deleting the document itself from the owner's collection - query = f""" - {{ - # This section constructs the subquery for identifying and deleting all information snippets associated with the given document ID. - # Delete snippets - input[doc_id] <- [[to_uuid($doc_id)]] - ?[doc_id, snippet_idx] := - input[doc_id], - *information_snippets {{ - doc_id, - snippet_idx, - }} - - :delete information_snippets {{ - doc_id, - snippet_idx - }} - }} {{ - # This section constructs the subquery for deleting the document from the specified owner's (user or agent) document collection. - # Delete the docs - ?[doc_id, {owner_type}_id] <- [[ - to_uuid($doc_id), - to_uuid($owner_id), - ]] - - :delete {owner_type}_docs {{ - doc_id, - {owner_type}_id, - }} - :returning - }}""" - - return (query, {"doc_id": doc_id, "owner_id": owner_id}) diff --git a/agents-api/agents_api/models/docs/embed_docs.py b/agents-api/agents_api/models/docs/embed_docs.py deleted file mode 100644 index 3b5c9cb41..000000000 --- a/agents-api/agents_api/models/docs/embed_docs.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Module for embedding documents in the cozodb database. Contains functions to update document embeddings.""" - -from uuid import UUID - -from beartype import beartype - -from ..utils import cozo_query - - -@cozo_query -@beartype -def embed_docs_snippets_query( - doc_id: UUID, - snippet_indices: list[int] | tuple[int], - embeddings: list[list[float]], -) -> tuple[str, dict]: - """Embeds document snippets in the cozodb database. - - Parameters: - doc_id (UUID): The unique identifier for the document. - snippet_indices (list[int]): Indices of the snippets in the document. - embeddings (list[list[float]]): Embedding vectors for the snippets. - - Returns: - tuple[str, dict]: A DataFrame containing the results of the embedding operation. - """ - - doc_id = str(doc_id) - # Ensure the number of snippet indices matches the number of embeddings. - assert len(snippet_indices) == len(embeddings) - - # Prepare records for the database query by combining doc_id, snippet indices, and embeddings. - records = [ - [doc_id, snippet_idx, embedding] - for snippet_idx, embedding in zip(snippet_indices, embeddings) - ] - - # Define the datalog query for updating document snippet embeddings in the database. - query = """ - { - ?[doc_id, snippet_idx, embedding] <- $records - - :update information_snippets { - doc_id, - snippet_idx, - embedding, - } - :returning - }""" - - return (query, {"records": records}) diff --git a/agents-api/agents_api/models/docs/embed_snippets.py b/agents-api/agents_api/models/docs/embed_snippets.py new file mode 100644 index 000000000..e810d0379 --- /dev/null +++ b/agents-api/agents_api/models/docs/embed_snippets.py @@ -0,0 +1,101 @@ +"""Module for embedding documents in the cozodb database. Contains functions to update document embeddings.""" + +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import ResourceUpdatedResponse +from ...common.utils.cozo import cozo_process_mutate_data +from ...common.utils.datetime import utcnow +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + ResourceUpdatedResponse, + one=True, + transform=lambda d: {"id": d["doc_id"], "updated_at": utcnow(), "jobs": []}, + _kind="inserted", +) +@cozo_query +@beartype +def embed_snippets( + *, + developer_id: UUID, + doc_id: UUID, + snippet_indices: list[int] | tuple[int, ...], + embeddings: list[list[float]], + embedding_size: int = 1024, +) -> tuple[list[str], dict]: + """Embeds document snippets in the cozodb database. + + Parameters: + doc_id (UUID): The unique identifier for the document. + snippet_indices (list[int]): Indices of the snippets in the document. + embeddings (list[list[float]]): Embedding vectors for the snippets. + """ + + doc_id = str(doc_id) + + # Ensure the number of snippet indices matches the number of embeddings. + assert len(snippet_indices) == len(embeddings) + assert all(len(embedding) == embedding_size for embedding in embeddings) + assert min(snippet_indices) >= 0 + + # Ensure all embeddings are non-zero. + assert all(sum(embedding) for embedding in embeddings) + + # Create a list of records to update the document snippet embeddings in the database. + records = [ + {"doc_id": doc_id, "index": snippet_idx, "embedding": embedding} + for snippet_idx, embedding in zip(snippet_indices, embeddings) + ] + + cols, vals = cozo_process_mutate_data(records) + + # Ensure that index is present in the records. + check_indices_query = f""" + ?[index] := + *snippets {{ + doc_id: $doc_id, + index, + }}, + index > {max(snippet_indices)} + + :assert none + """ + + # Define the datalog query for updating document snippet embeddings in the database. + embed_query = f""" + ?[{cols}] <- $vals + + :update snippets {{ {cols} }} + :returning + """ + + queries = [ + verify_developer_id_query(developer_id), + check_indices_query, + embed_query, + ] + + return (queries, {"vals": vals, "doc_id": doc_id}) diff --git a/agents-api/agents_api/models/docs/get_doc.py b/agents-api/agents_api/models/docs/get_doc.py new file mode 100644 index 000000000..84cd181ec --- /dev/null +++ b/agents-api/agents_api/models/docs/get_doc.py @@ -0,0 +1,95 @@ +"""Module for retrieving document snippets from the CozoDB based on document IDs.""" + +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import Doc +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + lambda e: isinstance(e, AssertionError) + and "Expected one result" in repr(e): partialclass( + HTTPException, status_code=404 + ), + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + Doc, + one=True, + transform=lambda d: { + "content": [s[1] for s in sorted(d["snippet_data"], key=lambda x: x[0])], + **d, + }, +) +@cozo_query +@beartype +def get_doc( + *, + developer_id: UUID, + doc_id: UUID, +) -> tuple[list[str], dict]: + """ + Retrieves snippets of documents by their ID from the CozoDB. + + Parameters: + doc_id (UUID): The unique identifier of the document. + client (CozoClient, optional): The CozoDB client instance. Defaults to a pre-configured client. + + Returns: + pd.DataFrame: A DataFrame containing the document snippets and related metadata. + """ + + doc_id = str(doc_id) + + get_query = """ + input[doc_id] <- [[to_uuid($doc_id)]] + snippets[collect(snippet_data)] := + input[doc_id], + *snippets { + doc_id, + index, + content, + }, + snippet_data = [index, content] + + ?[ + id, + title, + snippet_data, + created_at, + metadata, + ] := input[id], + *docs { + doc_id: id, + title, + created_at, + metadata, + }, + snippets[snippet_data] + """ + + queries = [ + verify_developer_id_query(developer_id), + get_query, + ] + + return (queries, {"doc_id": doc_id}) diff --git a/agents-api/agents_api/models/docs/get_docs.py b/agents-api/agents_api/models/docs/get_docs.py deleted file mode 100644 index 862b17a0c..000000000 --- a/agents-api/agents_api/models/docs/get_docs.py +++ /dev/null @@ -1,60 +0,0 @@ -"""Module for retrieving document snippets from the CozoDB based on document IDs.""" - -from typing import Literal -from uuid import UUID - -from beartype import beartype - -from ..utils import cozo_query - - -@cozo_query -@beartype -def get_docs_snippets_by_id_query( - owner_type: Literal["user", "agent"], - doc_id: UUID, -) -> tuple[str, dict]: - """ - Retrieves snippets of documents by their ID from the CozoDB. - - Parameters: - owner_type (Literal["user", "agent"]): The type of the owner of the document. - doc_id (UUID): The unique identifier of the document. - client (CozoClient, optional): The CozoDB client instance. Defaults to a pre-configured client. - - Returns: - pd.DataFrame: A DataFrame containing the document snippets and related metadata. - """ - - doc_id = str(doc_id) - - query = f""" - {{ - input[doc_id] <- [[to_uuid($doc_id)]] - - ?[ - {owner_type}_id, - doc_id, - title, - snippet, - snippet_idx, - created_at, - embed_instruction, - metadata, - ] := input[doc_id], - *{owner_type}_docs {{ - {owner_type}_id, - doc_id, - created_at, - metadata, - }}, - *information_snippets {{ - doc_id, - snippet_idx, - title, - snippet, - embed_instruction, - }} - }}""" - - return (query, {"doc_id": doc_id}) diff --git a/agents-api/agents_api/models/docs/list_docs.py b/agents-api/agents_api/models/docs/list_docs.py index 576c4a7f6..afdf06c2d 100644 --- a/agents-api/agents_api/models/docs/list_docs.py +++ b/agents-api/agents_api/models/docs/list_docs.py @@ -1,48 +1,56 @@ """This module contains functions for querying document-related data from the 'cozodb' database using datalog queries.""" import json -from typing import Any, Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError -from ..utils import cozo_query +from ...autogen.openapi_model import Doc +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) - -@cozo_query -@beartype -def ensure_owner_exists_query( - owner_type: Literal["user", "agent"], - owner_id: UUID, -) -> tuple[str, dict]: - owner_id = str(owner_id) - - # Query to check if an owner (user or agent) exists in the database - query = f"""{{ - # Convert owner_id to UUID and set as input - input[{owner_type}_id] <- [[to_uuid($owner_id)]] - - # Retrieve owner_id if it exists in the database - ?[ - {owner_type}_id, - ] := input[{owner_type}_id], - *{owner_type}s {{ - {owner_type}_id, - }} - }}""" - - return (query, {"owner_id": owner_id}) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + Doc, + transform=lambda d: { + "content": [s[1] for s in sorted(d["snippet_data"], key=lambda x: x[0])], + **d, + }, +) @cozo_query @beartype -def list_docs_snippets_by_owner_query( +def list_docs( + *, + developer_id: UUID, owner_type: Literal["user", "agent"], owner_id: UUID, + limit: int = 100, + offset: int = 0, + sort_by: Literal["created_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", metadata_filter: dict[str, Any] = {}, -) -> tuple[str, dict]: - owner_id = str(owner_id) - +) -> tuple[list[str], dict]: + # Transforms the metadata_filter dictionary into a string representation for the datalog query. metadata_filter_str = ", ".join( [ f"metadata->{json.dumps(k)} == {json.dumps(v)}" @@ -50,35 +58,58 @@ def list_docs_snippets_by_owner_query( ] ) - # Query to retrieve document snippets by owner (user or agent) - query = f""" - {{ - # Convert owner_id to UUID and set as input - input[{owner_type}_id] <- [[to_uuid($owner_id)]] + owner_id = str(owner_id) + sort = f"{'-' if direction == 'desc' else ''}{sort_by}" + + get_query = f""" + snippets[id, collect(snippet_data)] := + *snippets {{ + doc_id: id, + index, + content, + }}, + snippet_data = [index, content] - # Retrieve documents and snippets associated with the owner ?[ - {owner_type}_id, - doc_id, + owner_type, + id, title, - snippet, - snippet_idx, + snippet_data, created_at, metadata, - ] := input[{owner_type}_id], - *{owner_type}_docs {{ - {owner_type}_id, - doc_id, + ] := + owner_type = $owner_type, + owner_id = to_uuid($owner_id), + *docs {{ + owner_type, + owner_id, + doc_id: id, + title, created_at, metadata, }}, - *information_snippets {{ - doc_id, - snippet_idx, - title, - snippet, - }}, - {metadata_filter_str} - }}""" + snippets[id, snippet_data] + + :limit $limit + :offset $offset + :sort {sort} + """ - return (query, {"owner_id": owner_id}) + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query( + developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id} + ), + get_query, + ] + + return ( + queries, + { + "owner_id": owner_id, + "owner_type": owner_type, + "limit": limit, + "offset": offset, + "metadata_filter": metadata_filter_str, + }, + ) diff --git a/agents-api/agents_api/models/docs/search_docs.py b/agents-api/agents_api/models/docs/search_docs.py deleted file mode 100644 index 7c2ad90fa..000000000 --- a/agents-api/agents_api/models/docs/search_docs.py +++ /dev/null @@ -1,92 +0,0 @@ -"""This module contains functions for searching documents in the CozoDB based on embedding queries.""" - -from typing import Literal -from uuid import UUID - -from beartype import beartype - -from ..utils import cozo_query - - -@cozo_query -@beartype -def search_docs_snippets_by_embedding_query( - owner_type: Literal["user", "agent"], - owner_id: UUID, - query_embedding: list[float], - k: int = 3, - confidence: float = 0.8, -) -> tuple[str, dict]: - """ - Searches for document snippets in CozoDB by embedding query. - - Parameters: - - owner_type (Literal["user", "agent"]): The type of the owner of the documents. - - owner_id (UUID): The unique identifier of the owner. - - query_embedding (list[float]): The embedding vector of the query. - - k (int, optional): The number of nearest neighbors to retrieve. Defaults to 3. - - confidence (float, optional): The confidence threshold for filtering results. Defaults to 0.8. - - Returns: - - pd.DataFrame: A DataFrame containing the search results. - """ - - owner_id = str(owner_id) - # Calculate the search radius based on confidence level - radius: float = 1.0 - confidence - - # Construct the datalog query for searching document snippets - query = f""" - {{ - input[ - {owner_type}_id, - query_embedding, - ] <- [[ - to_uuid($owner_id), - vec($query_embedding), - ]] - - candidate[ - doc_id - ] := input[{owner_type}_id, _], - *{owner_type}_docs {{ - {owner_type}_id, - doc_id - }} - - ?[ - {owner_type}_id, - doc_id, - title, - snippet, - snippet_idx, - distance, - vector, - ] := input[{owner_type}_id, query_embedding], - candidate[doc_id], - ~information_snippets:embedding_space {{ - doc_id, - snippet_idx, - title, - snippet | - query: query_embedding, - k: $k, - ef: 128, - radius: $radius, - bind_distance: distance, - bind_vector: vector, - }} - - # Sort the results by distance to find the closest matches - :sort distance - }}""" - - return ( - query, - { - "owner_id": owner_id, - "query_embedding": query_embedding, - "k": k, - "radius": radius, - }, - ) diff --git a/agents-api/agents_api/models/docs/search_docs_by_embedding.py b/agents-api/agents_api/models/docs/search_docs_by_embedding.py new file mode 100644 index 000000000..acebd09cd --- /dev/null +++ b/agents-api/agents_api/models/docs/search_docs_by_embedding.py @@ -0,0 +1,252 @@ +"""This module contains functions for searching documents in the CozoDB based on embedding queries.""" + +from typing import Any, Literal, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import DocReference +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + DocReference, + transform=lambda d: { + "owner": { + "id": d["owner_id"], + "role": d["owner_type"], + }, + **d, + }, +) +@cozo_query +@beartype +def search_docs_by_embedding( + *, + developer_id: UUID, + owners: list[tuple[Literal["user", "agent"], UUID]], + query_embedding: list[float], + k: int = 3, + confidence: float = 0.7, + ef: int = 128, + mmr_lambda: float = 0.25, + embedding_size: int = 1024, +) -> tuple[list[str], dict]: + """ + Searches for document snippets in CozoDB by embedding query. + + Parameters: + - owner_type (Literal["user", "agent"]): The type of the owner of the documents. + - owner_id (UUID): The unique identifier of the owner. + - query_embedding (list[float]): The embedding vector of the query. + - k (int, optional): The number of nearest neighbors to retrieve. Defaults to 3. + - confidence (float, optional): The confidence threshold for filtering results. Defaults to 0.8. + - mmr_lambda (float, optional): The lambda parameter for MMR. Defaults to 0.25. + """ + + assert len(query_embedding) == embedding_size + assert sum(query_embedding) + + owners: list[list[str]] = [ + [owner_type, str(owner_id)] for owner_type, owner_id in owners + ] + + # Calculate the search radius based on confidence level + radius: float = 1.0 - confidence + + # Construct the datalog query for searching document snippets + interim_query = f""" + owners[owner_type, owner_id] <- $owners + input[ + owner_type, + owner_id, + query_embedding, + ] := + owners[owner_type, owner_id_str], + owner_id = to_uuid(owner_id_str), + query_embedding = vec($query_embedding) + + candidate[doc_id] := + input[owner_type, owner_id, _], + *docs {{ + owner_type, + owner_id, + doc_id + }} + + intersnippet_distance[ + doc_id, + index1, + min(dist) + ] := + *snippets {{ + doc_id, + index: index1, + embedding: embedding1 + }}, + *snippets {{ + doc_id, + index: index2, + embedding: embedding2 + }}, + index1 < index2, + dist = cos_dist(embedding1, embedding2) + + doclength[doc_id, max(index)] := + *snippets {{ + doc_id, + index, + }} + + get_intersnippet[doc_id, index, distance] := + intersnippet_distance[doc_id, _, distance] + + get_intersnippet[doc_id, index, distance] := + not intersnippet_distance[doc_id, _, distance], + distance = 0.0 + + search_result[ + doc_id, + content, + index, + distance, + ] := + input[_, __, query], + candidate[doc_id], + ~snippets:embedding_space {{ + doc_id, + index, + content + | + query: query, + k: {k*2}, + ef: {ef}, + radius: {radius}, + bind_distance: distance, + }} + + apply_mmr[ + doc_id, + snippet_data, + distance, + mmr_score, + ] := + search_result[doc_id, content, index, distance], + get_intersnippet[doc_id, index, intersnippet_distance], + mmr_score = {mmr_lambda} * (distance - (1.0 - {mmr_lambda}) * intersnippet_distance), + snippet_data = [index, content] + + ?[ + owner_type, + owner_id, + doc_id, + snippet_data, + distance, + mmr_score, + title, + ] := + *docs {{ + owner_type, + owner_id, + doc_id, + title, + }}, + apply_mmr[ + doc_id, + snippet_data, + distance, + mmr_score, + ] + + # Sort the results by distance to find the closest matches + :sort -mmr_score + :limit {k} + + :create _interim {{ + owner_type, + owner_id, + doc_id, + snippet_data, + distance, + mmr_score, + title, + }} + """ + + collect_query = """ + m[ + doc_id, + owner_type, + owner_id, + collect(snippet), + distance, + title, + ] := + *_interim { + owner_type, + owner_id, + doc_id, + snippet_data, + distance, + title, + }, snippet = { + "index": snippet_data->0, + "content": snippet_data->1, + } + + ?[ + id, + owner_type, + owner_id, + snippets, + distance, + title, + ] := m[ + id, + owner_type, + owner_id, + snippets, + distance, + title, + ] + """ + + queries = [ + verify_developer_id_query(developer_id), + *[ + verify_developer_owns_resource_query( + developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id} + ) + for owner_type, owner_id in owners + ], + interim_query, + collect_query, + ] + + return ( + queries, + { + "owners": owners, + "query_embedding": query_embedding, + }, + ) diff --git a/agents-api/agents_api/models/docs/search_docs_by_text.py b/agents-api/agents_api/models/docs/search_docs_by_text.py new file mode 100644 index 000000000..0662aa84d --- /dev/null +++ b/agents-api/agents_api/models/docs/search_docs_by_text.py @@ -0,0 +1,181 @@ +"""This module contains functions for searching documents in the CozoDB based on embedding queries.""" + +from typing import Any, Literal, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import DocReference +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + DocReference, + transform=lambda d: { + "owner": { + "id": d["owner_id"], + "role": d["owner_type"], + }, + **d, + }, +) +@cozo_query +@beartype +def search_docs_by_text( + *, + developer_id: UUID, + owners: list[tuple[Literal["user", "agent"], UUID]], + query: str, + k: int = 3, +) -> tuple[list[str], dict]: + """ + Searches for document snippets in CozoDB by embedding query. + + Parameters: + - owners (list[tuple[Literal["user", "agent"], UUID]]): The type of the owner of the documents. + - query (str): The query string. + - k (int, optional): The number of nearest neighbors to retrieve. Defaults to 3. + """ + + owners: list[list[str]] = [ + [owner_type, str(owner_id)] for owner_type, owner_id in owners + ] + + # Construct the datalog query for searching document snippets + search_query = f""" + owners[owner_type, owner_id] <- $owners + input[ + owner_type, + owner_id, + ] := + owners[owner_type, owner_id_str], + owner_id = to_uuid(owner_id_str) + + candidate[doc_id] := + input[owner_type, owner_id], + *docs {{ + owner_type, + owner_id, + doc_id + }} + + search_result[ + doc_id, + snippet_data, + distance, + ] := + candidate[doc_id], + ~snippets:lsh {{ + doc_id, + index, + content + | + query: $query, + k: {k}, + }}, + distance = 10000000, # Very large distance to depict no valid distance + snippet_data = [index, content] + + search_result[ + doc_id, + snippet_data, + distance, + ] := + candidate[doc_id], + ~snippets:fts {{ + doc_id, + index, + content + | + query: $query, + k: {k}, + score_kind: 'tf_idf', + bind_score: score, + }}, + distance = -score, + snippet_data = [index, content] + + m[ + doc_id, + collect(snippet), + distance, + title, + owner_type, + owner_id, + ] := + candidate[doc_id], + *docs {{ + owner_type, + owner_id, + doc_id, + title, + }}, + search_result [ + doc_id, + snippet_data, + distance, + ], + snippet = {{ + "index": snippet_data->0, + "content": snippet_data->1, + }} + + + ?[ + id, + owner_type, + owner_id, + snippets, + distance, + title, + ] := + input[owner_type, owner_id], + m[ + id, + snippets, + distance, + title, + owner_type, + owner_id, + ] + + # Sort the results by distance to find the closest matches + :sort distance + :limit {k} + """ + + queries = [ + verify_developer_id_query(developer_id), + *[ + verify_developer_owns_resource_query( + developer_id, f"{owner_type}s", **{f"{owner_type}_id": owner_id} + ) + for owner_type, owner_id in owners + ], + search_query, + ] + + return ( + queries, + {"owners": owners, "query": query}, + ) diff --git a/agents-api/agents_api/models/docs/search_docs_hybrid.py b/agents-api/agents_api/models/docs/search_docs_hybrid.py new file mode 100644 index 000000000..03fb44037 --- /dev/null +++ b/agents-api/agents_api/models/docs/search_docs_hybrid.py @@ -0,0 +1,123 @@ +"""This module contains functions for searching documents in the CozoDB based on embedding queries.""" + +from statistics import mean, stdev +from typing import Literal +from uuid import UUID + +from beartype import beartype + +from ...autogen.openapi_model import DocReference +from .search_docs_by_embedding import search_docs_by_embedding +from .search_docs_by_text import search_docs_by_text + + +# Distribution based score normalization +# https://medium.com/plain-simple-software/distribution-based-score-fusion-dbsf-a-new-approach-to-vector-search-ranking-f87c37488b18 +def dbsf_normalize(scores: list[float]) -> list[float]: + """ + Scores scaled using minmax scaler with our custom feature range + (extremes indicated as 3 standard deviations from the mean) + """ + if len(scores) < 2: + return scores + + sd = stdev(scores) + if sd == 0: + return scores + + m = mean(scores) + m3d = 3 * sd + m + m_3d = m - 3 * sd + + return [(s - m_3d) / (m3d - m_3d) for s in scores] + + +def dbsf_fuse( + text_results: list[DocReference], + embedding_results: list[DocReference], + alpha: float = 0.7, # Weight of the embedding search results (this is a good default) +) -> list[DocReference]: + """ + Weighted reciprocal-rank fusion of text and embedding search results + """ + all_docs = {doc.id: doc for doc in text_results + embedding_results} + + assert all(doc.distance is not None in all_docs for doc in text_results) + + text_scores: dict[UUID, float] = { + doc.id: -(doc.distance or 0.0) for doc in text_results + } + + # Because these are cosine distances, we need to invert them + embedding_scores: dict[UUID, float] = { + doc.id: 1.0 - doc.distance for doc in embedding_results + } + + # normalize the scores + text_scores_normalized = dbsf_normalize(list(text_scores.values())) + text_scores = { + doc_id: score + for doc_id, score in zip(text_scores.keys(), text_scores_normalized) + } + + embedding_scores_normalized = dbsf_normalize(list(embedding_scores.values())) + embedding_scores = { + doc_id: score + for doc_id, score in zip(embedding_scores.keys(), embedding_scores_normalized) + } + + # Combine the scores + text_weight: float = 1 - alpha + embedding_weight: float = alpha + + combined_scores = [] + + for id in all_docs.keys(): + text_score = text_weight * text_scores.get(id, 0) + embedding_score = embedding_weight * embedding_scores.get(id, 0) + + combined_scores.append((id, text_score + embedding_score)) + + # Sort by the combined score + combined_scores = sorted(combined_scores, key=lambda x: x[1], reverse=True) + + # Rank the results + ranked_results = [] + for id, score in combined_scores: + doc = all_docs[id].model_copy() + doc.distance = 1.0 - score + ranked_results.append(doc) + + return ranked_results + + +@beartype +def search_docs_hybrid( + *, + developer_id: UUID, + owners: list[tuple[Literal["user", "agent"], UUID]], + query: str, + query_embedding: list[float], + k: int = 3, + alpha: float = 0.7, # Weight of the embedding search results (this is a good default) + embed_search_options: dict = {}, + text_search_options: dict = {}, +) -> list[DocReference]: + # TODO: We should probably parallelize these queries + text_results = search_docs_by_text( + developer_id=developer_id, + owners=owners, + query=query, + k=2 * k, + **text_search_options, + ) + + embedding_results = search_docs_by_embedding( + developer_id=developer_id, + owners=owners, + query_embedding=query_embedding, + k=2 * k, + **embed_search_options, + ) + + return dbsf_fuse(text_results, embedding_results, alpha)[:k] diff --git a/agents-api/agents_api/models/docs/test_docs_queries.py b/agents-api/agents_api/models/docs/test_docs_queries.py deleted file mode 100644 index 6b5528b40..000000000 --- a/agents-api/agents_api/models/docs/test_docs_queries.py +++ /dev/null @@ -1,158 +0,0 @@ -# # Tests for entry queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test - - -# from .create_docs import create_docs_query -# from .delete_docs import delete_docs_by_id_query -# from .get_docs import get_docs_snippets_by_id_query -# from .list_docs import list_docs_snippets_by_owner_query -# from .embed_docs import embed_docs_snippets_query -# from .search_docs import search_docs_snippets_by_embedding_query - - -# EMBEDDING_SIZE: int = 1024 - - -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create docs") -# def _(): -# client = cozo_client() - -# for owner_type in ("user", "agent"): -# owner_id = uuid4() -# id = uuid4() - -# result = create_docs_query( -# owner_type, owner_id, id, title="Hello", content="World", client=client -# ) - -# assert result["created_at"][0] - - -# @test("model: get docs") -# def _(): -# client = cozo_client() - -# for owner_type in ("user", "agent"): -# owner_id = uuid4() -# id = uuid4() - -# create_docs_query( -# owner_type, owner_id, id, title="Hello", content="World", client=client -# ) - -# result = get_docs_snippets_by_id_query(owner_type, id, client=client) - -# assert len(result) == 1, "Only 1 should have been found" - - -# @test("model: delete docs") -# def _(): -# client = cozo_client() - -# for owner_type in ("user", "agent"): -# owner_id = uuid4() -# id = uuid4() - -# create_docs_query( -# owner_type, owner_id, id, title="Hello", content="World", client=client -# ) - -# result = delete_docs_by_id_query(owner_type, owner_id, id, client=client) - -# delete_info = next( -# (row for row in result.to_dict("records") if row["_kind"] == "deleted"), -# None, -# ) - -# assert delete_info is not None, "Delete operation found the row" - - -# @test("model: list docs") -# def _(): -# client = cozo_client() - -# for owner_type in ("user", "agent"): -# owner_id = uuid4() -# id = uuid4() - -# create_docs_query( -# owner_type, owner_id, id, title="Hello", content="World", client=client -# ) - -# result = list_docs_snippets_by_owner_query(owner_type, owner_id, client=client) - -# assert len(result) == 1, "Only 1 should have been found" - - -# @test("model: search docs") -# def _(): -# client = cozo_client() - -# for owner_type in ("user", "agent"): -# owner_id = uuid4() -# id = uuid4() - -# create_docs_query( -# owner_type, owner_id, id, title="Hello", content="World", client=client -# ) - -# ### Add embedding to the snippet -# client.update( -# "information_snippets", -# dict(doc_id=str(id), snippet_idx=0, embedding=[1.0] * EMBEDDING_SIZE), -# ) - -# ### Search -# query_embedding = [0.99] * EMBEDDING_SIZE - -# result = search_docs_snippets_by_embedding_query( -# owner_type, owner_id, query_embedding, client=client -# ) - -# assert len(result) == 1, "Only 1 should have been found" - - -# @test("model: embed docs") -# def _(): -# client = cozo_client() - -# for owner_type in ("user", "agent"): -# owner_id = uuid4() -# id = uuid4() - -# snippets = [ -# "Hello World", -# "Hello Banana", -# "Hello Apple", -# ] - -# create_docs_query( -# owner_type, -# owner_id, -# id, -# title="Hi", -# content=snippets, -# client=client, -# ) - -# ### Add embedding to the snippet -# snippet_indices = [*range(len(snippets))] - -# embeddings = [[1.0] * EMBEDDING_SIZE for _ in snippets] - -# embed_docs_snippets_query(id, snippet_indices, embeddings, client=client) diff --git a/agents-api/agents_api/models/entry/__init__.py b/agents-api/agents_api/models/entry/__init__.py index 3ba31b722..32231c364 100644 --- a/agents-api/agents_api/models/entry/__init__.py +++ b/agents-api/agents_api/models/entry/__init__.py @@ -10,3 +10,10 @@ The module utilizes pandas DataFrames for handling query results and integrates with the CozoClient for database operations, ensuring efficient and effective management of entries. """ + +# ruff: noqa: F401, F403, F405 + +from .create_entries import create_entries +from .delete_entries import delete_entries +from .get_history import get_history +from .list_entries import list_entries diff --git a/agents-api/agents_api/models/entry/create_entries.py b/agents-api/agents_api/models/entry/create_entries.py index 7e582414e..e227714d1 100644 --- a/agents-api/agents_api/models/entry/create_entries.py +++ b/agents-api/agents_api/models/entry/create_entries.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -11,6 +12,7 @@ from ...common.utils.messages import content_to_json from ..utils import ( cozo_query, + mark_session_updated_query, partialclass, rewrap_exceptions, verify_developer_id_query, @@ -18,6 +20,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -32,6 +37,7 @@ "id": UUID(d.pop("entry_id")), **d, }, + _kind="inserted", ) @cozo_query @beartype @@ -40,11 +46,12 @@ def create_entries( developer_id: UUID, session_id: UUID, data: list[CreateEntryRequest], + mark_session_as_updated: bool = True, ) -> tuple[list[str], dict]: developer_id = str(developer_id) session_id = str(session_id) - data_dicts = [item.model_dump() for item in data] + data_dicts = [item.model_dump(exclude_unset=True) for item in data] for item in data_dicts: item["content"] = content_to_json(item["content"]) @@ -71,6 +78,9 @@ def create_entries( verify_developer_owns_resource_query( developer_id, "sessions", session_id=session_id ), + mark_session_updated_query(developer_id, session_id) + if mark_session_as_updated + else "", create_query, ] @@ -84,7 +94,7 @@ def create_entries( TypeError: partialclass(HTTPException, status_code=400), } ) -@wrap_in_class(Relation) +@wrap_in_class(Relation, _kind="inserted") @cozo_query @beartype def add_entry_relations( diff --git a/agents-api/agents_api/models/entry/delete_entries.py b/agents-api/agents_api/models/entry/delete_entries.py index a156275b0..48c37cd25 100644 --- a/agents-api/agents_api/models/entry/delete_entries.py +++ b/agents-api/agents_api/models/entry/delete_entries.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -9,6 +10,7 @@ from ...common.utils.datetime import utcnow from ..utils import ( cozo_query, + mark_session_updated_query, partialclass, rewrap_exceptions, verify_developer_id_query, @@ -16,6 +18,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -33,11 +38,12 @@ "deleted_at": utcnow(), "jobs": [], }, + _kind="deleted", ) @cozo_query @beartype def delete_entries_for_session( - *, developer_id: UUID, session_id: UUID + *, developer_id: UUID, session_id: UUID, mark_session_as_updated: bool = True ) -> tuple[list[str], dict]: """ Constructs and returns a datalog query for deleting entries associated with a given session ID from the 'cozodb' database. @@ -79,6 +85,9 @@ def delete_entries_for_session( verify_developer_owns_resource_query( developer_id, "sessions", session_id=session_id ), + mark_session_updated_query(developer_id, session_id) + if mark_session_as_updated + else "", delete_query, ] diff --git a/agents-api/agents_api/models/entry/get_history.py b/agents-api/agents_api/models/entry/get_history.py index 49eb7b929..68fe05979 100644 --- a/agents-api/agents_api/models/entry/get_history.py +++ b/agents-api/agents_api/models/entry/get_history.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +17,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -62,6 +66,7 @@ def get_history( content, source, token_count, + tokenizer, created_at, timestamp, }, @@ -75,6 +80,7 @@ def get_history( "content": content, "source": source, "token_count": token_count, + "tokenizer": tokenizer, "created_at": created_at, "timestamp": timestamp } diff --git a/agents-api/agents_api/models/entry/list_entries.py b/agents-api/agents_api/models/entry/list_entries.py index 0c47d9a74..d3081a9b0 100644 --- a/agents-api/agents_api/models/entry/list_entries.py +++ b/agents-api/agents_api/models/entry/list_entries.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -65,6 +68,7 @@ def list_entries( content, source, token_count, + tokenizer, created_at, timestamp, ] := *entries {{ @@ -75,6 +79,7 @@ def list_entries( content, source, token_count, + tokenizer, created_at, timestamp, }}, diff --git a/agents-api/agents_api/models/entry/test_entry_queries.py b/agents-api/agents_api/models/entry/test_entry_queries.py deleted file mode 100644 index d4549898d..000000000 --- a/agents-api/agents_api/models/entry/test_entry_queries.py +++ /dev/null @@ -1,192 +0,0 @@ -# """ -# This module contains tests for entry queries against the CozoDB database. -# It verifies the functionality of adding, retrieving, and processing entries as defined in the schema. -# """ - -# # Tests for entry queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test - -# from ...autogen.openapi_model import FunctionDef -# from ...common.protocol.entries import Entry -# from ..docs.create_docs import create_docs_query -# from ..docs.embed_docs import embed_docs_snippets_query -# from ..agent.create_agent import create_agent_query -# from ..session.create_session import create_session_query -# from ..tools.create_tools import create_function_query -# from ..tools.embed_tools import embed_functions_query -# from ..user.create_user import create_user_query -# from .add_entries import add_entries_query -# from .get_entries import get_entries_query -# from .proc_mem_context import proc_mem_context_query - -# MODEL = "julep-ai/samantha-1-turbo" - - -# # Initializes a new CozoDB client for testing, applying all migrations. -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create entry") -# def _(): -# """ -# Tests the addition of a new entry to the database. -# Verifies that the entry can be successfully added using the add_entries_query function. -# """ -# client = cozo_client() -# session_id = uuid4() - -# test_entry = Entry( -# session_id=session_id, -# role="user", -# content="test entry content", -# ) - -# add_entries_query(entries=[test_entry], client=client) - - -# @test("model: get entries") -# def _(): -# """ -# Tests the retrieval of entries from the database. -# Verifies that entries matching specific criteria can be successfully retrieved. -# """ -# client = cozo_client() -# session_id = uuid4() - -# test_entry = Entry( -# session_id=session_id, -# role="user", -# content="test entry content", -# ) - -# internal_entry = Entry( -# session_id=session_id, -# role="user", -# content="test entry content", -# source="internal", -# ) - -# add_entries_query(entries=[test_entry, internal_entry], client=client) - -# result = get_entries_query(session_id=session_id, client=client) - -# # Asserts that only one entry is retrieved, matching the session_id. -# assert len(result["entry_id"]) == 1 - - -# @test("model: procedural memory context") -# def _(): -# """ -# Tests the procedural memory context in the database. -# Verifies the functionality of retrieving relevant memory context based on embeddings. -# """ -# client = cozo_client() -# developer_id = uuid4() -# user_id = uuid4() -# agent_id = uuid4() -# session_id = uuid4() -# tool_id = uuid4() -# user_doc_id = uuid4() -# agent_doc_id = uuid4() - -# # Setup: Creates a user, agent, session, function, and documents, then embeds tools and document snippets. -# # Create stuff -# test_entry = Entry( -# session_id=session_id, -# role="user", -# content="test entry content", -# source="api_request", -# ) - -# test_instruction1 = "test instruction" -# test_instruction2 = "test instruction" -# test_function = FunctionDef( -# name="test function", -# description="test function description", -# parameters={"type": "object", "properties": {}}, -# ) - -# test_user_doc = "test user doc" -# test_agent_doc = "test agent doc" - -# [ -# add_entries_query(entries=[test_entry], client=client), -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ), -# create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# name="test agent", -# about="test agent about", -# instructions=[test_instruction1, test_instruction2], -# client=client, -# ), -# create_session_query( -# developer_id=developer_id, -# session_id=session_id, -# user_id=user_id, -# agent_id=agent_id, -# situation="test situation", -# client=client, -# ), -# create_function_query( -# agent_id=agent_id, id=tool_id, function=test_function, client=client -# ), -# create_docs_query( -# owner_type="agent", -# owner_id=agent_id, -# id=agent_doc_id, -# title=test_agent_doc, -# content=test_agent_doc, -# client=client, -# ), -# create_docs_query( -# owner_type="user", -# owner_id=user_id, -# id=user_doc_id, -# title=test_user_doc, -# content=test_user_doc, -# client=client, -# ), -# embed_functions_query( -# agent_id=agent_id, -# tool_ids=[tool_id], -# embeddings=[[1.0] * 768], -# client=client, -# ), -# embed_docs_snippets_query( -# agent_doc_id, snippet_indices=[0], embeddings=[[1.0] * 1024], client=client -# ), -# embed_docs_snippets_query( -# user_doc_id, snippet_indices=[0], embeddings=[[1.0] * 1024], client=client -# ), -# ] - -# # Executes the procedural memory context query to retrieve relevant memory context based on embeddings. -# # Run the query -# result = proc_mem_context_query( -# session_id=session_id, -# tool_query_embedding=[0.9] * 768, -# doc_query_embedding=[0.9] * 1024, -# client=client, -# ) - -# assert len(result) == 8 diff --git a/agents-api/agents_api/models/execution/__init__.py b/agents-api/agents_api/models/execution/__init__.py index e69de29bb..1e4bd85be 100644 --- a/agents-api/agents_api/models/execution/__init__.py +++ b/agents-api/agents_api/models/execution/__init__.py @@ -0,0 +1,10 @@ +# ruff: noqa: F401, F403, F405 + +from .create_execution import create_execution +from .create_execution_transition import create_execution_transition +from .get_execution import get_execution +from .get_execution_transition import get_execution_transition +from .list_execution_transitions import list_execution_transitions +from .list_executions import list_executions +from .prepare_execution_input import prepare_execution_input +from .update_execution import update_execution diff --git a/agents-api/agents_api/models/execution/create_execution.py b/agents-api/agents_api/models/execution/create_execution.py index 959bb92de..b4918065b 100644 --- a/agents-api/agents_api/models/execution/create_execution.py +++ b/agents-api/agents_api/models/execution/create_execution.py @@ -1,3 +1,4 @@ +from typing import Annotated, Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -7,6 +8,7 @@ from ...autogen.openapi_model import CreateExecutionRequest, Execution from ...common.utils.cozo import cozo_process_mutate_data +from ...common.utils.types import dict_like from ..utils import ( cozo_query, partialclass, @@ -16,6 +18,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -28,6 +33,7 @@ Execution, one=True, transform=lambda d: {"id": d["execution_id"], **d}, + _kind="inserted", ) @cozo_query @beartype @@ -36,7 +42,7 @@ def create_execution( developer_id: UUID, task_id: UUID, execution_id: UUID | None = None, - data: CreateExecutionRequest, + data: Annotated[CreateExecutionRequest | dict, dict_like(CreateExecutionRequest)], ) -> tuple[list[str], dict]: execution_id = execution_id or uuid4() @@ -44,8 +50,12 @@ def create_execution( task_id = str(task_id) execution_id = str(execution_id) - data.metadata = data.metadata or {} - execution_data = data.model_dump() + if isinstance(data, CreateExecutionRequest): + data.metadata = data.metadata or {} + execution_data = data.model_dump() + else: + data["metadata"] = data.get("metadata", {}) + execution_data = data columns, values = cozo_process_mutate_data( { diff --git a/agents-api/agents_api/models/execution/create_execution_transition.py b/agents-api/agents_api/models/execution/create_execution_transition.py index 606ca9406..88f29da0b 100644 --- a/agents-api/agents_api/models/execution/create_execution_transition.py +++ b/agents-api/agents_api/models/execution/create_execution_transition.py @@ -5,7 +5,12 @@ from pycozo.client import QueryException from pydantic import ValidationError -from ...autogen.openapi_model import CreateTransitionRequest, Transition +from ...autogen.openapi_model import ( + CreateTransitionRequest, + Transition, + UpdateExecutionRequest, +) +from ...common.protocol.tasks import transition_to_execution_status, valid_transitions from ...common.utils.cozo import cozo_process_mutate_data from ..utils import ( cozo_query, @@ -15,19 +20,24 @@ verify_developer_owns_resource_query, wrap_in_class, ) +from .update_execution import update_execution + + +def validate_transition_targets(data: CreateTransitionRequest) -> None: + # Make sure the current/next targets are valid + if data.type in ("finish", "error", "cancelled"): + assert data.next is None, "Next target must be None for finish/error/cancelled" + + if data.type in ("wait", "init"): + assert data.next is None, "Next target must be None for wait/init" -valid_transitions = { - # Start state - "init": ["wait", "error", "step", "cancelled"], - # End states - "finish": [], - "error": [], - "cancelled": [], - # Intermediate states - "wait": ["resume", "error", "cancelled"], - "resume": ["wait", "error", "step", "finish", "cancelled"], - "step": ["wait", "error", "step", "finish", "cancelled"], -} + if data.type in ("resume", "step"): + assert data.next is not None, "Next target must be provided for resume/step" + + if data.next.workflow == data.current.workflow: + assert ( + data.next.step > data.current.step + ), "Next step must be greater than current" @rewrap_exceptions( @@ -37,24 +47,51 @@ TypeError: partialclass(HTTPException, status_code=400), } ) -@wrap_in_class(Transition, transform=lambda d: {"id": d["transition_id"], **d}) +@wrap_in_class( + Transition, + transform=lambda d: { + **d, + "id": d["transition_id"], + "current": {"workflow": d["current"][0], "step": d["current"][1]}, + "next": d["next"] and {"workflow": d["next"][0], "step": d["next"][1]}, + }, + one=True, + _kind="inserted", +) @cozo_query @beartype def create_execution_transition( *, developer_id: UUID, execution_id: UUID, - transition_id: UUID | None = None, data: CreateTransitionRequest, + # Only one of these needed + transition_id: UUID | None = None, task_token: str | None = None, + # Only required for updating the execution status as well + update_execution_status: bool = False, + task_id: UUID | None = None, ) -> tuple[list[str], dict]: transition_id = transition_id or uuid4() data.metadata = data.metadata or {} data.execution_id = execution_id - transition_data = data.model_dump(exclude_unset=True) - columns, values = cozo_process_mutate_data( + # Prepare the transition data + transition_data = data.model_dump(exclude_unset=True, exclude={"id"}) + + # Parse the current and next targets + validate_transition_targets(data) + current_target = transition_data.pop("current") + next_target = transition_data.pop("next") + + transition_data["current"] = (current_target["workflow"], current_target["step"]) + transition_data["next"] = next_target and ( + next_target["workflow"], + next_target["step"], + ) + + columns, transition_values = cozo_process_mutate_data( { **transition_data, "task_token": task_token, @@ -76,17 +113,21 @@ def create_execution_transition( }}, type_created_at = [type, -created_at] - ?[last_type] := + matched[collect(last_type)] := last_transition_type[data], last_type_data = first(data), last_type = if(is_null(last_type_data), "init", last_type_data), valid_transition[last_type, $next_type] - :assert some + ?[valid] := + matched[prev_transitions], + found = length(prev_transitions), + valid = assert(found > 0, "Invalid transition"), """ + # Prepare the insert query insert_query = f""" - ?[{columns}] <- $values + ?[{columns}] <- $transition_values :insert transitions {{ {columns} @@ -95,6 +136,29 @@ def create_execution_transition( :returning """ + validate_status_query, update_execution_query, update_execution_params = ( + "", + "", + {}, + ) + + if update_execution_status: + assert ( + task_id is not None + ), "task_id is required for updating the execution status" + + # Prepare the execution update query + [*_, validate_status_query, update_execution_query], update_execution_params = ( + update_execution.__wrapped__( + developer_id=developer_id, + task_id=task_id, + execution_id=execution_id, + data=UpdateExecutionRequest( + status=transition_to_execution_status[data.type] + ), + ) + ) + queries = [ verify_developer_id_query(developer_id), verify_developer_owns_resource_query( @@ -103,6 +167,8 @@ def create_execution_transition( execution_id=execution_id, parents=[("agents", "agent_id"), ("tasks", "task_id")], ), + validate_status_query, + update_execution_query, check_last_transition_query, insert_query, ] @@ -110,8 +176,9 @@ def create_execution_transition( return ( queries, { - "values": values, + "transition_values": transition_values, "next_type": data.type, "valid_transitions": valid_transitions, + **update_execution_params, }, ) diff --git a/agents-api/agents_api/models/execution/create_temporal_lookup.py b/agents-api/agents_api/models/execution/create_temporal_lookup.py new file mode 100644 index 000000000..21c3005dd --- /dev/null +++ b/agents-api/agents_api/models/execution/create_temporal_lookup.py @@ -0,0 +1,73 @@ +from typing import TypeVar +from uuid import UUID, uuid4 + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError +from temporalio.client import WorkflowHandle + +from ...common.utils.cozo import cozo_process_mutate_data +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, +) + +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@cozo_query +@beartype +def create_temporal_lookup( + *, + developer_id: UUID, + task_id: UUID, + execution_id: UUID | None = None, + workflow_handle: WorkflowHandle, +) -> tuple[list[str], dict]: + execution_id = execution_id or uuid4() + + developer_id = str(developer_id) + task_id = str(task_id) + execution_id = str(execution_id) + + temporal_columns, temporal_values = cozo_process_mutate_data( + { + "execution_id": execution_id, + "id": workflow_handle.id, + "run_id": workflow_handle.run_id, + "first_execution_run_id": workflow_handle.first_execution_run_id, + "result_run_id": workflow_handle.result_run_id, + } + ) + + temporal_executions_lookup_query = f""" + ?[{temporal_columns}] <- $temporal_values + + :insert temporal_executions_lookup {{ + {temporal_columns} + }} + """ + + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query( + developer_id, + "tasks", + task_id=task_id, + parents=[("agents", "agent_id")], + ), + temporal_executions_lookup_query, + ] + + return (queries, {"temporal_values": temporal_values}) diff --git a/agents-api/agents_api/models/execution/get_execution.py b/agents-api/agents_api/models/execution/get_execution.py index a0bbe550c..cf9df2e09 100644 --- a/agents-api/agents_api/models/execution/get_execution.py +++ b/agents-api/agents_api/models/execution/get_execution.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -13,6 +14,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/execution/get_execution_transition.py b/agents-api/agents_api/models/execution/get_execution_transition.py index 23ce46d40..d418ef5f4 100644 --- a/agents-api/agents_api/models/execution/get_execution_transition.py +++ b/agents-api/agents_api/models/execution/get_execution_transition.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -14,6 +15,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -37,9 +41,6 @@ def get_execution_transition( transition_id or task_token ), "At least one of `transition_id` or `task_token` must be provided." - fields = [k for k in Transition.model_fields.keys() if k != "id"] - fields_str = ", ".join(fields) - if transition_id: transition_id = str(transition_id) filter = "id = to_uuid($transition_id)" @@ -47,12 +48,24 @@ def get_execution_transition( else: filter = "task_token = $task_token" - get_query = f""" - ?[id, {fields_str}] := - *transitions {{ - transition_id: id, - {fields_str} - }}, + get_query = """ + ?[id, type, current, next, output, metadata, updated_at, created_at] := + *transitions { + transition_id: id, + type, + current: current_tuple, + next: next_tuple, + output, + metadata, + updated_at, + created_at, + }, + current = {"state": current_tuple->0, "step": current_tuple->1}, + next = if( + isnull(next_tuple), + null, + {"state": next_tuple->0, "step": next_tuple->1}, + ), """ get_query += filter diff --git a/agents-api/agents_api/models/execution/get_paused_execution_token.py b/agents-api/agents_api/models/execution/get_paused_execution_token.py new file mode 100644 index 000000000..d8b0945c1 --- /dev/null +++ b/agents-api/agents_api/models/execution/get_paused_execution_token.py @@ -0,0 +1,73 @@ +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + AssertionError: partialclass(HTTPException, status_code=500), + } +) +@wrap_in_class(dict, one=True) +@cozo_query +@beartype +def get_paused_execution_token( + *, + developer_id: UUID, + execution_id: UUID, +) -> tuple[list[str], dict]: + execution_id = str(execution_id) + + check_status_query = """ + ?[execution_id, status] := + *executions { + execution_id, + status, + }, + execution_id = to_uuid($execution_id), + status = "awaiting_input" + + :assert some + """ + + get_query = """ + ?[task_token, max(created_at)] := + execution_id = to_uuid($execution_id), + *executions { + execution_id, + }, + *transitions { + execution_id, + created_at, + task_token, + type, + }, + type = "wait" + + """ + + queries = [ + verify_developer_id_query(developer_id), + check_status_query, + get_query, + ] + + return (queries, {"execution_id": execution_id}) diff --git a/agents-api/agents_api/models/execution/get_temporal_workflow_data.py b/agents-api/agents_api/models/execution/get_temporal_workflow_data.py new file mode 100644 index 000000000..bb0a462ef --- /dev/null +++ b/agents-api/agents_api/models/execution/get_temporal_workflow_data.py @@ -0,0 +1,55 @@ +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class(dict, one=True) +@cozo_query +@beartype +def get_temporal_workflow_data( + *, + execution_id: UUID, +) -> tuple[str, dict]: + # Executions are allowed direct GET access if they have execution_id + + query = """ + input[execution_id] <- [[to_uuid($execution_id)]] + + ?[id, run_id, result_run_id, first_execution_run_id] := + input[execution_id], + *temporal_executions_lookup { + execution_id, + id, + run_id, + result_run_id, + first_execution_run_id, + } + """ + + return ( + query, + { + "execution_id": str(execution_id), + }, + ) diff --git a/agents-api/agents_api/models/execution/list_execution_transitions.py b/agents-api/agents_api/models/execution/list_execution_transitions.py index a36135f2d..45aca935a 100644 --- a/agents-api/agents_api/models/execution/list_execution_transitions.py +++ b/agents-api/agents_api/models/execution/list_execution_transitions.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -9,6 +9,9 @@ from ...autogen.openapi_model import Transition from ..utils import cozo_query, partialclass, rewrap_exceptions, wrap_in_class +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -31,17 +34,25 @@ def list_execution_transitions( sort = f"{'-' if direction == 'desc' else ''}{sort_by}" query = f""" - ?[id, execution_id, type, current, next, output, metadata, updated_at, created_at] := *transitions {{ - execution_id, - transition_id: id, - type, - current, - next, - output, - metadata, - updated_at, - created_at, - }}, execution_id = to_uuid($execution_id) + ?[id, execution_id, type, current, next, output, metadata, updated_at, created_at] := + *transitions {{ + execution_id, + transition_id: id, + type, + current: current_tuple, + next: next_tuple, + output, + metadata, + updated_at, + created_at, + }}, + current = {{"state": current_tuple->0, "step": current_tuple->1}}, + next = if( + isnull(next_tuple), + null, + {{"state": next_tuple->0, "step": next_tuple->1}}, + ), + execution_id = to_uuid($execution_id) :limit $limit :offset $offset diff --git a/agents-api/agents_api/models/execution/list_executions.py b/agents-api/agents_api/models/execution/list_executions.py index e497ec4fe..09194cdbd 100644 --- a/agents-api/agents_api/models/execution/list_executions.py +++ b/agents-api/agents_api/models/execution/list_executions.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/execution/prepare_execution_input.py b/agents-api/agents_api/models/execution/prepare_execution_input.py index c1a767740..c858bc6a0 100644 --- a/agents-api/agents_api/models/execution/prepare_execution_input.py +++ b/agents-api/agents_api/models/execution/prepare_execution_input.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -20,6 +21,9 @@ ) from .get_execution import get_execution +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -29,7 +33,7 @@ } ) @wrap_in_class(ExecutionInput, one=True) -@cozo_query(debug=True) +@cozo_query @beartype def prepare_execution_input( *, @@ -65,7 +69,7 @@ def prepare_execution_input( ) # Remove the outer curly braces - task_query = task_query.strip()[1:-1] + task_query = task_query[-1].strip() task_fields = ( "id", @@ -156,7 +160,7 @@ def prepare_execution_input( *_execution {{ {', '.join(execution_fields)} }}, execution = {{ {make_cozo_json_query(execution_fields)} }} - ?[developer_id, execution, task, agent, user, session, tools] := + ?[developer_id, execution, task, agent, user, session, tools, arguments] := developer_id = to_uuid($developer_id), agent_json[agent], @@ -167,6 +171,7 @@ def prepare_execution_input( # TODO: Enable these later user = null, session = null, + arguments = execution->"input" """ queries = [ diff --git a/agents-api/agents_api/models/execution/test_execution_queries.py b/agents-api/agents_api/models/execution/test_execution_queries.py deleted file mode 100644 index eade4329c..000000000 --- a/agents-api/agents_api/models/execution/test_execution_queries.py +++ /dev/null @@ -1,377 +0,0 @@ -# # Tests for execution queries -# from uuid import uuid4 -# -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test - -# from ..agent.create_agent import create_agent_query -# from ..task.create_task import create_task_query -# from .create_execution import create_execution_query -# from .get_execution import get_execution_query -# from .get_execution_status import get_execution_status_query -# from .get_execution_input import get_execution_input_query -# from .list_executions import list_task_executions_query -# from .update_execution_status import update_execution_status_query -# from .create_execution_transition import create_execution_transition_query -# from .get_execution_transition import get_execution_transition_query -# from .list_execution_transitions import list_execution_transitions_query -# from .update_execution_transition import update_execution_transition_query - -# from ...common.protocol.tasks import ExecutionInput - - -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create execution") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - - -# @test("model: create execution with session") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() -# session_id = uuid4() - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# session_id=session_id, -# arguments={"input": "test"}, -# client=client, -# ) - - -# @test("model: get execution") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - -# result = get_execution_query( -# task_id=task_id, execution_id=execution_id, client=client -# ) - -# assert len(result["status"]) == 1 -# assert result["status"][0] == "queued" - - -# @test("model: get execution status") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - -# result = get_execution_status_query( -# task_id=task_id, execution_id=execution_id, client=client -# ) - -# assert len(result["status"]) == 1 -# assert result["status"][0] == "queued" - - -# @test("model: get execution input") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_agent_query( -# agent_id=agent_id, -# developer_id=developer_id, -# name="test", -# about="test", -# model="gpt-4", -# metadata={"test": "test"}, -# client=client, -# ) - -# create_task_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# name="test", -# description="test", -# input_schema={"test": "test"}, -# tools_available=[], -# workflows=[], -# client=client, -# ) - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - -# result = get_execution_input_query( -# task_id=task_id, execution_id=execution_id, client=client -# ) - -# assert len(result["execution"]) == 1 - - -# @test("model: fetch execution input") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_agent_query( -# agent_id=agent_id, -# developer_id=developer_id, -# name="test", -# about="test", -# model="gpt-4", -# metadata={"test": "test"}, -# client=client, -# ) - -# create_task_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# name="test", -# description="test", -# input_schema={"test": "test"}, -# tools_available=[], -# workflows=[{"name": "main", "steps": []}], -# client=client, -# ) - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - -# result = ExecutionInput.fetch( -# developer_id=developer_id, -# task_id=task_id, -# execution_id=execution_id, -# client=client, -# ) - -# assert result.execution.id == execution_id - - -# @test("model: list executions empty") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() - -# result = list_task_executions_query( -# task_id=task_id, agent_id=agent_id, developer_id=developer_id, client=client -# ) - -# assert len(result) == 0 - - -# @test("model: list executions") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - -# result = list_task_executions_query( -# task_id=task_id, agent_id=agent_id, developer_id=developer_id, client=client -# ) - -# assert len(result["status"]) == 1 -# assert result["status"][0] == "queued" - - -# @test("model: update execution status") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() -# execution_id = uuid4() - -# create_execution_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# execution_id=execution_id, -# arguments={"input": "test"}, -# client=client, -# ) - -# result = update_execution_status_query( -# task_id=task_id, execution_id=execution_id, status="running", client=client -# ) - -# updated_rows = result[result["_kind"] == "inserted"].reset_index() -# assert len(updated_rows) == 1 -# assert updated_rows["status"][0] == "running" - - -# @test("model: create execution transition") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# execution_id = uuid4() -# transition_id = uuid4() - -# create_execution_transition_query( -# developer_id=developer_id, -# execution_id=execution_id, -# transition_id=transition_id, -# type="step", -# from_=("test", 1), -# to=("test", 2), -# outputs={"input": "test"}, -# client=client, -# ) - - -# @test("model: get execution transition") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# execution_id = uuid4() -# transition_id = uuid4() - -# create_execution_transition_query( -# developer_id=developer_id, -# execution_id=execution_id, -# transition_id=transition_id, -# type="step", -# from_=("test", 1), -# to=("test", 2), -# outputs={"input": "test"}, -# client=client, -# ) - -# result = get_execution_transition_query( -# execution_id=execution_id, transition_id=transition_id, client=client -# ) - -# assert len(result["type"]) == 1 - - -# @test("model: list execution transitions") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# execution_id = uuid4() -# transition_id = uuid4() - -# create_execution_transition_query( -# developer_id=developer_id, -# execution_id=execution_id, -# transition_id=transition_id, -# type="step", -# from_=("test", 1), -# to=("test", 2), -# outputs={"input": "test"}, -# client=client, -# ) - -# result = list_execution_transitions_query(execution_id=execution_id, client=client) - -# assert len(result["type"]) == 1 - - -# @test("model: update execution transitions") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# execution_id = uuid4() -# transition_id = uuid4() - -# create_execution_transition_query( -# developer_id=developer_id, -# execution_id=execution_id, -# transition_id=transition_id, -# type="step", -# from_=("test", 1), -# to=("test", 2), -# outputs={"input": "test"}, -# client=client, -# ) - -# result = update_execution_transition_query( -# execution_id=execution_id, -# transition_id=transition_id, -# type="finished", -# client=client, -# ) - -# updated_rows = result[result["_kind"] == "inserted"].reset_index() -# assert len(updated_rows) == 1 -# assert updated_rows["type"][0] == "finished" diff --git a/agents-api/agents_api/models/execution/update_execution.py b/agents-api/agents_api/models/execution/update_execution.py index 1424c4ef2..90a8cb1cc 100644 --- a/agents-api/agents_api/models/execution/update_execution.py +++ b/agents-api/agents_api/models/execution/update_execution.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -9,6 +10,9 @@ ResourceUpdatedResponse, UpdateExecutionRequest, ) +from ...common.protocol.tasks import ( + valid_previous_statuses as valid_previous_statuses_map, +) from ...common.utils.cozo import cozo_process_mutate_data from ..utils import ( cozo_query, @@ -19,6 +23,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -31,6 +38,7 @@ ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["execution_id"], "jobs": [], **d}, + _kind="inserted", ) @cozo_query @beartype @@ -45,19 +53,11 @@ def update_execution( task_id = str(task_id) execution_id = str(execution_id) - valid_previous_statuses = [] - match data.status: - case "running": - valid_previous_statuses = ["queued", "starting", "awaiting_input"] - case "cancelled": - valid_previous_statuses = [ - "queued", - "starting", - "awaiting_input", - "running", - ] + valid_previous_statuses: list[str] | None = valid_previous_statuses_map.get( + data.status, None + ) - execution_data = data.model_dump(exclude_none=True) + execution_data: dict = data.model_dump(exclude_none=True) columns, values = cozo_process_mutate_data( { @@ -99,7 +99,7 @@ def update_execution( task_id=task_id, parents=[("agents", "agent_id")], ), - validate_status_query, + validate_status_query if valid_previous_statuses is not None else "", update_query, ] diff --git a/agents-api/agents_api/models/session/__init__.py b/agents-api/agents_api/models/session/__init__.py index c73d7ee82..bc5f7fbb4 100644 --- a/agents-api/agents_api/models/session/__init__.py +++ b/agents-api/agents_api/models/session/__init__.py @@ -8,3 +8,14 @@ - Deleting sessions and their associated data from the database. This module plays a crucial role in the application by facilitating the management of session data, which is essential for tracking and analyzing user interactions and behaviors within the system.""" + +# ruff: noqa: F401, F403, F405 + +from .create_or_update_session import create_or_update_session +from .create_session import create_session +from .delete_session import delete_session +from .get_session import get_session +from .list_sessions import list_sessions +from .patch_session import patch_session +from .prepare_session_data import prepare_session_data +from .update_session import update_session diff --git a/agents-api/agents_api/models/session/create_or_update_session.py b/agents-api/agents_api/models/session/create_or_update_session.py index 7b2e39e74..60c0b7f71 100644 --- a/agents-api/agents_api/models/session/create_or_update_session.py +++ b/agents-api/agents_api/models/session/create_or_update_session.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -19,6 +20,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/session/create_session.py b/agents-api/agents_api/models/session/create_session.py index aab7fddac..a83837ffd 100644 --- a/agents-api/agents_api/models/session/create_session.py +++ b/agents-api/agents_api/models/session/create_session.py @@ -3,6 +3,7 @@ It constructs and executes a datalog query to insert session data. """ +from typing import Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -20,6 +21,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -36,6 +40,7 @@ "updated_at": (d.pop("updated_at")[0]), **d, }, + _kind="inserted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/session/delete_session.py b/agents-api/agents_api/models/session/delete_session.py index e6b0037ca..af9e331c7 100644 --- a/agents-api/agents_api/models/session/delete_session.py +++ b/agents-api/agents_api/models/session/delete_session.py @@ -1,5 +1,6 @@ """This module contains the implementation for deleting sessions from the 'cozodb' database using datalog queries.""" +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -18,6 +19,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -34,6 +38,7 @@ "deleted_at": utcnow(), "jobs": [], }, + _kind="deleted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/session/get_session.py b/agents-api/agents_api/models/session/get_session.py index 42dafc30d..0a365df2f 100644 --- a/agents-api/agents_api/models/session/get_session.py +++ b/agents-api/agents_api/models/session/get_session.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -5,7 +6,7 @@ from pycozo.client import QueryException from pydantic import ValidationError -from ...autogen.openapi_model import make_session +from ...common.protocol.sessions import make_session from ..utils import ( cozo_query, partialclass, @@ -15,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/session/list_sessions.py b/agents-api/agents_api/models/session/list_sessions.py index 70ac23a3b..fa1097e5e 100644 --- a/agents-api/agents_api/models/session/list_sessions.py +++ b/agents-api/agents_api/models/session/list_sessions.py @@ -1,6 +1,6 @@ """This module contains functions for querying session data from the 'cozodb' database.""" -from typing import Any, Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -8,7 +8,7 @@ from pycozo.client import QueryException from pydantic import ValidationError -from ...autogen.openapi_model import make_session +from ...common.protocol.sessions import make_session from ...common.utils import json from ..utils import ( cozo_query, @@ -18,6 +18,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/session/patch_session.py b/agents-api/agents_api/models/session/patch_session.py index 5aae245cc..e6e0e5750 100644 --- a/agents-api/agents_api/models/session/patch_session.py +++ b/agents-api/agents_api/models/session/patch_session.py @@ -1,5 +1,6 @@ """This module contains functions for patching session data in the 'cozodb' database using datalog queries.""" +from typing import Any, List, TypeVar from uuid import UUID from beartype import beartype @@ -18,7 +19,10 @@ wrap_in_class, ) -_fields = [ +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + +_fields: List[str] = [ "situation", "summary", "created_at", @@ -46,6 +50,7 @@ "jobs": [], **d, }, + _kind="inserted", ) @cozo_query @beartype @@ -92,7 +97,7 @@ def patch_session( *sessions{{ {rest_fields}, metadata: md, @ "NOW" }}, - updated_at = [floor(now()), true], + updated_at = 'ASSERT', metadata = concat(md, $metadata), :put sessions {{ diff --git a/agents-api/agents_api/models/session/prepare_chat_context.py b/agents-api/agents_api/models/session/prepare_chat_context.py deleted file mode 100644 index f599a8d25..000000000 --- a/agents-api/agents_api/models/session/prepare_chat_context.py +++ /dev/null @@ -1,150 +0,0 @@ -from uuid import UUID - -from beartype import beartype -from fastapi import HTTPException -from pycozo.client import QueryException -from pydantic import ValidationError - -from ...autogen.openapi_model import make_session -from ...common.protocol.sessions import ChatContext -from ..entry.list_entries import list_entries -from ..tools.list_tools import list_tools -from ..utils import ( - cozo_query, - make_cozo_json_query, - partialclass, - rewrap_exceptions, - verify_developer_id_query, - verify_developer_owns_resource_query, - wrap_in_class, -) -from .prepare_session_data import prepare_session_data - - -@rewrap_exceptions( - { - QueryException: partialclass(HTTPException, status_code=400), - ValidationError: partialclass(HTTPException, status_code=400), - TypeError: partialclass(HTTPException, status_code=400), - } -) -@wrap_in_class( - ChatContext, - one=True, - transform=lambda d: { - **d, - "session": make_session( - agents=[a["id"] for a in d["agents"]], - users=[u["id"] for u in d["users"]], - **d["session"], - ), - }, -) -@cozo_query -@beartype -def prepare_chat_context( - *, - developer_id: UUID, - agent_id: UUID, - session_id: UUID, - # doc_query_embedding: list[float], - # docs_confidence: float = 0.4, - # k_docs: int = 3, -) -> tuple[list[str], dict]: - """ - Executes a complex query to retrieve memory context based on session ID, tool and document embeddings. - """ - # VECTOR_SIZE = 1024 - # docs_radius: float = 1.0 - docs_confidence - - session_data_query, sd_vars = prepare_session_data.__wrapped__( - developer_id=developer_id, session_id=session_id - ) - - # Remove the outer curly braces - session_data_query = session_data_query.strip()[1:-1] - - session_data_fields = ("session", "agents", "users") - - session_data_query += """ - :create _session_data_json { - agents: [Json], - users: [Json], - session: Json, - } - """ - - tools_query, t_vars = list_tools.__wrapped__( - developer_id=developer_id, agent_id=agent_id - ) - - # Remove the outer curly braces - tools_query = tools_query.strip()[1:-1] - - tools_fields = ("name", "type", "spec") - - tools_query += f""" - :create _tools {{ - {', '.join(tools_fields)} - }} - """ - - # TODO: Implement the following queries - # docs_query = ... - - entries_query, e_vars = list_entries.__wrapped__( - developer_id=developer_id, - session_id=session_id, - allowed_sources=["api_request", "api_response", "summarizer"], - exclude_relations=["summary_of"], - ) - - # Remove the outer curly braces - entries_query = entries_query.strip()[1:-1] - - entries_fields = ("source", "role", "name", "content", "token_count", "timestamp") - - entries_query += f""" - :create _entries {{ - {', '.join(entries_fields)} - }} - """ - - combine_query = f""" - tools_json[collect(tool)] := - *_tools {{ {', '.join(tools_fields)} }}, - tool = {{ {make_cozo_json_query(tools_fields)} }} - - entries_json[collect(entry)] := - *_entries {{ {', '.join(entries_fields)} }}, - entry = {{ {make_cozo_json_query(entries_fields)} }} - - ?[{', '.join(session_data_fields)}, tools, entries] := - *_session_data_json {{ {', '.join(session_data_fields)} }}, - tools_json[tools], - entries_json[entries] - """ - - queries = [ - verify_developer_id_query(developer_id), - verify_developer_owns_resource_query( - developer_id, "sessions", session_id=session_id - ), - session_data_query, - tools_query, - entries_query, - combine_query, - ] - - return ( - queries, - { - "session_id": str(session_id), - **sd_vars, - **t_vars, - **e_vars, - # "doc_query_embedding": doc_query_embedding, - # "k_docs": k_docs, - # "docs_radius": round(docs_radius, 2), - }, - ) diff --git a/agents-api/agents_api/models/session/prepare_session_data.py b/agents-api/agents_api/models/session/prepare_session_data.py index 414f29007..9a936b183 100644 --- a/agents-api/agents_api/models/session/prepare_session_data.py +++ b/agents-api/agents_api/models/session/prepare_session_data.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -5,8 +6,7 @@ from pycozo.client import QueryException from pydantic import ValidationError -from ...autogen.openapi_model import make_session -from ...common.protocol.sessions import SessionData +from ...common.protocol.sessions import SessionData, make_session from ..utils import ( cozo_query, partialclass, @@ -16,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/session/test_session_queries.py b/agents-api/agents_api/models/session/test_session_queries.py deleted file mode 100644 index 8bc750359..000000000 --- a/agents-api/agents_api/models/session/test_session_queries.py +++ /dev/null @@ -1,284 +0,0 @@ -# """ -# This module contains tests for session-related queries against the 'cozodb' database. It verifies the creation, retrieval, and deletion of session records as defined in the schema provided in agents-api/README.md. -# """ - -# # Tests for session queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test, skip - -# from ..agent.create_agent import create_agent_query -# from ..user.create_user import create_user_query - -# from .create_session import create_session_query -# from .delete_session import delete_session_query -# from .get_session import get_session_query -# from .list_sessions import list_sessions_query -# from .session_data import get_session_data, session_data_query - - -# MODEL = "julep-ai/samantha-1-turbo" - - -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create session") -# def _(): -# """Test session creation with a valid session, user, agent, and developer IDs.""" -# client = cozo_client() -# session_id = uuid4() -# agent_id = uuid4() -# user_id = uuid4() -# developer_id = uuid4() - -# create_session_query( -# session_id=session_id, -# user_id=user_id, -# developer_id=developer_id, -# agent_id=agent_id, -# situation="test session about", -# client=client, -# ) - - -# @test("model: create session no user") -# def _(): -# """Test session creation without a user ID.""" -# client = cozo_client() -# session_id = uuid4() -# agent_id = uuid4() -# developer_id = uuid4() - -# create_session_query( -# session_id=session_id, -# user_id=None, -# developer_id=developer_id, -# agent_id=agent_id, -# situation="test session about", -# client=client, -# ) - - -# @test("model: get session not exists") -# def _(): -# """Verify that querying a non-existent session returns an empty result.""" -# client = cozo_client() -# session_id = uuid4() -# developer_id = uuid4() - -# result = get_session_query( -# session_id=session_id, -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 0 - - -# @test("model: get session exists") -# def _(): -# """Verify that a created session can be successfully retrieved.""" -# client = cozo_client() -# session_id = uuid4() -# agent_id = uuid4() -# user_id = uuid4() -# developer_id = uuid4() - -# result = create_session_query( -# session_id=session_id, -# user_id=user_id, -# agent_id=agent_id, -# developer_id=developer_id, -# situation="test session about", -# client=client, -# ) - -# result = get_session_query( -# session_id=session_id, -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 1 - - -# @test("model: get session data") -# def _(): -# """Test retrieval of session data for an existing session.""" -# # Setup client for user and agent -# client = cozo_client() - -# session_id = uuid4() -# agent_id = uuid4() -# user_id = uuid4() -# developer_id = uuid4() - -# # Create a user -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# about="test user about", -# name="test user name", -# client=client, -# ) - -# # Create an agent -# create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# about="test agent about", -# name="test agent name", -# client=client, -# ) - -# # Create a session - -# result = create_session_query( -# session_id=session_id, -# user_id=user_id, -# agent_id=agent_id, -# developer_id=developer_id, -# situation="test session about", -# client=client, -# ) - -# result = session_data_query( -# session_id=session_id, -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["user_about"]) == 1 - - -# @test("model: delete session") -# def _(): -# """Test the deletion of a session and verify it cannot be retrieved afterwards.""" -# # Setup client for user and agent -# client = cozo_client() - -# session_id = uuid4() -# agent_id = uuid4() -# user_id = uuid4() -# developer_id = uuid4() - -# # Create a user -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# about="test user about", -# name="test user name", -# client=client, -# ) - -# # Create an agent -# create_agent_query( -# agent_id=agent_id, -# model=MODEL, -# developer_id=developer_id, -# about="test agent about", -# name="test agent name", -# client=client, -# ) - -# # Create a session -# result = create_session_query( -# session_id=session_id, -# user_id=user_id, -# agent_id=agent_id, -# developer_id=developer_id, -# situation="test session about", -# client=client, -# ) - -# # Delete the session -# result = delete_session_query( -# session_id=session_id, -# developer_id=developer_id, -# client=client, -# ) - -# # Check that the session is deleted -# result = get_session_query( -# session_id=session_id, -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 0 - - -# @skip("get session data using get_session_data") -# def _(): -# # Setup client for user and agent -# client = cozo_client() - -# developer_id = uuid4() -# session_id = uuid4() -# agent_id = uuid4() -# user_id = uuid4() - -# # Setup: Create a user, agent, and session for testing session data retrieval using get_session_data. -# # Create a user -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# about="test user about", -# name="test user name", -# client=client, -# ) - -# # Create an agent -# create_agent_query( -# developer_id=developer_id, -# model=MODEL, -# agent_id=agent_id, -# about="test agent about", -# name="test agent name", -# client=client, -# ) - -# # Create a session - -# create_session_query( -# developer_id=developer_id, -# session_id=session_id, -# user_id=user_id, -# agent_id=agent_id, -# situation="test session about", -# client=client, -# ) - -# session_data = get_session_data( -# developer_id=developer_id, -# session_id=session_id, -# client=client, -# ) - -# assert session_data is not None -# assert session_data.user_about == "test user about" - - -# @skip("list sessions") -# def _(): -# client = cozo_client() -# developer_id = uuid4() - -# result = list_sessions_query( -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 0 diff --git a/agents-api/agents_api/models/session/update_session.py b/agents-api/agents_api/models/session/update_session.py index b498650e8..99688bd98 100644 --- a/agents-api/agents_api/models/session/update_session.py +++ b/agents-api/agents_api/models/session/update_session.py @@ -1,3 +1,4 @@ +from typing import Any, List, TypeVar from uuid import UUID from beartype import beartype @@ -16,7 +17,10 @@ wrap_in_class, ) -_fields = [ +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + +_fields: List[str] = [ "situation", "summary", "metadata", @@ -44,6 +48,7 @@ "jobs": [], **d, }, + _kind="inserted", ) @cozo_query @beartype @@ -81,7 +86,7 @@ def update_session( *sessions{{ {rest_fields}, @ "NOW" }}, - updated_at = [floor(now()), true] + updated_at = 'ASSERT' :put sessions {{ {all_fields}, updated_at diff --git a/agents-api/agents_api/models/task/__init__.py b/agents-api/agents_api/models/task/__init__.py index e69de29bb..2eaff3ab3 100644 --- a/agents-api/agents_api/models/task/__init__.py +++ b/agents-api/agents_api/models/task/__init__.py @@ -0,0 +1,9 @@ +# ruff: noqa: F401, F403, F405 + +from .create_or_update_task import create_or_update_task +from .create_task import create_task +from .delete_task import delete_task +from .get_task import get_task +from .list_tasks import list_tasks +from .patch_task import patch_task +from .update_task import update_task diff --git a/agents-api/agents_api/models/task/create_or_update_task.py b/agents-api/agents_api/models/task/create_or_update_task.py index a3014a64f..af7e258d9 100644 --- a/agents-api/agents_api/models/task/create_or_update_task.py +++ b/agents-api/agents_api/models/task/create_or_update_task.py @@ -3,6 +3,7 @@ It constructs and executes a datalog query to insert Task data. """ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -26,6 +27,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/task/create_task.py b/agents-api/agents_api/models/task/create_task.py index 17f991e7b..a44146c34 100644 --- a/agents-api/agents_api/models/task/create_task.py +++ b/agents-api/agents_api/models/task/create_task.py @@ -3,6 +3,7 @@ It constructs and executes a datalog query to insert Task data. """ +from typing import Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -24,6 +25,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -32,7 +36,7 @@ TypeError: partialclass(HTTPException, status_code=400), } ) -@wrap_in_class(spec_to_task, one=True) +@wrap_in_class(spec_to_task, one=True, _kind="inserted") @cozo_query @beartype def create_task( diff --git a/agents-api/agents_api/models/task/delete_task.py b/agents-api/agents_api/models/task/delete_task.py index 28c3defb3..60d6f2756 100644 --- a/agents-api/agents_api/models/task/delete_task.py +++ b/agents-api/agents_api/models/task/delete_task.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +17,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -33,6 +37,7 @@ "deleted_at": utcnow(), **d, }, + _kind="deleted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/task/get_task.py b/agents-api/agents_api/models/task/get_task.py index 975da28cd..076936b6c 100644 --- a/agents-api/agents_api/models/task/get_task.py +++ b/agents-api/agents_api/models/task/get_task.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -15,6 +16,9 @@ ) from .create_task import spec_to_task +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/task/list_tasks.py b/agents-api/agents_api/models/task/list_tasks.py index 1c6e16efd..573c1404e 100644 --- a/agents-api/agents_api/models/task/list_tasks.py +++ b/agents-api/agents_api/models/task/list_tasks.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +16,9 @@ ) from .create_task import spec_to_task +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/task/patch_task.py b/agents-api/agents_api/models/task/patch_task.py index 93d57f32f..1837064c7 100644 --- a/agents-api/agents_api/models/task/patch_task.py +++ b/agents-api/agents_api/models/task/patch_task.py @@ -3,6 +3,7 @@ It constructs and executes a datalog query to insert Task data. """ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -22,6 +23,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -39,6 +43,7 @@ "updated_at": d["updated_at_ms"][0] / 1000, **d, }, + _kind="inserted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/task/test_task_queries.py b/agents-api/agents_api/models/task/test_task_queries.py deleted file mode 100644 index ff9150f99..000000000 --- a/agents-api/agents_api/models/task/test_task_queries.py +++ /dev/null @@ -1,90 +0,0 @@ -# # Tests for task queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test - -# from .create_task import create_task_query -# from .get_task import get_task_query -# from .list_tasks import list_tasks_query - - -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create task") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() - -# create_task_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# name="test task", -# description="test task about", -# input_schema={"type": "object", "additionalProperties": True}, -# client=client, -# ) - - -# @test("model: list tasks") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() - -# result = list_tasks_query( -# developer_id=developer_id, -# agent_id=agent_id, -# client=client, -# ) - -# assert len(result["id"]) == 0 - - -# @test("model: get task exists") -# def _(): -# client = cozo_client() -# developer_id = uuid4() -# agent_id = uuid4() -# task_id = uuid4() - -# create_task_query( -# developer_id=developer_id, -# agent_id=agent_id, -# task_id=task_id, -# name="test task", -# description="test task about", -# input_schema={"type": "object", "additionalProperties": True}, -# client=client, -# ) - -# result = get_task_query( -# agent_id=agent_id, task_id=task_id, developer_id=developer_id, client=client -# ) - -# assert len(result["id"]) == 1 - - -# # @test("model: delete task") -# # def _(): -# # TODO: Implement this test -# # raise NotImplementedError - - -# # @test("model: update task") -# # def _(): -# # TODO: Implement this test -# # raise NotImplementedError diff --git a/agents-api/agents_api/models/task/update_task.py b/agents-api/agents_api/models/task/update_task.py index 151d4fb4d..9cfb04357 100644 --- a/agents-api/agents_api/models/task/update_task.py +++ b/agents-api/agents_api/models/task/update_task.py @@ -3,6 +3,7 @@ It constructs and executes a datalog query to insert Task data. """ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -22,6 +23,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/tools/__init__.py b/agents-api/agents_api/models/tools/__init__.py index 043da3916..98f3a5e3a 100644 --- a/agents-api/agents_api/models/tools/__init__.py +++ b/agents-api/agents_api/models/tools/__init__.py @@ -8,3 +8,12 @@ This module is crucial for the effective management and utilization of tools in the application, ensuring that tools can be created, managed, and utilized efficiently. """ + +# ruff: noqa: F401, F403, F405 + +from .create_tools import create_tools +from .delete_tool import delete_tool +from .get_tool import get_tool +from .list_tools import list_tools +from .patch_tool import patch_tool +from .update_tool import update_tool diff --git a/agents-api/agents_api/models/tools/create_tools.py b/agents-api/agents_api/models/tools/create_tools.py index 597268863..dd8397797 100644 --- a/agents-api/agents_api/models/tools/create_tools.py +++ b/agents-api/agents_api/models/tools/create_tools.py @@ -1,5 +1,6 @@ """This module contains functions for creating tools in the CozoDB database.""" +from typing import Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -17,6 +18,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -32,6 +36,7 @@ d["type"]: d.pop("spec"), **d, }, + _kind="inserted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/tools/delete_tools.py b/agents-api/agents_api/models/tools/delete_tool.py similarity index 93% rename from agents-api/agents_api/models/tools/delete_tools.py rename to agents-api/agents_api/models/tools/delete_tool.py index ad6a9d4f5..c79cdfd29 100644 --- a/agents-api/agents_api/models/tools/delete_tools.py +++ b/agents-api/agents_api/models/tools/delete_tool.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +17,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -28,6 +32,7 @@ ResourceDeletedResponse, one=True, transform=lambda d: {"id": d["tool_id"], "deleted_at": utcnow(), "jobs": [], **d}, + _kind="deleted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/tools/get_tool.py b/agents-api/agents_api/models/tools/get_tool.py index f3e6a52c3..5ea009064 100644 --- a/agents-api/agents_api/models/tools/get_tool.py +++ b/agents-api/agents_api/models/tools/get_tool.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -15,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/tools/list_tools.py b/agents-api/agents_api/models/tools/list_tools.py index e1636fdd4..4b44fc1e0 100644 --- a/agents-api/agents_api/models/tools/list_tools.py +++ b/agents-api/agents_api/models/tools/list_tools.py @@ -1,4 +1,4 @@ -from typing import Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/tools/patch_tool.py b/agents-api/agents_api/models/tools/patch_tool.py index d6c01c1df..5bbfe1c91 100644 --- a/agents-api/agents_api/models/tools/patch_tool.py +++ b/agents-api/agents_api/models/tools/patch_tool.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +17,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -28,11 +32,12 @@ ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["tool_id"], "jobs": [], **d}, + _kind="inserted", ) @cozo_query @beartype def patch_tool( - *, developer_id: UUID, agent_id: UUID, tool_id: UUID, patch_tool: PatchToolRequest + *, developer_id: UUID, agent_id: UUID, tool_id: UUID, data: PatchToolRequest ) -> tuple[list[str], dict]: """ # Execute the datalog query and return the results as a DataFrame @@ -41,14 +46,17 @@ def patch_tool( Parameters: - agent_id (UUID): The unique identifier of the agent. - tool_id (UUID): The unique identifier of the tool to be updated. - - patch_tool (PatchToolRequest): The request payload containing the updated tool information. + - data (PatchToolRequest): The request payload containing the updated tool information. Returns: - ResourceUpdatedResponse: The updated tool data. """ + agent_id = str(agent_id) + tool_id = str(tool_id) + # Extract the tool data from the payload - patch_data = patch_tool.model_dump(exclude_none=True) + patch_data = data.model_dump(exclude_none=True) # Assert that only one of the tool type fields is present tool_specs = [ @@ -64,16 +72,15 @@ def patch_tool( patch_data["type"] = patch_data.get("type", tool_type) assert patch_data["type"] == tool_type, "Invalid tool update" - if tool_spec is not None: - # Rename the tool definition to 'spec' - patch_data["spec"] = tool_spec + tool_spec = tool_spec or {} + if tool_spec: del patch_data[tool_type] tool_cols, tool_vals = cozo_process_mutate_data( { **patch_data, - "agent_id": str(agent_id), - "tool_id": str(tool_id), + "agent_id": agent_id, + "tool_id": tool_id, } ) @@ -81,11 +88,17 @@ def patch_tool( patch_query = f""" input[{tool_cols}] <- $input - ?[{tool_cols}, updated_at] := + ?[{tool_cols}, spec, updated_at] := + *tools {{ + agent_id: to_uuid($agent_id), + tool_id: to_uuid($tool_id), + spec: old_spec, + }}, input[{tool_cols}], + spec = concat(old_spec, $spec), updated_at = now() - :update tools {{ {tool_cols}, updated_at }} + :update tools {{ {tool_cols}, spec, updated_at }} :returning """ @@ -95,4 +108,7 @@ def patch_tool( patch_query, ] - return (queries, dict(input=tool_vals)) + return ( + queries, + dict(input=tool_vals, spec=tool_spec, agent_id=agent_id, tool_id=tool_id), + ) diff --git a/agents-api/agents_api/models/tools/test_tool_queries.py b/agents-api/agents_api/models/tools/test_tool_queries.py deleted file mode 100644 index 058b36eba..000000000 --- a/agents-api/agents_api/models/tools/test_tool_queries.py +++ /dev/null @@ -1,127 +0,0 @@ -# # Tests for entry queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import test - -# from ...autogen.openapi_model import FunctionDef -# from .create_tools import create_function_query, create_multiple_functions_query -# from .delete_tools import delete_function_by_id_query -# from .get_tool import get_function_by_id_query -# from .list_tools import list_functions_by_agent_query - - -# def cozo_client(migrations_dir: str = "./migrations"): -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create function") -# def _(): -# client = cozo_client() - -# agent_id = uuid4() -# tool_id = uuid4() -# function = FunctionDef( -# name="hello_world", -# description="A function that prints hello world", -# parameters={"type": "object", "properties": {}}, -# ) - -# result = create_function_query(agent_id, tool_id, function, client=client) - -# assert result["created_at"][0] - - -# @test("model: create multiple functions") -# def _(): -# client = cozo_client() - -# agent_id = uuid4() -# function = FunctionDef( -# name="hello_world", -# description="A function that prints hello world", -# parameters={"type": "object", "properties": {}}, -# ) -# num_functions = 10 - -# result = create_multiple_functions_query( -# agent_id, [function] * num_functions, client=client -# ) - -# assert result["created_at"][0] -# assert len(result["tool_id"]) == num_functions - - -# @test("model: delete function") -# def _(): -# client = cozo_client() - -# # Create function -# agent_id = uuid4() -# tool_id = uuid4() -# function = FunctionDef( -# name="hello_world", -# description="A function that prints hello world", -# parameters={"type": "object", "properties": {}}, -# ) - -# create_function_query(agent_id, tool_id, function, client=client) - -# # Delete function -# result = delete_function_by_id_query(agent_id, tool_id, client=client) - -# delete_info = next( -# (row for row in result.to_dict("records") if row["_kind"] == "deleted"), None -# ) - -# assert delete_info is not None, "Delete operation did not find the row" - - -# @test("model: get function") -# def _(): -# client = cozo_client() - -# # Create function -# agent_id = uuid4() -# tool_id = uuid4() -# function = FunctionDef( -# name="hello_world", -# description="A function that prints hello world", -# parameters={"type": "object", "properties": {}}, -# ) - -# create_function_query(agent_id, tool_id, function, client=client) - -# # Get function -# result = get_function_by_id_query(agent_id, tool_id, client=client) - -# assert len(result["tool_id"]) == 1, "Get operation did not find the row" - - -# @test("model: list functions") -# def _(): -# client = cozo_client() - -# agent_id = uuid4() -# function = FunctionDef( -# name="hello_world", -# description="A function that prints hello world", -# parameters={"type": "object", "properties": {}}, -# ) -# num_functions = 10 - -# # Create functions -# create_multiple_functions_query(agent_id, [function] * num_functions, client=client) - -# # List functions -# result = list_functions_by_agent_query(agent_id, client=client) - -# assert len(result["tool_id"]) == num_functions diff --git a/agents-api/agents_api/models/tools/update_tool.py b/agents-api/agents_api/models/tools/update_tool.py index aa1238643..d1676e984 100644 --- a/agents-api/agents_api/models/tools/update_tool.py +++ b/agents-api/agents_api/models/tools/update_tool.py @@ -1,15 +1,43 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError from ...autogen.openapi_model import ( - PatchToolRequest, ResourceUpdatedResponse, UpdateToolRequest, ) -from .patch_tool import patch_tool +from ...common.utils.cozo import cozo_process_mutate_data +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + ResourceUpdatedResponse, + one=True, + transform=lambda d: {"id": d["tool_id"], "jobs": [], **d}, + _kind="inserted", +) +@cozo_query @beartype def update_tool( *, @@ -18,12 +46,62 @@ def update_tool( tool_id: UUID, data: UpdateToolRequest, **kwargs, -) -> ResourceUpdatedResponse: - # Same as patch_tool_query, but with a different request payload - return patch_tool( - developer_id=developer_id, - agent_id=agent_id, - tool_id=tool_id, - patch_tool=PatchToolRequest(**data.model_dump()), - **kwargs, +) -> tuple[list[str], dict]: + agent_id = str(agent_id) + tool_id = str(tool_id) + + # Extract the tool data from the payload + update_data = data.model_dump(exclude_none=True) + + # Assert that only one of the tool type fields is present + tool_specs = [ + (tool_type, update_data.get(tool_type)) + for tool_type in ["function", "integration", "system", "api_call"] + if update_data.get(tool_type) is not None + ] + + assert len(tool_specs) <= 1, "Invalid tool update" + tool_type, tool_spec = tool_specs[0] if tool_specs else (None, None) + + if tool_type is not None: + update_data["type"] = update_data.get("type", tool_type) + assert update_data["type"] == tool_type, "Invalid tool update" + + update_data["spec"] = tool_spec + del update_data[tool_type] + + tool_cols, tool_vals = cozo_process_mutate_data( + { + **update_data, + "agent_id": agent_id, + "tool_id": tool_id, + } + ) + + # Construct the datalog query for updating the tool information + patch_query = f""" + input[{tool_cols}] <- $input + + ?[{tool_cols}, created_at, updated_at] := + *tools {{ + agent_id: to_uuid($agent_id), + tool_id: to_uuid($tool_id), + created_at + }}, + input[{tool_cols}], + updated_at = now() + + :put tools {{ {tool_cols}, created_at, updated_at }} + :returning + """ + + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query(developer_id, "agents", agent_id=agent_id), + patch_query, + ] + + return ( + queries, + dict(input=tool_vals, spec=tool_spec, agent_id=agent_id, tool_id=tool_id), ) diff --git a/agents-api/agents_api/models/user/__init__.py b/agents-api/agents_api/models/user/__init__.py index 1b1b2c0d9..5ae76865f 100644 --- a/agents-api/agents_api/models/user/__init__.py +++ b/agents-api/agents_api/models/user/__init__.py @@ -7,3 +7,12 @@ - list_users_query: Lists users associated with a specific developer, with support for pagination and metadata-based filtering. - patch_user_query: Updates a user's information in the CozoDB database, allowing for changes to fields such as name, about, and metadata. """ + +# ruff: noqa: F401, F403, F405 + +from .create_or_update_user import create_or_update_user +from .create_user import create_user +from .get_user import get_user +from .list_users import list_users +from .patch_user import patch_user +from .update_user import update_user diff --git a/agents-api/agents_api/models/user/create_or_update_user.py b/agents-api/agents_api/models/user/create_or_update_user.py index 5784db880..9e9045e74 100644 --- a/agents-api/agents_api/models/user/create_or_update_user.py +++ b/agents-api/agents_api/models/user/create_or_update_user.py @@ -3,6 +3,7 @@ It includes functions to construct and execute datalog queries for inserting new user records. """ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -19,6 +20,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/user/create_user.py b/agents-api/agents_api/models/user/create_user.py index fda675231..9dd036c57 100644 --- a/agents-api/agents_api/models/user/create_user.py +++ b/agents-api/agents_api/models/user/create_user.py @@ -3,6 +3,7 @@ It defines a query for inserting user data into the 'users' relation. """ +from typing import Any, TypeVar from uuid import UUID, uuid4 from beartype import beartype @@ -19,15 +20,28 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { + lambda e: isinstance(e, QueryException) + and "asserted to return some results, but returned none" + in str(e): lambda *_: HTTPException( + detail="developer not found", status_code=403 + ), QueryException: partialclass(HTTPException, status_code=400), ValidationError: partialclass(HTTPException, status_code=400), TypeError: partialclass(HTTPException, status_code=400), } ) -@wrap_in_class(User, one=True, transform=lambda d: {"id": UUID(d.pop("user_id")), **d}) +@wrap_in_class( + User, + one=True, + transform=lambda d: {"id": UUID(d.pop("user_id")), **d}, + _kind="inserted", +) @cozo_query @beartype def create_user( diff --git a/agents-api/agents_api/models/user/delete_user.py b/agents-api/agents_api/models/user/delete_user.py new file mode 100644 index 000000000..b5fcb8424 --- /dev/null +++ b/agents-api/agents_api/models/user/delete_user.py @@ -0,0 +1,93 @@ +""" +This module contains the implementation of the delete_user_query function, which is responsible for deleting an user and its related default settings from the CozoDB database. +""" + +from typing import Any, TypeVar +from uuid import UUID + +from beartype import beartype +from fastapi import HTTPException +from pycozo.client import QueryException +from pydantic import ValidationError + +from ...autogen.openapi_model import ResourceDeletedResponse +from ...common.utils.datetime import utcnow +from ..utils import ( + cozo_query, + partialclass, + rewrap_exceptions, + verify_developer_id_query, + verify_developer_owns_resource_query, + wrap_in_class, +) + +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + + +@rewrap_exceptions( + { + QueryException: partialclass(HTTPException, status_code=400), + ValidationError: partialclass(HTTPException, status_code=400), + TypeError: partialclass(HTTPException, status_code=400), + } +) +@wrap_in_class( + ResourceDeletedResponse, + one=True, + transform=lambda d: { + "id": UUID(d.pop("user_id")), + "deleted_at": utcnow(), + "jobs": [], + }, + _kind="deleted", +) +@cozo_query +@beartype +def delete_user(*, developer_id: UUID, user_id: UUID) -> tuple[list[str], dict]: + """ + Constructs and returns a datalog query for deleting an user and its default settings from the database. + + Parameters: + - developer_id (UUID): The UUID of the developer owning the user. + - user_id (UUID): The UUID of the user to be deleted. + - client (CozoClient, optional): An instance of the CozoClient to execute the query. + + Returns: + - ResourceDeletedResponse: The response indicating the deletion of the user. + """ + + queries = [ + verify_developer_id_query(developer_id), + verify_developer_owns_resource_query(developer_id, "users", user_id=user_id), + """ + # Delete docs + ?[owner_type, owner_id, doc_id] := + *docs{ + owner_id, + owner_type, + doc_id, + }, + owner_id = to_uuid($user_id), + owner_type = "user" + + :delete docs { + owner_type, + owner_id, + doc_id + } + :returning + """, + """ + # Delete the user + ?[user_id, developer_id] <- [[$user_id, $developer_id]] + + :delete users { + developer_id, + user_id + } + :returning + """, + ] + + return (queries, {"user_id": str(user_id), "developer_id": str(developer_id)}) diff --git a/agents-api/agents_api/models/user/get_user.py b/agents-api/agents_api/models/user/get_user.py index fa2dd5d38..181bf05f0 100644 --- a/agents-api/agents_api/models/user/get_user.py +++ b/agents-api/agents_api/models/user/get_user.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -15,9 +16,21 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { + lambda e: isinstance(e, QueryException) + and "Developer not found" in str(e): lambda *_: HTTPException( + detail="developer does not exist", status_code=403 + ), + lambda e: isinstance(e, QueryException) + and "asserted to return some results, but returned none" + in str(e): lambda *_: HTTPException( + detail="developer doesnt own resource", status_code=404 + ), QueryException: partialclass(HTTPException, status_code=400), ValidationError: partialclass(HTTPException, status_code=400), TypeError: partialclass(HTTPException, status_code=400), diff --git a/agents-api/agents_api/models/user/list_users.py b/agents-api/agents_api/models/user/list_users.py index 5fdc62ef0..57dc9b8c8 100644 --- a/agents-api/agents_api/models/user/list_users.py +++ b/agents-api/agents_api/models/user/list_users.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Any, Literal, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +16,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { diff --git a/agents-api/agents_api/models/user/patch_user.py b/agents-api/agents_api/models/user/patch_user.py index fdad01fe7..faf38298c 100644 --- a/agents-api/agents_api/models/user/patch_user.py +++ b/agents-api/agents_api/models/user/patch_user.py @@ -1,5 +1,6 @@ """Module for generating datalog queries to update user information in the 'cozodb' database.""" +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -19,6 +20,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -31,6 +35,7 @@ ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["user_id"], "jobs": [], **d}, + _kind="inserted", ) @cozo_query @beartype diff --git a/agents-api/agents_api/models/user/test_user_queries.py b/agents-api/agents_api/models/user/test_user_queries.py deleted file mode 100644 index e33d044e1..000000000 --- a/agents-api/agents_api/models/user/test_user_queries.py +++ /dev/null @@ -1,170 +0,0 @@ -# """This module contains tests for user-related queries against the 'cozodb' database. It includes tests for creating, updating, and retrieving user information.""" - -# # Tests for user queries -# from uuid import uuid4 - -# from cozo_migrate.api import init, apply -# from pycozo import Client -# from ward import raises, test - -# from .create_user import create_user_query -# from .get_user import get_user_query -# from .list_users import list_users_query -# from .update_user import update_user_query - - -# def cozo_client(migrations_dir: str = "./migrations"): -# """Initializes a new Cozo client for testing, applying all migrations to ensure the database schema is up to date.""" -# # Create a new client for each test -# # and initialize the schema. -# client = Client() - -# init(client) -# apply(client, migrations_dir=migrations_dir, all_=True) - -# return client - - -# @test("model: create user") -# def _(): -# """Test that a user can be successfully created.""" -# client = cozo_client() -# user_id = uuid4() -# developer_id = uuid4() - -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ) - - -# @test("model: create user twice should fail") -# def _(): -# """Test that attempting to create the same user twice results in a failure.""" -# client = cozo_client() -# user_id = uuid4() -# developer_id = uuid4() - -# # Expect an exception to be raised as creating the same user twice should not be allowed. -# # Should fail because the user already exists. -# with raises(Exception): -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ) - -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ) - - -# @test("model: update non-existent user should fail") -# def _(): -# """Test that attempting to update a non-existent user results in a failure.""" -# client = cozo_client() -# user_id = uuid4() -# developer_id = uuid4() - -# # Should fail because the user doecn't exists. -# with raises(Exception): -# update_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ) - - -# @test("model: update user") -# def _(): -# """Test that an existing user's information can be successfully updated.""" -# client = cozo_client() -# user_id = uuid4() -# developer_id = uuid4() - -# create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ) - -# # Verify that the 'updated_at' timestamp is greater than the 'created_at' timestamp, indicating a successful update. -# update_result = update_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="updated user", -# about="updated user about", -# client=client, -# ) - -# data = update_result.iloc[0].to_dict() - -# assert data["updated_at"] > data["created_at"] - - -# @test("model: get user not exists") -# def _(): -# """Test that retrieving a non-existent user returns an empty result.""" -# client = cozo_client() -# user_id = uuid4() -# developer_id = uuid4() - -# # Ensure that the query for an existing user returns exactly one result. -# result = get_user_query( -# user_id=user_id, -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 0 - - -# @test("model: get user exists") -# def _(): -# """Test that retrieving an existing user returns the correct user information.""" -# client = cozo_client() -# user_id = uuid4() -# developer_id = uuid4() - -# result = create_user_query( -# user_id=user_id, -# developer_id=developer_id, -# name="test user", -# about="test user about", -# client=client, -# ) - -# result = get_user_query( -# user_id=user_id, -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 1 - - -# @test("model: list users") -# def _(): -# """Test that listing users returns a collection of user information.""" -# client = cozo_client() -# developer_id = uuid4() - -# result = list_users_query( -# developer_id=developer_id, -# client=client, -# ) - -# assert len(result["id"]) == 0 diff --git a/agents-api/agents_api/models/user/update_user.py b/agents-api/agents_api/models/user/update_user.py index f929b8d7e..9a13d9369 100644 --- a/agents-api/agents_api/models/user/update_user.py +++ b/agents-api/agents_api/models/user/update_user.py @@ -1,3 +1,4 @@ +from typing import Any, TypeVar from uuid import UUID from beartype import beartype @@ -16,6 +17,9 @@ wrap_in_class, ) +ModelT = TypeVar("ModelT", bound=Any) +T = TypeVar("T") + @rewrap_exceptions( { @@ -28,11 +32,12 @@ ResourceUpdatedResponse, one=True, transform=lambda d: {"id": d["user_id"], "jobs": [], **d}, + _kind="inserted", ) @cozo_query @beartype def update_user( - *, developer_id: UUID, user_id: UUID, update_user: UpdateUserRequest + *, developer_id: UUID, user_id: UUID, data: UpdateUserRequest ) -> tuple[list[str], dict]: """Updates user information in the 'cozodb' database. @@ -47,7 +52,7 @@ def update_user( """ user_id = str(user_id) developer_id = str(developer_id) - update_data = update_user.model_dump() + update_data = data.model_dump() # Prepares the update data by filtering out None values and adding user_id and developer_id. user_update_cols, user_update_vals = cozo_process_mutate_data( diff --git a/agents-api/agents_api/models/utils.py b/agents-api/agents_api/models/utils.py index 120c8949c..bc36af8d2 100644 --- a/agents-api/agents_api/models/utils.py +++ b/agents-api/agents_api/models/utils.py @@ -1,16 +1,17 @@ import inspect import re from functools import partialmethod, wraps -from typing import Any, Callable, ParamSpec, Type +from typing import Any, Callable, ParamSpec, Type, TypeVar from uuid import UUID import pandas as pd from pydantic import BaseModel -from ..clients.cozo import client as cozo_client from ..common.utils.cozo import uuid_int_list_to_uuid4 P = ParamSpec("P") +T = TypeVar("T") +ModelT = TypeVar("ModelT", bound=BaseModel) def fix_uuid( @@ -63,14 +64,65 @@ class NewCls(cls): return NewCls +def mark_session_updated_query(developer_id: UUID | str, session_id: UUID | str) -> str: + return f""" + input[developer_id, session_id] <- [[ + to_uuid("{str(developer_id)}"), + to_uuid("{str(session_id)}"), + ]] + + ?[ + developer_id, + session_id, + situation, + summary, + created_at, + metadata, + render_templates, + token_budget, + context_overflow, + updated_at, + ] := + input[developer_id, session_id], + *sessions {{ + session_id, + situation, + summary, + created_at, + metadata, + render_templates, + token_budget, + context_overflow, + @ 'NOW' + }}, + updated_at = [floor(now()), true] + + :put sessions {{ + developer_id, + session_id, + situation, + summary, + created_at, + metadata, + render_templates, + token_budget, + context_overflow, + updated_at, + }} + """ + + def verify_developer_id_query(developer_id: UUID | str) -> str: return f""" - ?[developer_id] := + matched[count(developer_id)] := *developers{{ developer_id, }}, developer_id = to_uuid("{str(developer_id)}") - :assert some + ?[exists] := + matched[num], + exists = num > 0, + assert(exists, "Developer does not exist") """ @@ -113,7 +165,7 @@ def cozo_query( func: Callable[P, tuple[str | list[str], dict]] | None = None, debug: bool | None = None, ): - def cozo_query_dec(func: Callable[P, tuple[str | list[str], dict]]): + def cozo_query_dec(func: Callable[P, tuple[str | list[Any], dict]]): """ Decorator that wraps a function that takes arbitrary arguments, and returns a (query string, variables) tuple. @@ -122,28 +174,36 @@ def cozo_query_dec(func: Callable[P, tuple[str | list[str], dict]]): and then run the query using the client, returning a DataFrame. """ - if debug: - from pprint import pprint + from pprint import pprint @wraps(func) - def wrapper(*args, client=cozo_client, **kwargs) -> pd.DataFrame: + def wrapper(*args: P.args, client=None, **kwargs: P.kwargs) -> pd.DataFrame: queries, variables = func(*args, **kwargs) if isinstance(queries, str): query = queries else: - queries = [query for query in queries if query] + queries = [str(query) for query in queries if query] query = "}\n\n{\n".join(queries) query = f"{{ {query} }}" + debug and print(query) debug and pprint( dict( - query=query, variables=variables, ) ) - result = client.run(query, variables) + # Run the query + from ..clients import cozo + + try: + client = client or cozo.get_cozo_client() + result = client.run(query, variables) + + except Exception as e: + debug and print(repr(getattr(e, "__cause__", None) or e)) + raise # Need to fix the UUIDs in the result result = result.map(fix_uuid_if_present) @@ -169,24 +229,32 @@ def wrapper(*args, client=cozo_client, **kwargs) -> pd.DataFrame: def wrap_in_class( - cls: Type[BaseModel] | Callable[..., BaseModel], + cls: Type[ModelT] | Callable[..., ModelT], one: bool = False, transform: Callable[[dict], dict] | None = None, + _kind: str | None = None, ): - def decorator(func: Callable[..., pd.DataFrame]): + def decorator(func: Callable[P, pd.DataFrame]): @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args: P.args, **kwargs: P.kwargs) -> ModelT | list[ModelT]: df = func(*args, **kwargs) # Convert df to list of dicts + if _kind: + df = df[df["_kind"] == _kind] + data = df.to_dict(orient="records") nonlocal transform transform = transform or (lambda x: x) + if one: - return cls(**transform(data[0])) + assert len(data) >= 1, "Expected one result, got none" + obj: ModelT = cls(**transform(data[0])) + return obj - return [cls(**item) for item in map(transform, data)] + objs: list[ModelT] = [cls(**item) for item in map(transform, data)] + return objs # Set the wrapped function as an attribute of the wrapper, # forwards the __wrapped__ attribute if it exists. @@ -204,13 +272,13 @@ def rewrap_exceptions( ], /, ): - def decorator(func: Callable[..., Any]): + def decorator(func: Callable[P, T]): @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: nonlocal mapping try: - result = func(*args, **kwargs) + result: T = func(*args, **kwargs) except BaseException as error: for check, transform in mapping.items(): @@ -227,7 +295,9 @@ def wrapper(*args, **kwargs): else transform(error) ) - raise transform(new_error) from error + setattr(new_error, "__cause__", error) + + raise new_error from error raise diff --git a/agents-api/agents_api/rec_sum/data.py b/agents-api/agents_api/rec_sum/data.py index e7857a037..23474c995 100644 --- a/agents-api/agents_api/rec_sum/data.py +++ b/agents-api/agents_api/rec_sum/data.py @@ -1,24 +1,25 @@ import json from pathlib import Path +from typing import Any -module_directory = Path(__file__).parent +module_directory: Path = Path(__file__).parent with open(f"{module_directory}/entities_example_chat.json", "r") as _f: - entities_example_chat = json.load(_f) + entities_example_chat: Any = json.load(_f) with open(f"{module_directory}/trim_example_chat.json", "r") as _f: - trim_example_chat = json.load(_f) + trim_example_chat: Any = json.load(_f) with open(f"{module_directory}/trim_example_result.json", "r") as _f: - trim_example_result = json.load(_f) + trim_example_result: Any = json.load(_f) with open(f"{module_directory}/summarize_example_chat.json", "r") as _f: - summarize_example_chat = json.load(_f) + summarize_example_chat: Any = json.load(_f) with open(f"{module_directory}/summarize_example_result.json", "r") as _f: - summarize_example_result = json.load(_f) + summarize_example_result: Any = json.load(_f) diff --git a/agents-api/agents_api/rec_sum/entities.py b/agents-api/agents_api/rec_sum/entities.py index 11346447c..01b29951b 100644 --- a/agents-api/agents_api/rec_sum/entities.py +++ b/agents-api/agents_api/rec_sum/entities.py @@ -12,21 +12,21 @@ ## Entities ## ############## -entities_example_plan = """\ +entities_example_plan: str = """\ Thinking step by step: - To add context for future entries, let's outline the main entities in the session above. - In this session, as mentioned in the first message metadata, the user's name is Camille and the assistant's name is JaneBot. - They talk about Elon Musk and the banana tattoo on Camille's arm briefly.""" -entities_example_result = """\ +entities_example_result: str = """\ 1. Camille (The user): Humorous, creative, and enjoys playful banter. 2. JaneBot (The assistant): Engages in lighthearted conversation and tries to guess user's thoughts. 3. Elon Musk: Camille and JaneBot discuss the polarizing tech and space industry figure. 4. Banana Tattoo: Camille has a tattoo of a banana on their arm.""" -entities_instructions = """\ +entities_instructions: str = """\ Your goal is to identify the main entities in the session. Entities should include: - Characters in the conversation: Assistant, User1, User2 - People references or spoken about @@ -57,7 +57,7 @@ def make_entities_prompt( @retry(stop=stop_after_attempt(2)) async def get_entities( chat_session, - model="gpt-4-turbo", + model="gpt-4o", stop=[" dict: - base_url, api_key = None, None - if model in LOCAL_MODELS: - base_url, api_key = model_inference_url, model_api_key - model = f"openai/{model}" - - result = await acompletion( + result = await litellm.acompletion( model=model, messages=messages, - base_url=base_url, - api_key=api_key, + **kwargs, ) return result.choices[0].message.json() diff --git a/agents-api/agents_api/rec_sum/summarize.py b/agents-api/agents_api/rec_sum/summarize.py index 97f39905b..46a6662a3 100644 --- a/agents-api/agents_api/rec_sum/summarize.py +++ b/agents-api/agents_api/rec_sum/summarize.py @@ -1,4 +1,5 @@ import json +from typing import List from tenacity import retry, stop_after_attempt @@ -10,7 +11,7 @@ ## summarize ## ########## -summarize_example_plan = """\ +summarize_example_plan: str = """\ Planning step by step: - We can replace entries 1,2,3,4 with a summary of those messages. - We can replace entries 5,6,7,8 similarly. @@ -23,7 +24,7 @@ - We can safely summarize message 34's essay into just the salient points only.""" -summarize_instructions = """\ +summarize_instructions: str = """\ Your goal is to compactify the history by coalescing redundant information in messages into their summary in order to reduce its size and save costs. Instructions: @@ -34,7 +35,9 @@ - VERY IMPORTANT: Add the indices of messages that are being summarized so that those messages can then be removed from the session otherwise, there'll be no way to identify which messages to remove. See example for more details.""" -def make_summarize_prompt(session, user="a user", assistant="gpt-4-turbo", **_): +def make_summarize_prompt( + session, user="a user", assistant="gpt-4-turbo", **_ +) -> List[str]: return [ f"You are given a session history of a chat between {user or 'a user'} and {assistant or 'gpt-4-turbo'}. The session is formatted in the ChatML JSON format (from OpenAI).\n\n{summarize_instructions}\n\n\n{json.dumps(add_indices(summarize_example_chat), indent=2)}\n\n\n\n{summarize_example_plan}\n\n\n\n{json.dumps(summarize_example_result, indent=2)}\n", f"Begin! Write the summarized messages as a json list just like the example above. First write your plan inside and then your answer between . Don't forget to add the indices of the messages being summarized alongside each summary.\n\n\n{json.dumps(add_indices(session), indent=2)}\n\n", @@ -44,7 +47,7 @@ def make_summarize_prompt(session, user="a user", assistant="gpt-4-turbo", **_): @retry(stop=stop_after_attempt(2)) async def summarize_messages( chat_session, - model="gpt-4-turbo", + model="gpt-4o", stop=[" List[str]: return [ f"You are given a session history of a chat between {user or 'a user'} and {assistant or 'gpt-4-turbo'}. The session is formatted in the ChatML JSON format (from OpenAI).\n\n{trim_instructions}\n\n\n{json.dumps(add_indices(trim_example_chat), indent=2)}\n\n\n\n{trim_example_plan}\n\n\n\n{json.dumps(trim_example_result, indent=2)}\n", f"Begin! Write the trimmed messages as a json list. First write your plan inside and then your answer between .\n\n\n{json.dumps(add_indices(session), indent=2)}\n\n", @@ -42,7 +43,7 @@ def make_trim_prompt(session, user="a user", assistant="gpt-4-turbo", **_): @retry(stop=stop_after_attempt(2)) async def trim_messages( chat_session, - model="gpt-4-turbo", + model="gpt-4o", stop=[" Dict[str, _T2]: return { key: value for key, value in dict(role=role, name=name, content=content).items() @@ -13,39 +18,39 @@ def make(content, role="system", name=None, **_): } @staticmethod - def user(content, name=None): + def user(content, name=None) -> Any: return chatml.make(role="user", content=content, name=name) @staticmethod - def assistant(content, name=None): + def assistant(content, name=None) -> Any: return chatml.make(role="assistant", content=content, name=name) @staticmethod - def system(content, name=None): + def system(content, name=None) -> Any: return chatml.make(content, name=name) @staticmethod - def thought(content, name=None): + def thought(content, name=None) -> Any: return chatml.make(content, name="thought") @staticmethod - def information(content): + def information(content) -> Any: return chatml.system(content, name="information") @staticmethod - def summary(content): + def summary(content) -> Any: return chatml.system(content, name="summary") @staticmethod - def entities(content): + def entities(content) -> Any: return chatml.system(content, name="entity") -def add_indices(list_of_dicts, idx_name="index"): +def add_indices(list_of_dicts, idx_name="index") -> List[dict]: return [{idx_name: i, **msg} for i, msg in enumerate(list_of_dicts)] -def get_names_from_session(session): +def get_names_from_session(session) -> Dict[str, Any]: return { role: next( (msg.get("name", None) for msg in session if msg["role"] == role), None diff --git a/agents-api/agents_api/routers/__init__.py b/agents-api/agents_api/routers/__init__.py index 8d16aa32a..ac26a3e82 100644 --- a/agents-api/agents_api/routers/__init__.py +++ b/agents-api/agents_api/routers/__init__.py @@ -9,3 +9,12 @@ Each sub-module defines its own set of API endpoints and is responsible for handling requests and responses related to its domain, ensuring a modular and organized approach to API development. """ + +# ruff: noqa: F401 + +from .agents import router as agents_router +from .docs import router as docs_router +from .jobs import router as jobs_router +from .sessions import router as sessions_router +from .tasks import router as tasks_router +from .users import router as users_router diff --git a/agents-api/agents_api/routers/agents/__init__.py b/agents-api/agents_api/routers/agents/__init__.py index 54624e374..2eadecb3d 100644 --- a/agents-api/agents_api/routers/agents/__init__.py +++ b/agents-api/agents_api/routers/agents/__init__.py @@ -1,7 +1,15 @@ -from .create_agent import create_agent # noqa: F401 -from .delete_agent import delete_agent # noqa: F401 -from .get_agent_details import get_agent_details # noqa: F401 -from .list_agents import list_agents # noqa: F401 -from .patch_agent import patch_agent # noqa: F401 -from .router import router # noqa: F401 -from .update_agent import update_agent # noqa: F401 +# ruff: noqa: F401 + +from .create_agent import create_agent +from .create_agent_tool import create_agent_tool +from .create_or_update_agent import create_or_update_agent +from .delete_agent import delete_agent +from .delete_agent_tool import delete_agent_tool +from .get_agent_details import get_agent_details +from .list_agent_tools import list_agent_tools +from .list_agents import list_agents +from .patch_agent import patch_agent +from .patch_agent_tool import patch_agent_tool +from .router import router +from .update_agent import update_agent +from .update_agent_tool import update_agent_tool diff --git a/agents-api/agents_api/routers/agents/create_agent.py b/agents-api/agents_api/routers/agents/create_agent.py index d1b04e1f4..d1cac0d6b 100644 --- a/agents-api/agents_api/routers/agents/create_agent.py +++ b/agents-api/agents_api/routers/agents/create_agent.py @@ -1,37 +1,27 @@ from typing import Annotated -from uuid import uuid4 from fastapi import Depends from pydantic import UUID4 from starlette.status import HTTP_201_CREATED +import agents_api.models as models + from ...autogen.openapi_model import ( CreateAgentRequest, ResourceCreatedResponse, ) from ...dependencies.developer_id import get_developer_id -from ...models.agent.create_agent import create_agent as create_agent_query from .router import router @router.post("/agents", status_code=HTTP_201_CREATED, tags=["agents"]) async def create_agent( - request: CreateAgentRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + data: CreateAgentRequest, ) -> ResourceCreatedResponse: - new_agent_id = uuid4() - - _, resp = next( - create_agent_query( - developer_id=x_developer_id, - agent_id=new_agent_id, - name=request.name, - about=request.about, - instructions=request.instructions or [], - model=request.model, - default_settings=request.default_settings or {}, - metadata=request.metadata or {}, - ).iterrows() + agent = models.agent.create_agent( + developer_id=x_developer_id, + data=data, ) - return ResourceCreatedResponse(id=new_agent_id, created_at=resp["created_at"]) + return ResourceCreatedResponse(id=agent.id, created_at=agent.created_at, jobs=[]) diff --git a/agents-api/agents_api/routers/agents/create_agent_tools.py b/agents-api/agents_api/routers/agents/create_agent_tool.py similarity index 53% rename from agents-api/agents_api/routers/agents/create_agent_tools.py rename to agents-api/agents_api/routers/agents/create_agent_tool.py index cafd6cb07..46442ba01 100644 --- a/agents-api/agents_api/routers/agents/create_agent_tools.py +++ b/agents-api/agents_api/routers/agents/create_agent_tool.py @@ -5,29 +5,26 @@ from pydantic import UUID4 from starlette.status import HTTP_201_CREATED +import agents_api.models as models + from ...autogen.openapi_model import ( CreateToolRequest, ResourceCreatedResponse, ) from ...dependencies.developer_id import get_developer_id -from ...models.tools.create_tools import create_tools as create_tools_query from .router import router @router.post("/agents/{agent_id}/tools", status_code=HTTP_201_CREATED, tags=["agents"]) -async def create_agent_tools( +async def create_agent_tool( agent_id: UUID, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - data: list[CreateToolRequest], - ignore_existing: bool = False, + data: CreateToolRequest, ) -> ResourceCreatedResponse: - _, resp = next( - create_tools_query( - developer_id=x_developer_id, - agent_id=agent_id, - data=data, - ignore_existing=ignore_existing, - ).iterrows() - ) + tool = models.tools.create_tools( + developer_id=x_developer_id, + agent_id=agent_id, + data=[data], + )[0] - return ResourceCreatedResponse(id=resp["tool_id"], created_at=resp["created_at"]) + return ResourceCreatedResponse(id=tool.id, created_at=tool.created_at) diff --git a/agents-api/agents_api/routers/agents/create_or_update_agent.py b/agents-api/agents_api/routers/agents/create_or_update_agent.py index bcd53f800..fc2fa5563 100644 --- a/agents-api/agents_api/routers/agents/create_or_update_agent.py +++ b/agents-api/agents_api/routers/agents/create_or_update_agent.py @@ -5,29 +5,26 @@ from pydantic import UUID4 from starlette.status import HTTP_201_CREATED +import agents_api.models as models + from ...autogen.openapi_model import ( CreateOrUpdateAgentRequest, ResourceCreatedResponse, ) from ...dependencies.developer_id import get_developer_id -from ...models.agent.create_or_update_agent import ( - create_or_update_agent as create_or_update_agent_query, -) from .router import router @router.post("/agents/{agent_id}", status_code=HTTP_201_CREATED, tags=["agents"]) async def create_or_update_agent( agent_id: UUID, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], data: CreateOrUpdateAgentRequest, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], ) -> ResourceCreatedResponse: - _, resp = next( - create_or_update_agent_query( - developer_id=x_developer_id, - agent_id=agent_id, - data=data, - ).iterrows() + agent = models.agent.create_or_update_agent( + developer_id=x_developer_id, + agent_id=agent_id, + data=data, ) - return ResourceCreatedResponse(id=agent_id, created_at=resp["created_at"]) + return ResourceCreatedResponse(id=agent.id, created_at=agent.created_at, jobs=[]) diff --git a/agents-api/agents_api/routers/agents/delete_agent.py b/agents-api/agents_api/routers/agents/delete_agent.py index 8a94dda46..4603c2217 100644 --- a/agents-api/agents_api/routers/agents/delete_agent.py +++ b/agents-api/agents_api/routers/agents/delete_agent.py @@ -1,12 +1,10 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_202_ACCEPTED, HTTP_404_NOT_FOUND +from starlette.status import HTTP_202_ACCEPTED from ...autogen.openapi_model import ResourceDeletedResponse -from ...common.exceptions.agents import AgentNotFoundError -from ...common.utils.datetime import utcnow from ...dependencies.developer_id import get_developer_id from ...models.agent.delete_agent import delete_agent as delete_agent_query from .router import router @@ -16,8 +14,4 @@ async def delete_agent( agent_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)] ) -> ResourceDeletedResponse: - try: - delete_agent_query(x_developer_id, agent_id) - except AgentNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) - return ResourceDeletedResponse(id=agent_id, deleted_at=utcnow()) + return delete_agent_query(developer_id=x_developer_id, agent_id=agent_id) diff --git a/agents-api/agents_api/routers/agents/delete_agent_tools.py b/agents-api/agents_api/routers/agents/delete_agent_tool.py similarity index 54% rename from agents-api/agents_api/routers/agents/delete_agent_tools.py rename to agents-api/agents_api/routers/agents/delete_agent_tool.py index b1177b094..c220a2cf7 100644 --- a/agents-api/agents_api/routers/agents/delete_agent_tools.py +++ b/agents-api/agents_api/routers/agents/delete_agent_tool.py @@ -5,24 +5,19 @@ from pydantic import UUID4 from ...autogen.openapi_model import ResourceDeletedResponse -from ...common.utils.datetime import utcnow from ...dependencies.developer_id import get_developer_id -from ...models.tools.delete_tools import delete_tool as delete_tool_query +from ...models.tools.delete_tool import delete_tool from .router import router @router.delete("/agents/{agent_id}/tools/{tool_id}", tags=["agents"]) -async def delete_agent_tools( +async def delete_agent_tool( agent_id: UUID, tool_id: UUID, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], ) -> ResourceDeletedResponse: - _, resp = next( - delete_tool_query( - developer_id=x_developer_id, - agent_id=agent_id, - tool_id=tool_id, - ).iterrows() + return delete_tool( + developer_id=x_developer_id, + agent_id=agent_id, + tool_id=tool_id, ) - - return ResourceDeletedResponse(id=resp["tool_id"], deleted_at=utcnow()) diff --git a/agents-api/agents_api/routers/agents/get_agent_details.py b/agents-api/agents_api/routers/agents/get_agent_details.py index d9c865cf0..04511527a 100644 --- a/agents-api/agents_api/routers/agents/get_agent_details.py +++ b/agents-api/agents_api/routers/agents/get_agent_details.py @@ -1,11 +1,9 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_404_NOT_FOUND from ...autogen.openapi_model import Agent -from ...common.exceptions.agents import AgentNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.agent.get_agent import get_agent as get_agent_query from .router import router @@ -16,10 +14,4 @@ async def get_agent_details( agent_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], ) -> Agent: - try: - agent = get_agent_query(developer_id=x_developer_id, agent_id=agent_id) - if not agent: - raise AgentNotFoundError(x_developer_id, agent_id) - return Agent(**agent) - except AgentNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) + return get_agent_query(developer_id=x_developer_id, agent_id=agent_id) diff --git a/agents-api/agents_api/routers/agents/list_agent_tools.py b/agents-api/agents_api/routers/agents/list_agent_tools.py index d4068d8a2..dff135920 100644 --- a/agents-api/agents_api/routers/agents/list_agent_tools.py +++ b/agents-api/agents_api/routers/agents/list_agent_tools.py @@ -4,6 +4,7 @@ from fastapi import Depends from pydantic import UUID4 +from ...autogen.openapi_model import ListResponse, Tool from ...dependencies.developer_id import get_developer_id from ...models.tools.list_tools import list_tools as list_tools_query from .router import router @@ -11,21 +12,20 @@ @router.get("/agents/{agent_id}/tools", tags=["agents"]) async def list_agent_tools( - agent_id: UUID, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + agent_id: UUID, limit: int = 100, offset: int = 0, sort_by: Literal["created_at", "updated_at"] = "created_at", direction: Literal["asc", "desc"] = "desc", -) -> list[tuple[str, dict]]: - return [ - row - for _, row in list_tools_query( - developer_id=x_developer_id, - agent_id=agent_id, - limit=limit, - offset=offset, - sort_by=sort_by, - direction=direction, - ).iterrows() - ] +) -> ListResponse[Tool]: + tools = list_tools_query( + agent_id=agent_id, + developer_id=x_developer_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + ) + + return ListResponse[Tool](items=tools) diff --git a/agents-api/agents_api/routers/agents/list_agents.py b/agents-api/agents_api/routers/agents/list_agents.py index 652a26de7..f64d4bdbf 100644 --- a/agents-api/agents_api/routers/agents/list_agents.py +++ b/agents-api/agents_api/routers/agents/list_agents.py @@ -1,11 +1,13 @@ -from typing import Annotated, List +import json +from json import JSONDecodeError +from typing import Annotated, Literal -from fastapi import Depends +from fastapi import Depends, HTTPException, status from pydantic import UUID4 -from ...autogen.openapi_model import Agent +from ...autogen.openapi_model import Agent, ListResponse from ...dependencies.developer_id import get_developer_id -from ...models.agent.list_agents import list_agents +from ...models.agent.list_agents import list_agents as list_agents_query from .router import router @@ -14,12 +16,25 @@ async def list_agents( x_developer_id: Annotated[UUID4, Depends(get_developer_id)], limit: int = 100, offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", metadata_filter: str = "{}", -) -> List[Agent]: - agents = list_agents( +) -> ListResponse[Agent]: + try: + metadata_filter = json.loads(metadata_filter) + except JSONDecodeError: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="metadata_filter is not a valid JSON", + ) + + agents = list_agents_query( developer_id=x_developer_id, limit=limit, offset=offset, + sort_by=sort_by, + direction=direction, metadata_filter=metadata_filter, ) - return [Agent(**agent) for agent in agents] + + return ListResponse[Agent](items=agents) diff --git a/agents-api/agents_api/routers/agents/patch_agent.py b/agents-api/agents_api/routers/agents/patch_agent.py index 7731dda81..29596447e 100644 --- a/agents-api/agents_api/routers/agents/patch_agent.py +++ b/agents-api/agents_api/routers/agents/patch_agent.py @@ -1,11 +1,10 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_200_OK, HTTP_404_NOT_FOUND +from starlette.status import HTTP_200_OK from ...autogen.openapi_model import PatchAgentRequest, ResourceUpdatedResponse -from ...common.exceptions.agents import AgentNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.agent.patch_agent import patch_agent as patch_agent_query from .router import router @@ -18,21 +17,12 @@ tags=["agents"], ) async def patch_agent( - agent_id: UUID4, - request: PatchAgentRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + agent_id: UUID4, + data: PatchAgentRequest, ) -> ResourceUpdatedResponse: - try: - updated_agent = patch_agent_query( - agent_id=agent_id, - developer_id=x_developer_id, - default_settings=request.default_settings, - name=request.name, - about=request.about, - model=request.model, - metadata=request.metadata, - instructions=request.instructions, - ) - return ResourceUpdatedResponse(**updated_agent) - except AgentNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) + return patch_agent_query( + agent_id=agent_id, + developer_id=x_developer_id, + data=data, + ) diff --git a/agents-api/agents_api/routers/agents/patch_agent_tools.py b/agents-api/agents_api/routers/agents/patch_agent_tool.py similarity index 61% rename from agents-api/agents_api/routers/agents/patch_agent_tools.py rename to agents-api/agents_api/routers/agents/patch_agent_tool.py index ab8e0c206..843fa91eb 100644 --- a/agents-api/agents_api/routers/agents/patch_agent_tools.py +++ b/agents-api/agents_api/routers/agents/patch_agent_tool.py @@ -14,19 +14,15 @@ @router.patch("/agents/{agent_id}/tools/{tool_id}", tags=["agents"]) -async def patch_agent_tools( +async def patch_agent_tool( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], agent_id: UUID, tool_id: UUID, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - patch_tool: PatchToolRequest, + data: PatchToolRequest, ) -> ResourceUpdatedResponse: - _, resp = next( - patch_tool_query( - developer_id=x_developer_id, - agent_id=agent_id, - tool_id=tool_id, - patch_tool=patch_tool, - ).iterrows() + return patch_tool_query( + developer_id=x_developer_id, + agent_id=agent_id, + tool_id=tool_id, + data=data, ) - - return ResourceUpdatedResponse(id=resp["tool_id"], updated_at=resp["updated_at"]) diff --git a/agents-api/agents_api/routers/agents/router.py b/agents-api/agents_api/routers/agents/router.py index af9233c56..5c3ec9311 100644 --- a/agents-api/agents_api/routers/agents/router.py +++ b/agents-api/agents_api/routers/agents/router.py @@ -1,3 +1,3 @@ from fastapi import APIRouter -router = APIRouter() +router: APIRouter = APIRouter() diff --git a/agents-api/agents_api/routers/agents/update_agent.py b/agents-api/agents_api/routers/agents/update_agent.py index e37296ef3..865954a6d 100644 --- a/agents-api/agents_api/routers/agents/update_agent.py +++ b/agents-api/agents_api/routers/agents/update_agent.py @@ -1,11 +1,10 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_200_OK, HTTP_404_NOT_FOUND +from starlette.status import HTTP_200_OK from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateAgentRequest -from ...common.exceptions.agents import AgentNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.agent.update_agent import update_agent as update_agent_query from .router import router @@ -18,23 +17,12 @@ tags=["agents"], ) async def update_agent( - agent_id: UUID4, - request: UpdateAgentRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + agent_id: UUID4, + data: UpdateAgentRequest, ) -> ResourceUpdatedResponse: - try: - _, updated_agent = next( - update_agent_query( - agent_id=agent_id, - developer_id=x_developer_id, - name=request.name, - about=request.about, - model=request.model, - default_settings=request.default_settings, - metadata=request.metadata, - instructions=request.instructions, - ).iterrows() - ) - return ResourceUpdatedResponse(**updated_agent) - except AgentNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) + return update_agent_query( + developer_id=x_developer_id, + agent_id=agent_id, + data=data, + ) diff --git a/agents-api/agents_api/routers/agents/update_agent_tools.py b/agents-api/agents_api/routers/agents/update_agent_tool.py similarity index 65% rename from agents-api/agents_api/routers/agents/update_agent_tools.py rename to agents-api/agents_api/routers/agents/update_agent_tool.py index ac51afd16..60e38ad76 100644 --- a/agents-api/agents_api/routers/agents/update_agent_tools.py +++ b/agents-api/agents_api/routers/agents/update_agent_tool.py @@ -14,19 +14,15 @@ @router.put("/agents/{agent_id}/tools/{tool_id}", tags=["agents"]) -async def update_agent_tools( +async def update_agent_tool( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], agent_id: UUID, tool_id: UUID, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], data: UpdateToolRequest, ) -> ResourceUpdatedResponse: - _, resp = next( - update_tool_query( - developer_id=x_developer_id, - agent_id=agent_id, - tool_id=tool_id, - data=data, - ).iterrows() + return update_tool_query( + developer_id=x_developer_id, + agent_id=agent_id, + tool_id=tool_id, + data=data, ) - - return ResourceUpdatedResponse(id=resp["tool_id"], updated_at=resp["updated_at"]) diff --git a/agents-api/agents_api/routers/docs/__init__.py b/agents-api/agents_api/routers/docs/__init__.py new file mode 100644 index 000000000..10195be77 --- /dev/null +++ b/agents-api/agents_api/routers/docs/__init__.py @@ -0,0 +1,8 @@ +# ruff: noqa: F401 +from .create_doc import create_agent_doc, create_user_doc +from .delete_doc import delete_agent_doc, delete_user_doc +from .embed import embed +from .get_doc import get_doc +from .list_docs import list_agent_docs, list_user_docs +from .router import router +from .search_docs import search_agent_docs, search_user_docs diff --git a/agents-api/agents_api/routers/docs/create_doc.py b/agents-api/agents_api/routers/docs/create_doc.py new file mode 100644 index 000000000..dd41620ae --- /dev/null +++ b/agents-api/agents_api/routers/docs/create_doc.py @@ -0,0 +1,112 @@ +from typing import Annotated +from uuid import UUID, uuid4 + +from fastapi import BackgroundTasks, Depends +from pydantic import UUID4 +from starlette.status import HTTP_201_CREATED +from temporalio.client import Client as TemporalClient + +from ...activities.types import EmbedDocsPayload +from ...autogen.openapi_model import CreateDocRequest, ResourceCreatedResponse +from ...clients import temporal +from ...dependencies.developer_id import get_developer_id +from ...env import temporal_task_queue, testing +from ...models.docs.create_doc import create_doc as create_doc_query +from .router import router + + +async def run_embed_docs_task( + *, + developer_id: UUID, + doc_id: UUID, + title: str, + content: list[str], + job_id: UUID, + background_tasks: BackgroundTasks, + client: TemporalClient | None = None, +): + from ...workflows.embed_docs import EmbedDocsWorkflow + + client = client or (await temporal.get_client()) + + embed_payload = EmbedDocsPayload( + developer_id=developer_id, + doc_id=doc_id, + content=content, + title=title, + embed_instruction=None, + ) + + handle = await client.start_workflow( + EmbedDocsWorkflow.run, + embed_payload, + task_queue=temporal_task_queue, + id=str(job_id), + ) + + # TODO: Remove this conditional once we have a way to run workflows in + # a test environment. + if not testing: + background_tasks.add_task(handle.result) + + return handle + + +@router.post("/users/{user_id}/docs", status_code=HTTP_201_CREATED, tags=["docs"]) +async def create_user_doc( + user_id: UUID4, + data: CreateDocRequest, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + background_tasks: BackgroundTasks, +) -> ResourceCreatedResponse: + doc = create_doc_query( + developer_id=x_developer_id, + owner_type="user", + owner_id=user_id, + data=data, + ) + + embed_job_id = uuid4() + + await run_embed_docs_task( + developer_id=x_developer_id, + doc_id=doc.id, + title=doc.title, + content=doc.content, + job_id=embed_job_id, + background_tasks=background_tasks, + ) + + return ResourceCreatedResponse( + id=doc.id, created_at=doc.created_at, jobs=[embed_job_id] + ) + + +@router.post("/agents/{agent_id}/docs", status_code=HTTP_201_CREATED, tags=["docs"]) +async def create_agent_doc( + agent_id: UUID4, + data: CreateDocRequest, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + background_tasks: BackgroundTasks, +) -> ResourceCreatedResponse: + doc = create_doc_query( + developer_id=x_developer_id, + owner_type="agent", + owner_id=agent_id, + data=data, + ) + + embed_job_id = uuid4() + + await run_embed_docs_task( + developer_id=x_developer_id, + doc_id=doc.id, + title=doc.title, + content=doc.content, + job_id=embed_job_id, + background_tasks=background_tasks, + ) + + return ResourceCreatedResponse( + id=doc.id, created_at=doc.created_at, jobs=[embed_job_id] + ) diff --git a/agents-api/agents_api/routers/docs/delete_doc.py b/agents-api/agents_api/routers/docs/delete_doc.py new file mode 100644 index 000000000..c31bf4051 --- /dev/null +++ b/agents-api/agents_api/routers/docs/delete_doc.py @@ -0,0 +1,42 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 +from starlette.status import HTTP_202_ACCEPTED + +from ...autogen.openapi_model import ResourceDeletedResponse +from ...dependencies.developer_id import get_developer_id +from ...models.docs.delete_doc import delete_doc as delete_doc_query +from .router import router + + +@router.delete( + "/agents/{agent_id}/docs/{doc_id}", status_code=HTTP_202_ACCEPTED, tags=["docs"] +) +async def delete_agent_doc( + doc_id: UUID4, + agent_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], +) -> ResourceDeletedResponse: + return delete_doc_query( + developer_id=x_developer_id, + owner_id=agent_id, + owner_type="agent", + doc_id=doc_id, + ) + + +@router.delete( + "/users/{user_id}/docs/{doc_id}", status_code=HTTP_202_ACCEPTED, tags=["docs"] +) +async def delete_user_doc( + doc_id: UUID4, + user_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], +) -> ResourceDeletedResponse: + return delete_doc_query( + developer_id=x_developer_id, + owner_id=user_id, + owner_type="user", + doc_id=doc_id, + ) diff --git a/agents-api/agents_api/routers/docs/embed.py b/agents-api/agents_api/routers/docs/embed.py new file mode 100644 index 000000000..1de99bfce --- /dev/null +++ b/agents-api/agents_api/routers/docs/embed.py @@ -0,0 +1,28 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 + +import agents_api.clients.embed as embedder + +from ...autogen.openapi_model import ( + EmbedQueryRequest, + EmbedQueryResponse, +) +from ...dependencies.developer_id import get_developer_id +from .router import router + + +@router.post("/embed", tags=["docs"]) +async def embed( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + data: EmbedQueryRequest, +) -> EmbedQueryResponse: + text_to_embed: str | list[str] = data.text + text_to_embed: list[str] = ( + [text_to_embed] if isinstance(text_to_embed, str) else text_to_embed + ) + + vectors = await embedder.embed(inputs=text_to_embed) + + return EmbedQueryResponse(vectors=vectors) diff --git a/agents-api/agents_api/routers/docs/get_doc.py b/agents-api/agents_api/routers/docs/get_doc.py new file mode 100644 index 000000000..febebf1bd --- /dev/null +++ b/agents-api/agents_api/routers/docs/get_doc.py @@ -0,0 +1,17 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 + +from ...autogen.openapi_model import Doc +from ...dependencies.developer_id import get_developer_id +from ...models.docs.get_doc import get_doc as get_doc_query +from .router import router + + +@router.get("/docs/{doc_id}", tags=["docs"]) +async def get_doc( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + doc_id: UUID4, +) -> Doc: + return get_doc_query(developer_id=x_developer_id, doc_id=doc_id) diff --git a/agents-api/agents_api/routers/docs/list_docs.py b/agents-api/agents_api/routers/docs/list_docs.py new file mode 100644 index 000000000..80a6ba6ae --- /dev/null +++ b/agents-api/agents_api/routers/docs/list_docs.py @@ -0,0 +1,75 @@ +import json +from json import JSONDecodeError +from typing import Annotated, Literal + +from fastapi import Depends, HTTPException, status +from pydantic import UUID4 + +from ...autogen.openapi_model import Doc, ListResponse +from ...dependencies.developer_id import get_developer_id +from ...models.docs.list_docs import list_docs as list_docs_query +from .router import router + + +@router.get("/users/{user_id}/docs", tags=["docs"]) +async def list_user_docs( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + user_id: UUID4, + limit: int = 100, + offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", + metadata_filter: str = "{}", +) -> ListResponse[Doc]: + try: + metadata_filter = json.loads(metadata_filter) + except JSONDecodeError: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="metadata_filter is not a valid JSON", + ) + + docs = list_docs_query( + developer_id=x_developer_id, + owner_type="user", + owner_id=user_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + metadata_filter=metadata_filter, + ) + + return ListResponse[Doc](items=docs) + + +@router.get("/agents/{agent_id}/docs", tags=["docs"]) +async def list_agent_docs( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + agent_id: UUID4, + limit: int = 100, + offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", + metadata_filter: str = "{}", +) -> ListResponse[Doc]: + try: + metadata_filter = json.loads(metadata_filter) + except JSONDecodeError: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="metadata_filter is not a valid JSON", + ) + + docs = list_docs_query( + developer_id=x_developer_id, + owner_type="agent", + owner_id=agent_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + metadata_filter=metadata_filter, + ) + + return ListResponse[Doc](items=docs) diff --git a/agents-api/agents_api/routers/docs/router.py b/agents-api/agents_api/routers/docs/router.py new file mode 100644 index 000000000..5c3ec9311 --- /dev/null +++ b/agents-api/agents_api/routers/docs/router.py @@ -0,0 +1,3 @@ +from fastapi import APIRouter + +router: APIRouter = APIRouter() diff --git a/agents-api/agents_api/routers/docs/search_docs.py b/agents-api/agents_api/routers/docs/search_docs.py new file mode 100644 index 000000000..f2864164e --- /dev/null +++ b/agents-api/agents_api/routers/docs/search_docs.py @@ -0,0 +1,115 @@ +import time +from typing import Annotated, Any, Dict, List, Optional, Tuple, Union + +from fastapi import Depends +from pydantic import UUID4 + +from ...autogen.openapi_model import ( + DocSearchResponse, + HybridDocSearchRequest, + TextOnlyDocSearchRequest, + VectorDocSearchRequest, +) +from ...dependencies.developer_id import get_developer_id +from ...models.docs.search_docs_by_embedding import search_docs_by_embedding +from ...models.docs.search_docs_by_text import search_docs_by_text +from ...models.docs.search_docs_hybrid import search_docs_hybrid +from .router import router + + +def get_search_fn_and_params( + search_params, +) -> Tuple[ + Any, Optional[Dict[str, Union[float, int, str, Dict[str, float], List[float]]]] +]: + search_fn, params = None, None + + match search_params: + case TextOnlyDocSearchRequest(text=query, limit=k): + search_fn = search_docs_by_text + params = dict( + query=query, + k=k, + ) + + case VectorDocSearchRequest( + vector=query_embedding, limit=k, confidence=confidence + ): + search_fn = search_docs_by_embedding + params = dict( + query_embedding=query_embedding, + k=k, + confidence=confidence, + ) + + case HybridDocSearchRequest( + text=query, + vector=query_embedding, + limit=k, + confidence=confidence, + alpha=alpha, + ): + search_fn = search_docs_hybrid + params = dict( + query=query, + query_embedding=query_embedding, + k=k, + embed_search_options=dict(confidence=confidence), + alpha=alpha, + ) + + return search_fn, params + + +@router.post("/users/{user_id}/search", tags=["docs"]) +async def search_user_docs( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + search_params: ( + TextOnlyDocSearchRequest | VectorDocSearchRequest | HybridDocSearchRequest + ), + user_id: UUID4, +) -> DocSearchResponse: + search_fn, params = get_search_fn_and_params(search_params) + + start = time.time() + docs = search_fn( + developer_id=x_developer_id, + owners=[("user", user_id)], + **params, + ) + + end = time.time() + + time_taken = end - start + + return DocSearchResponse( + docs=docs, + time=time_taken, + ) + + +@router.post("/agents/{agent_id}/search", tags=["docs"]) +async def search_agent_docs( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + search_params: ( + TextOnlyDocSearchRequest | VectorDocSearchRequest | HybridDocSearchRequest + ), + agent_id: UUID4, +) -> DocSearchResponse: + search_fn, params = get_search_fn_and_params(search_params) + + start = time.time() + docs = search_fn( + developer_id=x_developer_id, + owners=[("agent", agent_id)], + **params, + ) + + end = time.time() + + time_taken = end - start + + return DocSearchResponse( + docs=docs, + time=time_taken, + ) diff --git a/agents-api/agents_api/routers/jobs/routers.py b/agents-api/agents_api/routers/jobs/routers.py index 2d936cb28..c9783053c 100644 --- a/agents-api/agents_api/routers/jobs/routers.py +++ b/agents-api/agents_api/routers/jobs/routers.py @@ -1,31 +1,43 @@ +from typing import Literal + from fastapi import APIRouter from pydantic import UUID4 from temporalio.client import WorkflowExecutionStatus -from agents_api.autogen.openapi_model import JobStatus, State +from agents_api.autogen.openapi_model import JobStatus from agents_api.clients.temporal import get_client -router = APIRouter() +router: APIRouter = APIRouter() -def map_job_status(status: WorkflowExecutionStatus) -> State: +def map_job_status( + status: WorkflowExecutionStatus, +) -> Literal[ + "pending", + "in_progress", + "retrying", + "succeeded", + "aborted", + "failed", + "unknown", +]: match status: case WorkflowExecutionStatus.RUNNING: - return State.in_progress + return "in_progress" case WorkflowExecutionStatus.COMPLETED: - return State.succeeded + return "succeeded" case WorkflowExecutionStatus.FAILED: - return State.failed + return "failed" case WorkflowExecutionStatus.CANCELED: - return State.aborted + return "aborted" case WorkflowExecutionStatus.TERMINATED: - return State.aborted + return "aborted" case WorkflowExecutionStatus.CONTINUED_AS_NEW: - return State.in_progress + return "in_progress" case WorkflowExecutionStatus.TIMED_OUT: - return State.failed + return "failed" case _: - return State.unknown + return "unknown" @router.get("/jobs/{job_id}", tags=["jobs"]) @@ -39,7 +51,7 @@ async def get_job_status(job_id: UUID4) -> JobStatus: return JobStatus( name=handle.id, - reason=f"Execution status: {state.name}", + reason=f"Execution status: {state}", created_at=job_description.start_time, updated_at=job_description.execution_time, id=job_id, diff --git a/agents-api/agents_api/routers/sessions/__init__.py b/agents-api/agents_api/routers/sessions/__init__.py index 3cea3eb2d..bbf4014a5 100644 --- a/agents-api/agents_api/routers/sessions/__init__.py +++ b/agents-api/agents_api/routers/sessions/__init__.py @@ -1,7 +1,12 @@ -from .create_session import create_session # noqa: F401 -from .delete_session import delete_session # noqa: F401 -from .get_session import get_session # noqa: F401 -from .list_sessions import list_sessions # noqa: F401 -from .patch_session import patch_session # noqa: F401 -from .router import router # noqa: F401 -from .update_session import update_session # noqa: F401 +# ruff: noqa: F401 + +from .chat import chat +from .create_or_update_session import create_or_update_session +from .create_session import create_session +from .delete_session import delete_session +from .get_session import get_session +from .get_session_history import get_session_history +from .list_sessions import list_sessions +from .patch_session import patch_session +from .router import router +from .update_session import update_session diff --git a/agents-api/agents_api/routers/sessions/chat.py b/agents-api/agents_api/routers/sessions/chat.py new file mode 100644 index 000000000..8d0355de2 --- /dev/null +++ b/agents-api/agents_api/routers/sessions/chat.py @@ -0,0 +1,115 @@ +from typing import Annotated +from uuid import UUID, uuid4 + +from fastapi import BackgroundTasks, Depends +from starlette.status import HTTP_201_CREATED + +from ...autogen.openapi_model import ( + ChatInput, + ChatResponse, + ChunkChatResponse, + CreateEntryRequest, + MessageChatResponse, +) +from ...clients import litellm +from ...common.protocol.developers import Developer +from ...common.protocol.sessions import ChatContext +from ...common.utils.datetime import utcnow +from ...common.utils.template import render_template +from ...dependencies.developer_id import get_developer_data +from ...models.chat.gather_messages import gather_messages +from ...models.chat.prepare_chat_context import prepare_chat_context +from ...models.entry.create_entries import create_entries +from .router import router + + +@router.post( + "/sessions/{session_id}/chat", + status_code=HTTP_201_CREATED, + tags=["sessions", "chat"], +) +async def chat( + developer: Annotated[Developer, Depends(get_developer_data)], + session_id: UUID, + chat_input: ChatInput, + background_tasks: BackgroundTasks, +) -> ChatResponse: + # First get the chat context + chat_context: ChatContext = prepare_chat_context( + developer_id=developer.id, + session_id=session_id, + ) + + # Merge the settings and prepare environment + chat_context.merge_settings(chat_input) + settings: dict = chat_context.settings.model_dump() + env: dict = chat_context.get_chat_environment() + new_raw_messages = [msg.model_dump() for msg in chat_input.messages] + + # Render the messages + past_messages, doc_references = await gather_messages( + developer=developer, + session_id=session_id, + chat_context=chat_context, + chat_input=chat_input, + ) + + env["docs"] = doc_references + new_messages = await render_template(new_raw_messages, variables=env) + messages = past_messages + new_messages + + # Get the tools + tools = settings.get("tools") or chat_context.get_active_tools() + + # TODO: Truncate the messages if necessary + if chat_context.session.context_overflow == "truncate": + # messages = messages[-settings["max_tokens"] :] + raise NotImplementedError("Truncation is not yet implemented") + + # Get the response from the model + model_response = await litellm.acompletion( + messages=messages, + tools=tools, + user=str(developer.id), # For tracking usage + tags=developer.tags, # For filtering models in litellm + **settings, + ) + + # Save the input and the response to the session history + if chat_input.save: + new_entries = [ + CreateEntryRequest.from_model_input( + model=settings["model"], **msg, source="api_request" + ) + for msg in new_messages + ] + + background_tasks.add_task( + create_entries, + developer_id=developer.id, + session_id=session_id, + data=new_entries, + mark_session_as_updated=True, + ) + + # Adaptive context handling + jobs = [] + if chat_context.session.context_overflow == "adaptive": + # TODO: Start the adaptive context workflow + # jobs = [await start_adaptive_context_workflow] + raise NotImplementedError("Adaptive context is not yet implemented") + + # Return the response + chat_response_class = ( + ChunkChatResponse if chat_input.stream else MessageChatResponse + ) + chat_response: ChatResponse = chat_response_class( + id=uuid4(), + created_at=utcnow(), + jobs=jobs, + docs=doc_references, + usage=model_response.usage.model_dump(), + choices=[choice.model_dump() for choice in model_response.choices], + ) + + return chat_response diff --git a/agents-api/agents_api/routers/sessions/create_or_update_session.py b/agents-api/agents_api/routers/sessions/create_or_update_session.py new file mode 100644 index 000000000..8ed9ff7ba --- /dev/null +++ b/agents-api/agents_api/routers/sessions/create_or_update_session.py @@ -0,0 +1,34 @@ +from typing import Annotated +from uuid import UUID + +from fastapi import Depends +from pydantic import UUID4 +from starlette.status import HTTP_201_CREATED + +from ...autogen.openapi_model import ( + CreateOrUpdateSessionRequest, + ResourceCreatedResponse, +) +from ...dependencies.developer_id import get_developer_id +from ...models.session.create_or_update_session import ( + create_or_update_session as create_session_query, +) +from .router import router + + +@router.post("/sessions/{session_id}", status_code=HTTP_201_CREATED, tags=["sessions"]) +async def create_or_update_session( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + session_id: UUID, + data: CreateOrUpdateSessionRequest, +) -> ResourceCreatedResponse: + session = create_session_query( + developer_id=x_developer_id, + session_id=session_id, + data=data, + ) + + return ResourceCreatedResponse( + id=session.id, + created_at=session.created_at, + ) diff --git a/agents-api/agents_api/routers/sessions/create_session.py b/agents-api/agents_api/routers/sessions/create_session.py index 558a4ae22..2262d8e7c 100644 --- a/agents-api/agents_api/routers/sessions/create_session.py +++ b/agents-api/agents_api/routers/sessions/create_session.py @@ -1,7 +1,5 @@ from typing import Annotated -from uuid import uuid4 -import pandas as pd from fastapi import Depends from pydantic import UUID4 from starlette.status import HTTP_201_CREATED @@ -17,23 +15,16 @@ @router.post("/sessions", status_code=HTTP_201_CREATED, tags=["sessions"]) async def create_session( - request: CreateSessionRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + data: CreateSessionRequest, ) -> ResourceCreatedResponse: - session_id = uuid4() - resp: pd.DataFrame = create_session_query( - session_id=session_id, + session = create_session_query( developer_id=x_developer_id, - agent_id=request.agent_id, - user_id=request.user_id, - situation=request.situation, - metadata=request.metadata or {}, - render_templates=request.render_templates or False, - token_budget=request.token_budget, - context_overflow=request.context_overflow, + data=data, ) return ResourceCreatedResponse( - id=resp["session_id"][0], - created_at=resp["created_at"][0], + id=session.id, + created_at=session.created_at, + jobs=[], ) diff --git a/agents-api/agents_api/routers/sessions/delete_session.py b/agents-api/agents_api/routers/sessions/delete_session.py index 969363d70..9645eb8de 100644 --- a/agents-api/agents_api/routers/sessions/delete_session.py +++ b/agents-api/agents_api/routers/sessions/delete_session.py @@ -1,12 +1,10 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_202_ACCEPTED, HTTP_404_NOT_FOUND +from starlette.status import HTTP_202_ACCEPTED from ...autogen.openapi_model import ResourceDeletedResponse -from ...common.exceptions.sessions import SessionNotFoundError -from ...common.utils.datetime import utcnow from ...dependencies.developer_id import get_developer_id from ...models.session.delete_session import delete_session as delete_session_query from .router import router @@ -18,9 +16,4 @@ async def delete_session( session_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)] ) -> ResourceDeletedResponse: - try: - delete_session_query(x_developer_id, session_id) - except SessionNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) - - return ResourceDeletedResponse(id=session_id, deleted_at=utcnow()) + return delete_session_query(developer_id=x_developer_id, session_id=session_id) diff --git a/agents-api/agents_api/routers/sessions/exceptions.py b/agents-api/agents_api/routers/sessions/exceptions.py index add4b79cb..b7a5bb971 100644 --- a/agents-api/agents_api/routers/sessions/exceptions.py +++ b/agents-api/agents_api/routers/sessions/exceptions.py @@ -3,7 +3,7 @@ class BaseSessionException(Exception): class InputTooBigError(BaseSessionException): - def __init__(self, actual_tokens, required_tokens): + def __init__(self, actual_tokens, required_tokens) -> None: super().__init__( f"Input is too big, {actual_tokens} tokens provided, but only {required_tokens} tokens are allowed." ) diff --git a/agents-api/agents_api/routers/sessions/get_session.py b/agents-api/agents_api/routers/sessions/get_session.py index 5aed042dc..c357394fa 100644 --- a/agents-api/agents_api/routers/sessions/get_session.py +++ b/agents-api/agents_api/routers/sessions/get_session.py @@ -1,6 +1,6 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 from ...autogen.openapi_model import Session @@ -13,16 +13,4 @@ async def get_session( session_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)] ) -> Session: - try: - res = [ - row.to_dict() - for _, row in get_session_query( - developer_id=x_developer_id, session_id=session_id - ).iterrows() - ][0] - return Session(**res) - except (IndexError, KeyError): - raise HTTPException( - status_code=404, - detail="Session not found", - ) + return get_session_query(developer_id=x_developer_id, session_id=session_id) diff --git a/agents-api/agents_api/routers/sessions/get_session_history.py b/agents-api/agents_api/routers/sessions/get_session_history.py new file mode 100644 index 000000000..64be57fcb --- /dev/null +++ b/agents-api/agents_api/routers/sessions/get_session_history.py @@ -0,0 +1,16 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 + +from ...autogen.openapi_model import History +from ...dependencies.developer_id import get_developer_id +from ...models.entry.get_history import get_history as get_history_query +from .router import router + + +@router.get("/sessions/{session_id}/history", tags=["sessions"]) +async def get_session_history( + session_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)] +) -> History: + return get_history_query(developer_id=x_developer_id, session_id=session_id) diff --git a/agents-api/agents_api/routers/sessions/list_sessions.py b/agents-api/agents_api/routers/sessions/list_sessions.py index e93c22228..bf5458887 100644 --- a/agents-api/agents_api/routers/sessions/list_sessions.py +++ b/agents-api/agents_api/routers/sessions/list_sessions.py @@ -1,27 +1,25 @@ import json from json import JSONDecodeError -from typing import Annotated +from typing import Annotated, Literal from fastapi import Depends, HTTPException, status -from pydantic import UUID4, BaseModel +from pydantic import UUID4 -from ...autogen.openapi_model import Session +from ...autogen.openapi_model import ListResponse, Session from ...dependencies.developer_id import get_developer_id -from ...models.session.list_sessions import list_sessions +from ...models.session.list_sessions import list_sessions as list_sessions_query from .router import router -class SessionList(BaseModel): - items: list[Session] - - @router.get("/sessions", tags=["sessions"]) -async def list_sessions_route( +async def list_sessions( x_developer_id: Annotated[UUID4, Depends(get_developer_id)], limit: int = 100, offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", metadata_filter: str = "{}", -) -> SessionList: +) -> ListResponse[Session]: try: metadata_filter = json.loads(metadata_filter) except JSONDecodeError: @@ -30,13 +28,13 @@ async def list_sessions_route( detail="metadata_filter is not a valid JSON", ) - query_results = list_sessions( + sessions = list_sessions_query( developer_id=x_developer_id, limit=limit, offset=offset, + sort_by=sort_by, + direction=direction, metadata_filter=metadata_filter, ) - return SessionList( - items=[Session(**row.to_dict()) for _, row in query_results.iterrows()] - ) + return ListResponse[Session](items=sessions) diff --git a/agents-api/agents_api/routers/sessions/patch_session.py b/agents-api/agents_api/routers/sessions/patch_session.py index 992b272a8..365fa49ca 100644 --- a/agents-api/agents_api/routers/sessions/patch_session.py +++ b/agents-api/agents_api/routers/sessions/patch_session.py @@ -1,14 +1,12 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_404_NOT_FOUND from ...autogen.openapi_model import ( PatchSessionRequest, ResourceUpdatedResponse, ) -from ...common.exceptions.sessions import SessionNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.session.patch_session import patch_session as patch_session_query from .router import router @@ -16,31 +14,12 @@ @router.patch("/sessions/{session_id}", tags=["sessions"]) async def patch_session( - session_id: UUID4, - request: PatchSessionRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + session_id: UUID4, + data: PatchSessionRequest, ) -> ResourceUpdatedResponse: - try: - resp = patch_session_query( - session_id=session_id, - developer_id=x_developer_id, - situation=request.situation, - metadata=request.metadata, - token_budget=request.token_budget, - context_overflow=request.context_overflow, - ) - - return ResourceUpdatedResponse( - id=resp["session_id"][0], - updated_at=resp["updated_at"][0][0], - ) - except (IndexError, KeyError): - raise HTTPException( - status_code=HTTP_404_NOT_FOUND, - detail="Session not found", - ) - except SessionNotFoundError as e: - raise HTTPException( - status_code=HTTP_404_NOT_FOUND, - detail=str(e), - ) + return patch_session_query( + developer_id=x_developer_id, + session_id=session_id, + data=data, + ) diff --git a/agents-api/agents_api/routers/sessions/protocol.py b/agents-api/agents_api/routers/sessions/protocol.py deleted file mode 100644 index 6502e98a2..000000000 --- a/agents-api/agents_api/routers/sessions/protocol.py +++ /dev/null @@ -1,36 +0,0 @@ -from pydantic import BaseModel, ConfigDict, Field, field_validator - -from agents_api.autogen.openapi_model import Preset, ResponseFormat, Tool - - -class Settings(BaseModel): - model_config = ConfigDict(validate_assignment=True) - - model: str - frequency_penalty: float | None = Field(default=0) - length_penalty: float | None = Field(default=1.0) - logit_bias: float | None = None - max_tokens: int | None = Field(default=200) - presence_penalty: float | None = Field(default=0) - repetition_penalty: float | None = Field(default=1) - response_format: ResponseFormat | None - seed: int | None = Field(default=0) - stop: list[str] | None = None - stream: bool | None = Field(default=False) - temperature: float | None = Field(default=0.7) - top_p: float | None = Field(default=1) - remember: bool | None = Field(default=True) - recall: bool | None = Field(default=True) - min_p: float | None = Field(default=0.01) - preset: Preset | None = Field(default=None) - tools: list[Tool] | None = Field(default=None) - token_budget: int | None = Field(default=None) - context_overflow: str | None = Field(default=None) - - @field_validator("max_tokens") - def set_max_tokens(cls, max_tokens): - return max_tokens if max_tokens is not None else 200 - - @field_validator("stream") - def set_stream(cls, stream): - return stream or False diff --git a/agents-api/agents_api/routers/sessions/router.py b/agents-api/agents_api/routers/sessions/router.py index af9233c56..5c3ec9311 100644 --- a/agents-api/agents_api/routers/sessions/router.py +++ b/agents-api/agents_api/routers/sessions/router.py @@ -1,3 +1,3 @@ from fastapi import APIRouter -router = APIRouter() +router: APIRouter = APIRouter() diff --git a/agents-api/agents_api/routers/sessions/session.py b/agents-api/agents_api/routers/sessions/session.py deleted file mode 100644 index 5784f40af..000000000 --- a/agents-api/agents_api/routers/sessions/session.py +++ /dev/null @@ -1,550 +0,0 @@ -import json -from dataclasses import dataclass -from functools import partial, reduce -from json import JSONDecodeError -from typing import Callable -from uuid import uuid4 - -import litellm -import xxhash -from litellm import acompletion -from openai.types.chat.chat_completion import ChatCompletion -from pydantic import UUID4 - -from ...autogen.openapi_model import ( - CreateEntryRequest, - DocIds, - InputChatMLMessage, - Tool, -) -from ...clients.embed import embed -from ...clients.temporal import run_summarization_task, run_truncation_task -from ...clients.worker.types import ChatML -from ...common.exceptions.sessions import SessionNotFoundError -from ...common.protocol.entries import Entry -from ...common.protocol.sessions import SessionData -from ...common.utils.json import CustomJSONEncoder -from ...common.utils.messages import stringify_content -from ...common.utils.template import render_template -from ...env import ( - embedding_model_id, - embedding_service_url, - model_api_key, - model_inference_url, -) -from ...exceptions import PromptTooBigError -from ...model_registry import ( - LOCAL_MODELS, - LOCAL_MODELS_WITH_TOOL_CALLS, - OLLAMA_MODELS, - get_extra_settings, - load_context, - validate_and_extract_tool_calls, -) -from ...models.entry.create_entries import create_entries -from ...models.session.get_cached_response import get_cached_response -from ...models.session.prepare_session_data import prepare_session_data -from ...models.session.set_cached_response import set_cached_response -from .exceptions import InputTooBigError -from .protocol import Settings - -THOUGHTS_STRIP_LEN = 2 -MESSAGES_STRIP_LEN = 4 - - -tool_query_instruction = ( - "Transform this user request for fetching helpful tool descriptions: " -) -instruction_query_instruction = ( - "Embed this text chunk for finding useful historical chunks: " -) -doc_query_instruction = ( - "Encode this query and context for searching relevant passages: " -) - - -def cache(f): - async def wrapper(init_context: list[ChatML], settings: Settings) -> ChatCompletion: - key = xxhash.xxh64( - json.dumps( - { - "init_context": [c.model_dump() for c in init_context], - "settings": settings.model_dump(), - }, - cls=CustomJSONEncoder, - default_empty_value="", - ) - ).hexdigest() - result = get_cached_response(key=key) - if not result.size: - resp = await f(init_context, settings) - set_cached_response(key=key, value=resp.model_dump()) - return resp - choices = result.iloc[0].to_dict()["value"] - return ChatCompletion(**choices) - - return wrapper - - -# FIXME: Refactor llm_generate and cache for use inside tasks as well -# - these should probably be moved to a separate module -@cache -async def llm_generate( - init_context: list[ChatML], settings: Settings -) -> ChatCompletion: - init_context = load_context(init_context, settings.model) - tools = None - api_base = None - api_key = None - model = settings.model - if model in [*LOCAL_MODELS.keys(), *LOCAL_MODELS_WITH_TOOL_CALLS.keys()]: - api_base = model_inference_url - api_key = model_api_key - model = f"openai/{model}" - if model in OLLAMA_MODELS: - model = f"ollama/{model}" - - if settings.tools: - tools = [(tool.model_dump(exclude="id")) for tool in settings.tools] - - extra_body = get_extra_settings(settings) - - litellm.drop_params = True - litellm.add_function_to_prompt = True - - res = await acompletion( - model=model, - messages=init_context, - max_tokens=settings.max_tokens, - stop=settings.stop, - temperature=settings.temperature, - frequency_penalty=settings.frequency_penalty, - top_p=settings.top_p, - presence_penalty=settings.presence_penalty, - stream=settings.stream, - tools=tools, - response_format=settings.response_format, - api_base=api_base, - api_key=api_key, - **extra_body, - ) - - return res - - -@dataclass -class BaseSession: - session_id: UUID4 - developer_id: UUID4 - - def _remove_messages( - self, - messages: list[Entry], - start_idx: int | None, - end_idx: int | None, - token_count: int, - summarization_tokens_threshold: int, - predicate: Callable[[Entry], bool], - ) -> tuple[list[Entry], int]: - if len(messages) < abs((end_idx or len(messages)) - (start_idx or 0)): - return messages, token_count - - result: list[Entry] = messages[: start_idx or 0] - skip_check = False - for m in messages[start_idx:end_idx]: - if predicate(m) and not skip_check: - token_count -= m.token_count - if token_count <= summarization_tokens_threshold: - skip_check = True - - continue - - result.append(m) - - if end_idx is not None: - result += messages[end_idx:] - - return result, token_count - - def _truncate_context( - self, messages: list[Entry], summarization_tokens_threshold: int | None - ) -> list[Entry]: - def rm_thoughts(m): - return m.role == "system" and m.name == "thought" - - def rm_user_assistant(m): - return m.role in ("user", "assistant") - - if summarization_tokens_threshold is None: - return messages - - token_count = reduce(lambda c, e: (e.token_count or 0) + c, messages, 0) - - if token_count <= summarization_tokens_threshold: - return messages - - for start_idx, end_idx, cond in [ - (THOUGHTS_STRIP_LEN, -THOUGHTS_STRIP_LEN, rm_thoughts), - (None, None, rm_thoughts), - (MESSAGES_STRIP_LEN, -MESSAGES_STRIP_LEN, rm_user_assistant), - ]: - messages, token_count = self._remove_messages( - messages, - start_idx, - end_idx, - token_count, - summarization_tokens_threshold, - cond, - ) - - if token_count <= summarization_tokens_threshold and messages: - return messages - - # TODO: - # Compress info sections using LLM Lingua - # - If more space is still needed, remove info sections iteratively - - raise InputTooBigError(token_count, summarization_tokens_threshold) - - async def run( - self, new_input, settings: Settings - ) -> tuple[ChatCompletion, Entry, Callable | None, DocIds]: - # TODO: implement locking at some point - - # Get session data - session_data = prepare_session_data( - developer_id=self.developer_id, session_id=self.session_id - ) - if session_data is None: - raise SessionNotFoundError(self.developer_id, self.session_id) - - # Assemble context - init_context, final_settings, doc_ids = await self.forward( - session_data, new_input, settings - ) - - # Generate response - response = await self.generate( - self._truncate_context(init_context, final_settings.token_budget), - final_settings, - ) - - # Save response to session - # if final_settings.get("remember"): - # await self.add_to_session(new_input, response) - - # FIXME: Implement support for multiple choices, will need a revisit to the schema - message = response.choices[0].message - role = message.role - content = message.content - - # FIXME: Implement support for multiple tool calls - - # Unpack tool calls if present - # TODO: implement changes in the openapi spec - # Currently our function_call does the same job as openai's function role - # Need to add a new role for openai's paradigm of shoving function selected into assistant's context - # Ref: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_call_functions_with_chat_models.ipynb - if not message.content and message.tool_calls: - role = "function_call" - content = message.tool_calls[0].function.model_dump_json() - - elif not message.content: - raise ValueError("No content in response") - - total_tokens = response.usage.total_tokens - completion_tokens = response.usage.completion_tokens - new_entry = Entry( - session_id=self.session_id, - role=role, - name=None if session_data is None else session_data.agent_name, - content=content, - token_count=completion_tokens, - ) - - # Return response and the backward pass as a background task (dont await here) - backward_pass = await self.backward( - new_input, total_tokens, new_entry, final_settings - ) - - return response, new_entry, backward_pass, doc_ids - - async def forward( - self, - session_data: SessionData | None, - new_input: list[Entry], - settings: Settings, - ) -> tuple[list[ChatML], Settings, DocIds]: - if session_data is not None: - settings.token_budget = session_data.token_budget - settings.context_overflow = session_data.context_overflow - - stringified_input = [] - for msg in new_input: - stringified_input.append( - ( - msg.role, - msg.name, - stringify_content(msg.content), - ) - ) - - # role, name, content, token_count, created_at - string_to_embed = "\n".join( - [ - f"{name or role}: {content}" - for (role, name, content) in stringified_input - if content - ] - ) - - # FIXME: bge-m3 does not require instructions - ( - tool_query_embedding, - doc_query_embedding, - ) = await embed( - [ - instruction + string_to_embed - for instruction in [ - tool_query_instruction, - doc_query_instruction, - ] - ], - join_inputs=False, - embedding_service_url=embedding_service_url, - embedding_model_name=embedding_model_id, - ) - - entries: list[Entry] = [] - instructions = "Instructions:\n\n" - first_instruction_idx = -1 - first_instruction_created_at = 0 - tools = [] - doc_ids = DocIds(agent_doc_ids=[], user_doc_ids=[]) - - for idx, row in proc_mem_context_query( - session_id=self.session_id, - tool_query_embedding=tool_query_embedding, - doc_query_embedding=doc_query_embedding, - ).iterrows(): - agent_doc_id = row.get("agent_doc_id") - user_doc_id = row.get("user_doc_id") - - if agent_doc_id is not None: - doc_ids.agent_doc_ids.append(agent_doc_id) - - if user_doc_id is not None: - doc_ids.user_doc_ids.append(user_doc_id) - - # If a `functions` message is encountered, extract into tools list - if row["name"] == "functions": - # FIXME: This might also break if {role: system, name: functions, content} but content not valid json object - try: - # FIXME: This is a hack for now, need to fix to support multiple function calls - assert ( - len(row["content"]) == 1 - ), "Only one function can be called at a time" - content = row["content"][0]["text"] - saved_function = json.loads(content) - except JSONDecodeError as e: - # FIXME: raise a proper error that can be caught by the router - raise ValueError(str(e)) - - tool = Tool(type="function", function=saved_function, id=str(uuid4())) - tools.append(tool) - - continue - - # If `instruction` encoountered, extract and compile together (because of a quirk in how cozo queries work) - if row["name"] == "instruction": - if first_instruction_idx < 0: - first_instruction_idx = idx - first_instruction_created_at = row["created_at"] - - instructions += f"{row['content'][0]['text']}" + "\n\n" - - continue - - # Else add to entries as is - entries.append( - Entry( - role=row["role"], - name=row["name"], - content=row["content"], - session_id=self.session_id, - created_at=row["created_at"], - ) - ) - - # If any instructions were found, add them as info block - if first_instruction_idx >= 0: - entries.insert( - first_instruction_idx, - Entry( - role="system", - name="information", - content=instructions, - session_id=self.session_id, - created_at=first_instruction_created_at, - ), - ) - - messages = [ - ChatML( - role=e.role.value if hasattr(e.role, "value") else e.role, - name=e.name, - content=e.content, - ) - for e in entries + new_input - if e.content - ] - - # Simplify messages if possible - for message in messages: - if ( - isinstance(message.content, list) - and len(message.content) == 1 - and message.content[0].type == "text" - ): - message.content = message.content[0].text - # Add tools to settings - if tools: - settings.tools = settings.tools or [] - settings.tools.extend(tools) - # If render_templates=True, render the templates - if session_data is not None and session_data.render_templates: - template_data = { - "session": { - "id": session_data.session_id, - "situation": session_data.situation, - "metadata": session_data.metadata, - }, - "user": { - "id": session_data.user_id, - "name": session_data.user_name, - "about": session_data.user_about, - "metadata": session_data.user_metadata, - }, - "agent": { - "id": session_data.agent_id, - "name": session_data.agent_name, - "about": session_data.agent_about, - "metadata": session_data.agent_metadata, - "tools": settings.tools, - }, - } - - for i, msg in enumerate(messages): - # Only render templates for system/assistant messages - if msg.role not in ["system", "assistant"]: - continue - - messages[i].content = await render_template(msg.content, template_data) - - # FIXME: This sometimes returns "The model `` does not exist." - if session_data is not None: - settings.model = session_data.model - - return messages, settings, doc_ids - - async def generate( - self, init_context: list[ChatML], settings: Settings - ) -> ChatCompletion: - # return await llm_generate(init_context, settings) - - init_context = load_context(init_context, settings.model) - tools = None - api_base = None - api_key = None - model = settings.model - if model in LOCAL_MODELS: - api_base = model_inference_url - api_key = model_api_key - model = f"openai/{model}" - - if settings.tools: - tools = [(tool.model_dump(exclude="id")) for tool in settings.tools] - - litellm.drop_params = True - litellm.add_function_to_prompt = True - res = await acompletion( - model=model, - messages=init_context, - max_tokens=settings.max_tokens, - stop=settings.stop, - temperature=settings.temperature, - frequency_penalty=settings.frequency_penalty, - top_p=settings.top_p, - presence_penalty=settings.presence_penalty, - stream=settings.stream, - tools=tools, - response_format=settings.response_format, - api_base=api_base, - api_key=api_key, - ) - if model in LOCAL_MODELS_WITH_TOOL_CALLS: - validation, tool_call, error_msg = validate_and_extract_tool_calls( - res.choices[0].message.content - ) - if validation: - res.choices[0].message.role = ( - "function_call" if tool_call else "assistant" - ) - res.choices[0].finish_reason = "tool_calls" - res.choices[0].message.tool_calls = tool_call - res.choices[0].message.content = json.dumps(tool_call) - return res - - async def backward( - self, - new_input: list[InputChatMLMessage], - total_tokens: int, - new_entry: Entry, - final_settings: Settings, - ) -> Callable | None: - if not final_settings.remember: - return - - entries: list[Entry] = [] - for m in new_input: - entries.append( - CreateEntryRequest( - role=m.role, - content=m.content, - name=m.name, - ) - ) - - entries.append( - CreateEntryRequest( - role=new_entry.role, - content=new_entry.content, - name=new_entry.name, - ) - ) - bg_task = None - - if ( - final_settings.token_budget is not None - and total_tokens >= final_settings.token_budget - ): - if final_settings.context_overflow == "truncate": - bg_task = partial(run_truncation_task, final_settings.token_budget) - elif final_settings.context_overflow == "adaptive": - bg_task = run_summarization_task - else: - raise PromptTooBigError(total_tokens, final_settings.token_budget) - - create_entries( - developer_id=self.developer_id, session_id=self.session_id, data=entries - ) - - return bg_task - - -class PlainCompletionSession(BaseSession): - pass - - -class RecursiveSummarizationSession(PlainCompletionSession): - pass diff --git a/agents-api/agents_api/routers/sessions/update_session.py b/agents-api/agents_api/routers/sessions/update_session.py index 1f2658525..8d7c28b33 100644 --- a/agents-api/agents_api/routers/sessions/update_session.py +++ b/agents-api/agents_api/routers/sessions/update_session.py @@ -1,14 +1,12 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_404_NOT_FOUND from ...autogen.openapi_model import ( ResourceUpdatedResponse, UpdateSessionRequest, ) -from ...common.exceptions.sessions import SessionNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.session.update_session import update_session as update_session_query from .router import router @@ -16,31 +14,12 @@ @router.put("/sessions/{session_id}", tags=["sessions"]) async def update_session( - session_id: UUID4, - request: UpdateSessionRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + session_id: UUID4, + data: UpdateSessionRequest, ) -> ResourceUpdatedResponse: - try: - resp = update_session_query( - session_id=session_id, - developer_id=x_developer_id, - situation=request.situation, - metadata=request.metadata, - token_budget=request.token_budget, - context_overflow=request.context_overflow, - ) - - return ResourceUpdatedResponse( - id=resp["session_id"][0], - updated_at=resp["updated_at"][0][0], - ) - except (IndexError, KeyError): - raise HTTPException( - status_code=HTTP_404_NOT_FOUND, - detail="Session not found", - ) - except SessionNotFoundError as e: - raise HTTPException( - status_code=HTTP_404_NOT_FOUND, - detail=str(e), - ) + return update_session_query( + developer_id=x_developer_id, + session_id=session_id, + data=data, + ) diff --git a/agents-api/agents_api/routers/tasks/__init__.py b/agents-api/agents_api/routers/tasks/__init__.py index fa07d0740..7321d9ab9 100644 --- a/agents-api/agents_api/routers/tasks/__init__.py +++ b/agents-api/agents_api/routers/tasks/__init__.py @@ -1 +1,12 @@ -from .routers import router # noqa: F401 +# ruff: noqa: F401, F403, F405 +from .create_or_update_task import create_or_update_task +from .create_task import create_task +from .create_task_execution import create_task_execution +from .get_execution_details import get_execution_details +from .get_task_details import get_task_details +from .list_execution_transitions import list_execution_transitions +from .list_task_executions import list_task_executions +from .list_tasks import list_tasks +from .patch_execution import patch_execution +from .router import router +from .update_execution import update_execution diff --git a/agents-api/agents_api/routers/tasks/create_or_update_task.py b/agents-api/agents_api/routers/tasks/create_or_update_task.py new file mode 100644 index 000000000..621d11187 --- /dev/null +++ b/agents-api/agents_api/routers/tasks/create_or_update_task.py @@ -0,0 +1,50 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException +from jsonschema import validate +from jsonschema.exceptions import SchemaError, ValidationError +from pydantic import UUID4 +from starlette.status import HTTP_201_CREATED + +from agents_api.autogen.openapi_model import ( + CreateOrUpdateTaskRequest, + ResourceUpdatedResponse, +) +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.task.create_or_update_task import ( + create_or_update_task as create_or_update_task_query, +) + +from .router import router + + +@router.post( + "/agents/{agent_id}/tasks/{task_id}", status_code=HTTP_201_CREATED, tags=["tasks"] +) +async def create_or_update_task( + data: CreateOrUpdateTaskRequest, + agent_id: UUID4, + task_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], +) -> ResourceUpdatedResponse: + # TODO: Do thorough validation of the task spec + + # Validate the input schema + try: + if data.input_schema is not None: + validate(None, data.input_schema) + + except SchemaError: + raise HTTPException(detail="Invalid input schema", status_code=400) + + except ValidationError: + pass + + task = create_or_update_task_query( + developer_id=x_developer_id, + agent_id=agent_id, + task_id=task_id, + data=data, + ) + + return ResourceUpdatedResponse(id=task.id, updated_at=task.updated_at, jobs=[]) diff --git a/agents-api/agents_api/routers/tasks/create_task.py b/agents-api/agents_api/routers/tasks/create_task.py new file mode 100644 index 000000000..519cfd414 --- /dev/null +++ b/agents-api/agents_api/routers/tasks/create_task.py @@ -0,0 +1,44 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException +from jsonschema import validate +from jsonschema.exceptions import SchemaError, ValidationError +from pydantic import UUID4 +from starlette.status import HTTP_201_CREATED + +from agents_api.autogen.openapi_model import ( + CreateTaskRequest, + ResourceCreatedResponse, +) +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.task.create_task import create_task as create_task_query + +from .router import router + + +@router.post("/agents/{agent_id}/tasks", status_code=HTTP_201_CREATED, tags=["tasks"]) +async def create_task( + data: CreateTaskRequest, + agent_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], +) -> ResourceCreatedResponse: + # TODO: Do thorough validation of the task spec + + # Validate the input schema + try: + if data.input_schema is not None: + validate(None, data.input_schema) + + except SchemaError: + raise HTTPException(detail="Invalid input schema", status_code=400) + + except ValidationError: + pass + + task = create_task_query( + developer_id=x_developer_id, + agent_id=agent_id, + data=data, + ) + + return ResourceCreatedResponse(id=task.id, created_at=task.created_at, jobs=[]) diff --git a/agents-api/agents_api/routers/tasks/create_task_execution.py b/agents-api/agents_api/routers/tasks/create_task_execution.py new file mode 100644 index 000000000..0497777bf --- /dev/null +++ b/agents-api/agents_api/routers/tasks/create_task_execution.py @@ -0,0 +1,136 @@ +import logging +from typing import Annotated +from uuid import UUID, uuid4 + +from beartype import beartype +from fastapi import BackgroundTasks, Depends, HTTPException, status +from jsonschema import validate +from jsonschema.exceptions import ValidationError +from pycozo.client import QueryException +from pydantic import UUID4 +from starlette.status import HTTP_201_CREATED +from temporalio.client import WorkflowHandle + +from ...autogen.Executions import Execution +from ...autogen.openapi_model import ( + CreateExecutionRequest, + ResourceCreatedResponse, + UpdateExecutionRequest, +) +from ...clients.temporal import run_task_execution_workflow +from ...dependencies.developer_id import get_developer_id +from ...models.execution.create_execution import ( + create_execution as create_execution_query, +) +from ...models.execution.create_temporal_lookup import create_temporal_lookup +from ...models.execution.prepare_execution_input import prepare_execution_input +from ...models.execution.update_execution import ( + update_execution as update_execution_query, +) +from ...models.task.get_task import get_task as get_task_query +from .router import router + +logger: logging.Logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) + + +@beartype +async def start_execution( + *, + developer_id: UUID, + task_id: UUID, + data: CreateExecutionRequest, + client=None, +) -> tuple[Execution, WorkflowHandle]: + execution_id = uuid4() + + execution = create_execution_query( + developer_id=developer_id, + task_id=task_id, + execution_id=execution_id, + data=data, + client=client, + ) + + execution_input = prepare_execution_input( + developer_id=developer_id, + task_id=task_id, + execution_id=execution_id, + client=client, + ) + + job_id = uuid4() + + try: + handle = await run_task_execution_workflow( + execution_input=execution_input, + job_id=job_id, + ) + + except Exception as e: + logger.exception(e) + + update_execution_query( + developer_id=developer_id, + task_id=task_id, + execution_id=execution_id, + data=UpdateExecutionRequest(status="failed"), + client=client, + ) + + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Execution creation failed", + ) from e + + return execution, handle + + +@router.post( + "/tasks/{task_id}/executions", + status_code=HTTP_201_CREATED, + tags=["tasks"], +) +async def create_task_execution( + task_id: UUID4, + data: CreateExecutionRequest, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + background_tasks: BackgroundTasks, +) -> ResourceCreatedResponse: + try: + task = get_task_query(task_id=task_id, developer_id=x_developer_id) + validate(data.input, task.input_schema) + + except ValidationError: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Invalid request arguments schema", + ) + except QueryException as e: + if e.code == "transact::assertion_failure": + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Task not found" + ) + + raise + + execution, handle = await start_execution( + developer_id=x_developer_id, + task_id=task_id, + data=data, + ) + + background_tasks.add_task( + create_temporal_lookup, + # + developer_id=x_developer_id, + task_id=task_id, + execution_id=execution.id, + workflow_handle=handle, + ) + + return ResourceCreatedResponse( + id=execution.id, + created_at=execution.created_at, + jobs=[handle.id], + ) diff --git a/agents-api/agents_api/routers/tasks/get_execution_details.py b/agents-api/agents_api/routers/tasks/get_execution_details.py new file mode 100644 index 000000000..e6f87b8af --- /dev/null +++ b/agents-api/agents_api/routers/tasks/get_execution_details.py @@ -0,0 +1,21 @@ +from fastapi import HTTPException, status +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + Execution, +) +from agents_api.models.execution.get_execution import ( + get_execution as get_execution_query, +) + +from .router import router + + +@router.get("/executions/{execution_id}", tags=["executions"]) +async def get_execution_details(execution_id: UUID4) -> Execution: + try: + return get_execution_query(execution_id=execution_id) + except AssertionError: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Execution not found" + ) diff --git a/agents-api/agents_api/routers/tasks/get_task_details.py b/agents-api/agents_api/routers/tasks/get_task_details.py new file mode 100644 index 000000000..bcfbeedc1 --- /dev/null +++ b/agents-api/agents_api/routers/tasks/get_task_details.py @@ -0,0 +1,41 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException, status +from pycozo.client import QueryException +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + Task, +) +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.task.get_task import get_task as get_task_query + +from .router import router + + +@router.get("/tasks/{task_id}", tags=["tasks"]) +async def get_task_details( + task_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], +) -> Task: + not_found = HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail="Task not found" + ) + + try: + task = get_task_query(developer_id=x_developer_id, task_id=task_id) + task_data = task.model_dump() + except AssertionError: + raise not_found + except QueryException as e: + if e.code == "transact::assertion_failure": + raise not_found + + raise + + for workflow in task_data.get("workflows", []): + if workflow["name"] == "main": + task_data["main"] = workflow.get("steps", []) + break + + return Task(**task_data) diff --git a/agents-api/agents_api/routers/tasks/list_execution_transitions.py b/agents-api/agents_api/routers/tasks/list_execution_transitions.py new file mode 100644 index 000000000..98b7f1cd7 --- /dev/null +++ b/agents-api/agents_api/routers/tasks/list_execution_transitions.py @@ -0,0 +1,75 @@ +from typing import Literal + +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + ListResponse, + Transition, +) +from agents_api.models.execution.list_execution_transitions import ( + list_execution_transitions as list_execution_transitions_query, +) + +from .router import router + + +@router.get("/executions/{execution_id}/transitions", tags=["executions"]) +async def list_execution_transitions( + execution_id: UUID4, + limit: int = 100, + offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", +) -> ListResponse[Transition]: + transitions = list_execution_transitions_query( + execution_id=execution_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + ) + return ListResponse[Transition](items=transitions) + + +# @router.get("/executions/{execution_id}/transitions/{transition_id}", tags=["tasks"]) +# async def get_execution_transition( +# execution_id: UUID4, +# transition_id: UUID4, +# ) -> Transition: +# try: +# res = [ +# row.to_dict() +# for _, row in get_execution_transition_query( +# execution_id, transition_id +# ).iterrows() +# ][0] +# return Transition(**res) +# except (IndexError, KeyError): +# raise HTTPException( +# status_code=status.HTTP_404_NOT_FOUND, +# detail="Transition not found", +# ) + + +# TODO: Later; for resuming waiting transitions +# TODO: Ask for a task token to resume a waiting transition +# @router.put("/executions/{execution_id}/transitions/{transition_id}", tags=["tasks"]) +# async def update_execution_transition( +# execution_id: UUID4, +# transition_id: UUID4, +# request: Transition, +# ) -> ResourceUpdatedResponse: +# try: +# resp = update_execution_transition_query( +# execution_id, transition_id, **request.model_dump() +# ) + +# return ResourceUpdatedResponse( +# id=resp["transition_id"][0], +# updated_at=resp["updated_at"][0][0], +# ) +# except (IndexError, KeyError): +# raise HTTPException( +# status_code=status.HTTP_404_NOT_FOUND, +# detail="Transition not found", +# ) diff --git a/agents-api/agents_api/routers/tasks/list_task_executions.py b/agents-api/agents_api/routers/tasks/list_task_executions.py new file mode 100644 index 000000000..27ea782c4 --- /dev/null +++ b/agents-api/agents_api/routers/tasks/list_task_executions.py @@ -0,0 +1,35 @@ +from typing import Annotated, Literal + +from fastapi import Depends +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + Execution, + ListResponse, +) +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.execution.list_executions import ( + list_executions as list_task_executions_query, +) + +from .router import router + + +@router.get("/tasks/{task_id}/executions", tags=["tasks"]) +async def list_task_executions( + task_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + limit: int = 100, + offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", +) -> ListResponse[Execution]: + executions = list_task_executions_query( + task_id=task_id, + developer_id=x_developer_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + ) + return ListResponse[Execution](items=executions) diff --git a/agents-api/agents_api/routers/tasks/list_tasks.py b/agents-api/agents_api/routers/tasks/list_tasks.py new file mode 100644 index 000000000..5066fcc96 --- /dev/null +++ b/agents-api/agents_api/routers/tasks/list_tasks.py @@ -0,0 +1,45 @@ +from typing import Annotated, Literal + +from fastapi import Depends +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + ListResponse, + Task, +) +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.task.list_tasks import list_tasks as list_tasks_query + +from .router import router + + +@router.get("/agents/{agent_id}/tasks", tags=["tasks"]) +async def list_tasks( + agent_id: UUID4, + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + limit: int = 100, + offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", +) -> ListResponse[Task]: + query_results = list_tasks_query( + agent_id=agent_id, + developer_id=x_developer_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + ) + + tasks = [] + for row in query_results: + row_dict = row.model_dump() + + for workflow in row_dict.get("workflows", []): + if workflow["name"] == "main": + row_dict["main"] = workflow["steps"] + break + + tasks.append(Task(**row_dict)) + + return ListResponse[Task](items=tasks) diff --git a/agents-api/agents_api/routers/tasks/patch_execution.py b/agents-api/agents_api/routers/tasks/patch_execution.py new file mode 100644 index 000000000..0bd0b01ba --- /dev/null +++ b/agents-api/agents_api/routers/tasks/patch_execution.py @@ -0,0 +1,31 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + ResourceUpdatedResponse, + UpdateExecutionRequest, +) +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.execution.update_execution import ( + update_execution as update_execution_query, +) + +from .router import router + + +# TODO: write PATCH query +@router.patch("/tasks/{task_id}/executions/{execution_id}", tags=["tasks"]) +async def patch_execution( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + task_id: UUID4, + execution_id: UUID4, + data: UpdateExecutionRequest, +) -> ResourceUpdatedResponse: + return update_execution_query( + developer_id=x_developer_id, + task_id=task_id, + execution_id=execution_id, + data=data, + ) diff --git a/agents-api/agents_api/routers/tasks/router.py b/agents-api/agents_api/routers/tasks/router.py new file mode 100644 index 000000000..9a702c15a --- /dev/null +++ b/agents-api/agents_api/routers/tasks/router.py @@ -0,0 +1,39 @@ +from typing import Callable + +import yaml +from fastapi import APIRouter, Request, Response +from fastapi.routing import APIRoute + + +class YamlRequest(Request): + async def body(self) -> bytes: + if not hasattr(self, "_body"): + body = await super().body() + + if self.headers.get("content-type") in [ + "application/x-yaml", + "application/yaml", + "text/yaml", + ]: + body = yaml.load(body, yaml.CSafeLoader) + + self._body = body + + return self._body + + +class YamlRoute(APIRoute): + def get_route_handler(self) -> Callable: + original_route_handler = super().get_route_handler() + + async def custom_route_handler(request: Request) -> Response: + request = YamlRequest(request.scope, request.receive) + + return await original_route_handler(request) + + return custom_route_handler + + +router: APIRouter = APIRouter( + route_class=YamlRoute, +) diff --git a/agents-api/agents_api/routers/tasks/routers.py b/agents-api/agents_api/routers/tasks/routers.py deleted file mode 100644 index aa9cebb75..000000000 --- a/agents-api/agents_api/routers/tasks/routers.py +++ /dev/null @@ -1,414 +0,0 @@ -import logging -from typing import Annotated, Literal -from uuid import uuid4 - -import pandas as pd -from fastapi import APIRouter, Depends, HTTPException, status -from jsonschema import validate -from jsonschema.exceptions import ValidationError -from pycozo.client import QueryException -from pydantic import UUID4, BaseModel -from starlette.status import HTTP_201_CREATED - -from agents_api.autogen.openapi_model import ( - CreateExecutionRequest, - CreateTaskRequest, - Execution, - ResourceCreatedResponse, - # ResourceUpdatedResponse, - Task, - Transition, - UpdateExecutionRequest, -) -from agents_api.clients.cozo import client as cozo_client -from agents_api.clients.temporal import run_task_execution_workflow -from agents_api.common.protocol.tasks import ExecutionInput -from agents_api.dependencies.developer_id import get_developer_id -from agents_api.models.execution.create_execution import ( - create_execution as create_execution_query, -) -from agents_api.models.execution.get_execution import ( - get_execution as get_execution_query, -) - -# from agents_api.models.execution.get_execution_transition import ( -# get_execution_transition as get_execution_transition_query, -# ) -from agents_api.models.execution.list_execution_transitions import ( - list_execution_transitions as list_execution_transitions_query, -) -from agents_api.models.execution.list_executions import ( - list_executions as list_executions_query, -) -from agents_api.models.execution.list_executions import ( - list_executions as list_task_executions_query, -) -from agents_api.models.execution.update_execution import ( - update_execution as update_execution_query, -) - -# from agents_api.models.execution.update_execution_transition import ( -# update_execution_transition_query, -# ) -from agents_api.models.task.create_task import create_task as create_task_query -from agents_api.models.task.get_task import get_task as get_task_query -from agents_api.models.task.list_tasks import list_tasks as list_tasks_query - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -class TaskList(BaseModel): - items: list[Task] - - -class ExecutionList(BaseModel): - items: list[Execution] - - -class ExecutionTransitionList(BaseModel): - items: list[Transition] - - -router = APIRouter() - - -@router.get("/agents/{agent_id}/tasks", tags=["tasks"]) -async def list_tasks( - agent_id: UUID4, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - limit: int = 100, - offset: int = 0, - sort_by: Literal["created_at", "updated_at"] = "created_at", - direction: Literal["asc", "desc"] = "desc", -) -> TaskList: - query_results = list_tasks_query( - agent_id=agent_id, - developer_id=x_developer_id, - limit=limit, - offset=offset, - sort_by=sort_by, - direction=direction, - ) - - items = [] - for _, row in query_results.iterrows(): - row_dict = row.to_dict() - - for workflow in row_dict["workflows"]: - if workflow["name"] == "main": - row_dict["main"] = workflow["steps"] - break - - items.append(Task(**row_dict)) - - return TaskList(items=items) - - -@router.post("/agents/{agent_id}/tasks", status_code=HTTP_201_CREATED, tags=["tasks"]) -async def create_task( - request: CreateTaskRequest, - agent_id: UUID4, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], -) -> ResourceCreatedResponse: - task_id = uuid4() - - # TODO: Do thorough validation of the task spec - - workflows = [ - {"name": "main", "steps": [w.model_dump() for w in request.main]}, - ] + [{"name": name, "steps": steps} for name, steps in request.model_extra.items()] - - resp: pd.DataFrame = create_task_query( - agent_id=agent_id, - task_id=task_id, - developer_id=x_developer_id, - name=request.name, - description=request.description, - input_schema=request.input_schema or {}, - tools_available=request.tools or [], - workflows=workflows, - ) - - return ResourceCreatedResponse( - id=resp["task_id"][0], created_at=resp["created_at"][0] - ) - - -@router.get("/agents/{agent_id}/tasks/{task_id}", tags=["tasks"]) -async def get_task( - task_id: UUID4, - agent_id: UUID4, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], -) -> Task: - try: - resp = [ - row.to_dict() - for _, row in get_task_query( - agent_id=agent_id, task_id=task_id, developer_id=x_developer_id - ).iterrows() - ][0] - - for workflow in resp["workflows"]: - if workflow["name"] == "main": - resp["main"] = workflow["steps"] - break - - return Task(**resp) - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Task not found", - ) - except QueryException as e: - if e.code == "transact::assertion_failure": - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="Task not found" - ) - - raise - - -@router.post( - "/tasks/{task_id}/executions", - status_code=HTTP_201_CREATED, - tags=["tasks"], -) -async def create_task_execution( - task_id: UUID4, - data: CreateExecutionRequest, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], -) -> ResourceCreatedResponse: - try: - task = [ - row.to_dict() - for _, row in get_task_query( - task_id=task_id, developer_id=x_developer_id - ).iterrows() - ][0] - - validate(data.input, task["input_schema"]) - except ValidationError: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="Invalid request arguments schema", - ) - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Task not found", - ) - except QueryException as e: - if e.code == "transact::assertion_failure": - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="Task not found" - ) - - raise - - execution_id = uuid4() - execution = create_execution_query( - developer_id=x_developer_id, - task_id=task_id, - execution_id=execution_id, - data=data, - ) - - execution_input = ExecutionInput.fetch( - developer_id=x_developer_id, - task_id=task_id, - execution_id=execution_id, - client=cozo_client, - ) - - try: - await run_task_execution_workflow( - execution_input=execution_input, - job_id=uuid4(), - ) - except Exception as e: - logger.exception(e) - - update_execution_query( - developer_id=x_developer_id, - task_id=task_id, - execution_id=execution_id, - data=UpdateExecutionRequest(status="failed"), - ) - - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Task creation failed", - ) - - return ResourceCreatedResponse( - id=execution["execution_id"][0], created_at=execution["created_at"][0] - ) - - -@router.get("/agents/{agent_id}/tasks/{task_id}/executions", tags=["tasks"]) -async def list_task_executions( - task_id: UUID4, - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - limit: int = 100, - offset: int = 0, -) -> ExecutionList: - res = list_task_executions_query( - task_id=task_id, developer_id=x_developer_id, limit=limit, offse=offset - ) - return ExecutionList( - items=[Execution(**row.to_dict()) for _, row in res.iterrows()] - ) - - -@router.get("/executions/{execution_id}", tags=["executions"]) -async def get_execution(task_id: UUID4, execution_id: UUID4) -> Execution: - try: - res = [ - row.to_dict() - for _, row in get_execution_query(execution_id=execution_id).iterrows() - ][0] - return Execution(**res) - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Execution not found", - ) - - -# TODO: write PATCH query -@router.patch("/tasks/{task_id}/executions/{execution_id}", tags=["tasks"]) -async def patch_execution( - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - task_id: UUID4, - execution_id: UUID4, - data: UpdateExecutionRequest, -) -> Execution: - try: - res = [ - row.to_dict() - for _, row in update_execution_query( - developer_id=x_developer_id, - task_id=task_id, - execution_id=execution_id, - data=data, - ).iterrows() - ][0] - return Execution(**res) - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Execution not found", - ) - - -@router.put("/tasks/{task_id}/executions/{execution_id}", tags=["tasks"]) -async def put_execution( - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - task_id: UUID4, - execution_id: UUID4, - data: UpdateExecutionRequest, -) -> Execution: - try: - res = [ - row.to_dict() - for _, row in update_execution_query( - developer_id=x_developer_id, - task_id=task_id, - execution_id=execution_id, - data=data, - ).iterrows() - ][0] - return Execution(**res) - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Execution not found", - ) - - -@router.get("/tasks/{task_id}/executions", tags=["tasks"]) -async def list_execution( - x_developer_id: Annotated[UUID4, Depends(get_developer_id)], - task_id: UUID4, - limit: int = 100, - offset: int = 0, - sort_by: Literal["created_at", "updated_at"] = "created_at", - direction: Literal["asc", "desc"] = "desc", -) -> list[Execution]: - try: - res = [ - Execution(**row.to_dict()) - for _, row in list_executions_query( - developer_id=x_developer_id, - task_id=task_id, - limit=limit, - offset=offset, - sort_by=sort_by, - direction=direction, - ).iterrows() - ] - return res - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="Execution not found", - ) - - -# @router.get("/executions/{execution_id}/transitions/{transition_id}", tags=["tasks"]) -# async def get_execution_transition( -# execution_id: UUID4, -# transition_id: UUID4, -# ) -> Transition: -# try: -# res = [ -# row.to_dict() -# for _, row in get_execution_transition_query( -# execution_id, transition_id -# ).iterrows() -# ][0] -# return Transition(**res) -# except (IndexError, KeyError): -# raise HTTPException( -# status_code=status.HTTP_404_NOT_FOUND, -# detail="Transition not found", -# ) - - -# TODO: Later; for resuming waiting transitions -# TODO: Ask for a task token to resume a waiting transition -# @router.put("/executions/{execution_id}/transitions/{transition_id}", tags=["tasks"]) -# async def update_execution_transition( -# execution_id: UUID4, -# transition_id: UUID4, -# request: Transition, -# ) -> ResourceUpdatedResponse: -# try: -# resp = update_execution_transition_query( -# execution_id, transition_id, **request.model_dump() -# ) - -# return ResourceUpdatedResponse( -# id=resp["transition_id"][0], -# updated_at=resp["updated_at"][0][0], -# ) -# except (IndexError, KeyError): -# raise HTTPException( -# status_code=status.HTTP_404_NOT_FOUND, -# detail="Transition not found", -# ) - - -@router.get("/executions/{execution_id}/transitions", tags=["executions"]) -async def list_execution_transitions( - execution_id: UUID4, - limit: int = 100, - offset: int = 0, -) -> ExecutionTransitionList: - res = list_execution_transitions_query( - execution_id=execution_id, limit=limit, offset=offset - ) - return ExecutionTransitionList( - items=[Transition(**row.to_dict()) for _, row in res.iterrows()] - ) diff --git a/agents-api/agents_api/routers/tasks/update_execution.py b/agents-api/agents_api/routers/tasks/update_execution.py new file mode 100644 index 000000000..779a7121b --- /dev/null +++ b/agents-api/agents_api/routers/tasks/update_execution.py @@ -0,0 +1,47 @@ +from typing import Annotated + +from fastapi import Depends, HTTPException +from pydantic import UUID4 + +from agents_api.autogen.openapi_model import ( + ResumeExecutionRequest, + StopExecutionRequest, +) +from agents_api.clients.temporal import get_client +from agents_api.dependencies.developer_id import get_developer_id +from agents_api.models.execution.get_paused_execution_token import ( + get_paused_execution_token, +) +from agents_api.models.execution.get_temporal_workflow_data import ( + get_temporal_workflow_data, +) + +from .router import router + + +@router.put("/executions/{execution_id}", tags=["executions"]) +async def update_execution( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + execution_id: UUID4, + data: ResumeExecutionRequest | StopExecutionRequest, +): + temporal_client = await get_client() + + match data: + case StopExecutionRequest(): + wf_handle = temporal_client.get_workflow_handle_for( + *get_temporal_workflow_data(execution_id=execution_id) + ) + await wf_handle.cancel() + + case ResumeExecutionRequest(): + token_data = get_paused_execution_token( + developer_id=x_developer_id, execution_id=execution_id + ) + act_handle = temporal_client.get_async_activity_handle( + token_data["task_token"] + ) + await act_handle.complete(data.input) + + case _: + raise HTTPException(status_code=400, detail="Invalid request data") diff --git a/agents-api/agents_api/routers/users/__init__.py b/agents-api/agents_api/routers/users/__init__.py index 91888636d..0cfdf4a5e 100644 --- a/agents-api/agents_api/routers/users/__init__.py +++ b/agents-api/agents_api/routers/users/__init__.py @@ -1,6 +1,9 @@ -from .create_user import create_user # noqa: F401 -from .get_user_details import get_user_details # noqa: F401 -from .list_users import list_users # noqa: F401 -from .patch_user import patch_user # noqa: F401 -from .router import router # noqa: F401 -from .update_user import update_user # noqa: F401 +# ruff: noqa: F401 +from .create_or_update_user import create_or_update_user +from .create_user import create_user +from .delete_user import delete_user +from .get_user_details import get_user_details +from .list_users import list_users +from .patch_user import patch_user +from .router import router +from .update_user import update_user diff --git a/agents-api/agents_api/routers/users/create_or_update_user.py b/agents-api/agents_api/routers/users/create_or_update_user.py new file mode 100644 index 000000000..331f2c4d7 --- /dev/null +++ b/agents-api/agents_api/routers/users/create_or_update_user.py @@ -0,0 +1,27 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 +from starlette.status import HTTP_201_CREATED + +from ...autogen.openapi_model import CreateOrUpdateUserRequest, ResourceCreatedResponse +from ...dependencies.developer_id import get_developer_id +from ...models.user.create_or_update_user import ( + create_or_update_user as create_or_update_user_query, +) +from .router import router + + +@router.post("/users/{user_id}", status_code=HTTP_201_CREATED, tags=["users"]) +async def create_or_update_user( + x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + user_id: UUID4, + data: CreateOrUpdateUserRequest, +) -> ResourceCreatedResponse: + user = create_or_update_user_query( + developer_id=x_developer_id, + user_id=user_id, + data=data, + ) + + return ResourceCreatedResponse(id=user.id, created_at=user.created_at, jobs=[]) diff --git a/agents-api/agents_api/routers/users/create_user.py b/agents-api/agents_api/routers/users/create_user.py index 4e1986315..fcf8bf89b 100644 --- a/agents-api/agents_api/routers/users/create_user.py +++ b/agents-api/agents_api/routers/users/create_user.py @@ -1,5 +1,4 @@ from typing import Annotated -from uuid import uuid4 from fastapi import Depends from pydantic import UUID4 @@ -13,17 +12,12 @@ @router.post("/users", status_code=HTTP_201_CREATED, tags=["users"]) async def create_user( - request: CreateUserRequest, + data: CreateUserRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], ) -> ResourceCreatedResponse: - user_id = uuid4() - created_user = create_user_query( + user = create_user_query( developer_id=x_developer_id, - user_id=user_id, - name=request.name, - about=request.about, - metadata=request.metadata, - ) - return ResourceCreatedResponse( - id=str(user_id), created_at=created_user["created_at"] + data=data, ) + + return ResourceCreatedResponse(id=user.id, created_at=user.created_at, jobs=[]) diff --git a/agents-api/agents_api/routers/users/delete_user.py b/agents-api/agents_api/routers/users/delete_user.py new file mode 100644 index 000000000..fd1d02a94 --- /dev/null +++ b/agents-api/agents_api/routers/users/delete_user.py @@ -0,0 +1,17 @@ +from typing import Annotated + +from fastapi import Depends +from pydantic import UUID4 +from starlette.status import HTTP_202_ACCEPTED + +from ...autogen.openapi_model import ResourceDeletedResponse +from ...dependencies.developer_id import get_developer_id +from ...models.user.delete_user import delete_user as delete_user_query +from .router import router + + +@router.delete("/users/{user_id}", status_code=HTTP_202_ACCEPTED, tags=["users"]) +async def delete_user( + user_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)] +) -> ResourceDeletedResponse: + return delete_user_query(developer_id=x_developer_id, user_id=user_id) diff --git a/agents-api/agents_api/routers/users/exceptions.py b/agents-api/agents_api/routers/users/exceptions.py deleted file mode 100644 index 188c31bb8..000000000 --- a/agents-api/agents_api/routers/users/exceptions.py +++ /dev/null @@ -1,7 +0,0 @@ -class BaseUserException(Exception): - pass - - -class InvalidUserQueryError(BaseUserException): - def __init__(self, message: str): - super().__init__(f"Invalid user query: {message}") diff --git a/agents-api/agents_api/routers/users/get_user_details.py b/agents-api/agents_api/routers/users/get_user_details.py index bb3950cb6..0bc0460ca 100644 --- a/agents-api/agents_api/routers/users/get_user_details.py +++ b/agents-api/agents_api/routers/users/get_user_details.py @@ -1,11 +1,9 @@ from typing import Annotated -from fastapi import Depends, HTTPException, status -from pycozo.client import QueryException +from fastapi import Depends from pydantic import UUID4 from ...autogen.openapi_model import User -from ...common.exceptions.users import UserNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.user.get_user import get_user as get_user_query from .router import router @@ -13,27 +11,7 @@ @router.get("/users/{user_id}", tags=["users"]) async def get_user_details( - user_id: UUID4, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], + user_id: UUID4, ) -> User: - try: - resp = [ - row.to_dict() - for _, row in get_user_query( - developer_id=x_developer_id, - user_id=user_id, - ).iterrows() - ][0] - - return User(**resp) - except (IndexError, KeyError): - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="User not found", - ) - except QueryException as e: - # the code is not so informative now, but it may be a good solution in the future - if e.code == "transact::assertion_failure": - raise UserNotFoundError(x_developer_id, user_id) - - raise + return get_user_query(developer_id=x_developer_id, user_id=user_id) diff --git a/agents-api/agents_api/routers/users/list_users.py b/agents-api/agents_api/routers/users/list_users.py index c7e7b33c4..bfa5be128 100644 --- a/agents-api/agents_api/routers/users/list_users.py +++ b/agents-api/agents_api/routers/users/list_users.py @@ -1,11 +1,11 @@ import json from json import JSONDecodeError -from typing import Annotated, List +from typing import Annotated, Literal from fastapi import Depends, HTTPException, status from pydantic import UUID4 -from ...autogen.openapi_model import User +from ...autogen.openapi_model import ListResponse, User from ...dependencies.developer_id import get_developer_id from ...models.user.list_users import list_users as list_users_query from .router import router @@ -16,8 +16,10 @@ async def list_users( x_developer_id: Annotated[UUID4, Depends(get_developer_id)], limit: int = 100, offset: int = 0, + sort_by: Literal["created_at", "updated_at"] = "created_at", + direction: Literal["asc", "desc"] = "desc", metadata_filter: str = "{}", -) -> List[User]: +) -> ListResponse[User]: try: metadata_filter = json.loads(metadata_filter) except JSONDecodeError: @@ -26,14 +28,14 @@ async def list_users( detail="metadata_filter is not a valid JSON", ) - users = [ - User(**row.to_dict()) - for _, row in list_users_query( - developer_id=x_developer_id, - limit=limit, - offset=offset, - metadata_filter=metadata_filter, - ).iterrows() - ] + users = list_users_query( + developer_id=x_developer_id, + limit=limit, + offset=offset, + sort_by=sort_by, + direction=direction, + metadata_filter=metadata_filter, + ) - return users + result = ListResponse[User](items=users) + return result diff --git a/agents-api/agents_api/routers/users/patch_user.py b/agents-api/agents_api/routers/users/patch_user.py index 7ffeb8251..fcd1e9380 100644 --- a/agents-api/agents_api/routers/users/patch_user.py +++ b/agents-api/agents_api/routers/users/patch_user.py @@ -1,11 +1,9 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_404_NOT_FOUND from ...autogen.openapi_model import PatchUserRequest, ResourceUpdatedResponse -from ...common.exceptions.users import UserNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.user.patch_user import patch_user as patch_user_query from .router import router @@ -14,26 +12,11 @@ @router.patch("/users/{user_id}", tags=["users"]) async def patch_user( user_id: UUID4, - request: PatchUserRequest, + data: PatchUserRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], ) -> ResourceUpdatedResponse: - try: - resp = patch_user_query( - developer_id=x_developer_id, - user_id=user_id, - name=request.name, - about=request.about, - metadata=request.metadata, - ) - - return ResourceUpdatedResponse( - id=resp["user_id"][0], - updated_at=resp["updated_at"][0], - ) - except (IndexError, KeyError): - raise HTTPException( - status_code=HTTP_404_NOT_FOUND, - detail="User not found", - ) - except UserNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) + return patch_user_query( + developer_id=x_developer_id, + user_id=user_id, + data=data, + ) diff --git a/agents-api/agents_api/routers/users/router.py b/agents-api/agents_api/routers/users/router.py index af9233c56..5c3ec9311 100644 --- a/agents-api/agents_api/routers/users/router.py +++ b/agents-api/agents_api/routers/users/router.py @@ -1,3 +1,3 @@ from fastapi import APIRouter -router = APIRouter() +router: APIRouter = APIRouter() diff --git a/agents-api/agents_api/routers/users/update_user.py b/agents-api/agents_api/routers/users/update_user.py index 39e8f782b..258023173 100644 --- a/agents-api/agents_api/routers/users/update_user.py +++ b/agents-api/agents_api/routers/users/update_user.py @@ -1,11 +1,9 @@ from typing import Annotated -from fastapi import Depends, HTTPException +from fastapi import Depends from pydantic import UUID4 -from starlette.status import HTTP_404_NOT_FOUND from ...autogen.openapi_model import ResourceUpdatedResponse, UpdateUserRequest -from ...common.exceptions.users import UserNotFoundError from ...dependencies.developer_id import get_developer_id from ...models.user.update_user import update_user as update_user_query from .router import router @@ -14,26 +12,11 @@ @router.put("/users/{user_id}", tags=["users"]) async def update_user( user_id: UUID4, - request: UpdateUserRequest, + data: UpdateUserRequest, x_developer_id: Annotated[UUID4, Depends(get_developer_id)], ) -> ResourceUpdatedResponse: - try: - resp = update_user_query( - developer_id=x_developer_id, - user_id=user_id, - name=request.name, - about=request.about, - metadata=request.metadata, - ) - - return ResourceUpdatedResponse( - id=resp["user_id"][0], - updated_at=resp["updated_at"][0], - ) - except (IndexError, KeyError): - raise HTTPException( - status_code=HTTP_404_NOT_FOUND, - detail="User not found", - ) - except UserNotFoundError as e: - raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=str(e)) + return update_user_query( + developer_id=x_developer_id, + user_id=user_id, + data=data, + ) diff --git a/agents-api/agents_api/web.py b/agents-api/agents_api/web.py index c408c152b..364497671 100644 --- a/agents-api/agents_api/web.py +++ b/agents-api/agents_api/web.py @@ -3,24 +3,27 @@ """ import logging +from typing import Any, Callable import fire import sentry_sdk import uvicorn from fastapi import Depends, FastAPI, Request, status -from fastapi.exceptions import RequestValidationError +from fastapi.exceptions import HTTPException, RequestValidationError from fastapi.middleware.cors import CORSMiddleware +from fastapi.middleware.gzip import GZipMiddleware from fastapi.responses import JSONResponse from litellm.exceptions import APIError from pycozo.client import QueryException from temporalio.service import RPCError -from agents_api.common.exceptions import BaseCommonException -from agents_api.dependencies.auth import get_api_key -from agents_api.env import sentry_dsn -from agents_api.exceptions import PromptTooBigError -from agents_api.routers import ( +from .common.exceptions import BaseCommonException +from .dependencies.auth import get_api_key +from .env import sentry_dsn +from .exceptions import PromptTooBigError +from .routers import ( agents, + docs, jobs, sessions, tasks, @@ -36,10 +39,10 @@ ) -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def make_exception_handler(status: int): +def make_exception_handler(status: int) -> Callable[[Any, Any], Any]: """ Creates a custom exception handler for the application. @@ -59,7 +62,7 @@ async def _handler(request: Request, exc): return _handler -def register_exceptions(app: FastAPI): +def register_exceptions(app: FastAPI) -> None: """ Registers custom exception handlers for the FastAPI application. @@ -76,8 +79,14 @@ def register_exceptions(app: FastAPI): ) -app = FastAPI(dependencies=[Depends(get_api_key)]) +# TODO: Auth logic should be moved into global middleware _per router_ +# Because some routes don't require auth +# See: https://fastapi.tiangolo.com/tutorial/bigger-applications/ +# +app: Any = FastAPI(dependencies=[Depends(get_api_key)]) +# TODO: CORS should be enabled only for JWT auth +# app.add_middleware( CORSMiddleware, allow_origins=["*"], @@ -87,15 +96,26 @@ def register_exceptions(app: FastAPI): max_age=3600, ) +app.add_middleware(GZipMiddleware, minimum_size=1000, compresslevel=3) + register_exceptions(app) app.include_router(agents.router) -# app.include_router(sessions.router) -# app.include_router(users.router) -# app.include_router(jobs.router) +app.include_router(sessions.router) +app.include_router(users.router) +app.include_router(jobs.router) +app.include_router(docs.router) app.include_router(tasks.router) +@app.exception_handler(HTTPException) +async def http_exception_handler(request, exc: HTTPException): # pylint: disable=unused-argument + return JSONResponse( + status_code=exc.status_code, + content={"error": {"message": str(exc)}}, + ) + + @app.exception_handler(RPCError) async def validation_error_handler(request: Request, exc: RPCError): return JSONResponse( @@ -137,7 +157,7 @@ def main( timeout_keep_alive=30, workers=None, log_level="info", -): +) -> None: uvicorn.run( app, host=host, diff --git a/agents-api/agents_api/worker/__main__.py b/agents-api/agents_api/worker/__main__.py index 271158385..72f9ccf14 100644 --- a/agents-api/agents_api/worker/__main__.py +++ b/agents-api/agents_api/worker/__main__.py @@ -7,45 +7,8 @@ import asyncio -from temporalio.client import Client, TLSConfig -from temporalio.worker import Worker - -from ..activities.co_density import co_density -from ..activities.dialog_insights import dialog_insights -from ..activities.embed_docs import embed_docs -from ..activities.mem_mgmt import mem_mgmt -from ..activities.mem_rating import mem_rating -from ..activities.relationship_summary import relationship_summary -from ..activities.salient_questions import salient_questions -from ..activities.summarization import summarization -from ..activities.task_steps import ( - evaluate_step, - # error_step, - if_else_step, - prompt_step, - tool_call_step, - transition_step, - yield_step, -) -from ..activities.truncation import truncation -from ..env import ( - temporal_client_cert, - temporal_endpoint, - temporal_namespace, - temporal_private_key, - temporal_task_queue, -) -from ..workflows.co_density import CoDensityWorkflow -from ..workflows.dialog_insights import DialogInsightsWorkflow -from ..workflows.embed_docs import EmbedDocsWorkflow -from ..workflows.mem_mgmt import MemMgmtWorkflow -from ..workflows.mem_rating import MemRatingWorkflow -from ..workflows.relationship_summary import RelationshipSummaryWorkflow -from ..workflows.salient_questions import SalientQuestionsWorkflow -from ..workflows.summarization import SummarizationWorkflow -from ..workflows.task_execution import TaskExecutionWorkflow -from ..workflows.truncation import TruncationWorkflow -from .codec import pydantic_data_converter +from ..clients import temporal +from .worker import create_worker async def main(): @@ -53,65 +16,9 @@ async def main(): Initializes the Temporal client and worker with TLS configuration (if provided), then starts the worker to listen for tasks on the configured task queue. """ - print(f"Starting worker on [{temporal_endpoint}]...") - - # Set up TLS configuration if both private key and client certificate are provided - tls_config = False - - if temporal_private_key and temporal_client_cert: - tls_config = TLSConfig( - client_cert=temporal_client_cert.encode(), - client_private_key=temporal_private_key.encode(), - ) - - # Connect to the Temporal service using the provided endpoint, namespace, and TLS configuration (if any) - client = await Client.connect( - temporal_endpoint, - namespace=temporal_namespace, - tls=tls_config, - data_converter=pydantic_data_converter, - ) - - task_activities = [ - prompt_step, - evaluate_step, - yield_step, - tool_call_step, - # error_step, - if_else_step, - transition_step, - ] - print(f"Queue: {temporal_task_queue}") - # Initialize the worker with the specified task queue, workflows, and activities - worker = Worker( - client, - task_queue=temporal_task_queue, - workflows=[ - SummarizationWorkflow, - CoDensityWorkflow, - DialogInsightsWorkflow, - MemMgmtWorkflow, - MemRatingWorkflow, - RelationshipSummaryWorkflow, - SalientQuestionsWorkflow, - EmbedDocsWorkflow, - TaskExecutionWorkflow, - TruncationWorkflow, - ], - activities=[ - *task_activities, - summarization, - co_density, - dialog_insights, - mem_mgmt, - mem_rating, - relationship_summary, - salient_questions, - embed_docs, - truncation, - ], - ) + client = await temporal.get_client() + worker = create_worker(client) # Start the worker to listen for and process tasks await worker.run() diff --git a/agents-api/agents_api/worker/codec.py b/agents-api/agents_api/worker/codec.py index abf3ddfe5..d56b81de1 100644 --- a/agents-api/agents_api/worker/codec.py +++ b/agents-api/agents_api/worker/codec.py @@ -1,12 +1,20 @@ +### +### NOTE: Working with temporal's codec is really really weird +### This is a workaround to use pydantic models with temporal +### The codec is used to serialize/deserialize the data +### But this code is quite brittle. Be careful when changing it + + import dataclasses -import json +import logging +import pickle from typing import Any, Optional, Type -import openai.types as openai_types -import openai.types.chat as openai_chat_types import temporalio.converter -from litellm.utils import ModelResponse -from pydantic import BaseModel + +# from beartype import BeartypeConf +# from beartype.door import is_bearable, is_subhint +from lz4.frame import compress, decompress from temporalio.api.common.v1 import Payload from temporalio.converter import ( CompositePayloadConverter, @@ -14,63 +22,76 @@ EncodingPayloadConverter, ) -import agents_api.autogen.openapi_model as openapi_model -import agents_api.common.protocol.tasks as tasks -from agents_api.common.utils.json import dumps as json_dumps - -# Map of model name to class so that we can look up the class when deserializing -model_class_map = { - subclass.__module__ + "." + subclass.__name__: subclass - for subclass in { - # All the models we want to support - **openai_types.__dict__, - **openai_chat_types.__dict__, - **openapi_model.__dict__, - **tasks.__dict__, - }.values() - # - # Filter out the ones that aren't pydantic models - if isinstance(subclass, type) and issubclass(subclass, BaseModel) -} -# Also include dict -model_class_map["builtins.dict"] = dict -model_class_map["litellm.utils.ModelResponse"] = ModelResponse +def serialize(x: Any) -> bytes: + return compress(pickle.dumps(x)) + + +def deserialize(b: bytes) -> Any: + return pickle.loads(decompress(b)) + + +def from_payload_data(data: bytes, type_hint: Optional[Type] = None) -> Any: + decoded = deserialize(data) + + if type_hint is None: + return decoded + + decoded_type = type(decoded) + + # FIXME: Enable this check when temporal's codec stuff is fixed + # + # # Otherwise, check if the decoded value is bearable to the type hint + # if not is_bearable( + # decoded, + # type_hint, + # conf=BeartypeConf( + # is_pep484_tower=True + # ), # Check PEP 484 type hints. (be more lax on numeric types) + # ): + # logging.warning( + # f"WARNING: Decoded value {decoded_type} is not bearable to {type_hint}" + # ) + + # FIXME: Enable this check when temporal's codec stuff is fixed + # + # If the decoded value is a BaseModel and the type hint is a subclass of BaseModel + # and the decoded value's class is a subclass of the type hint, then promote the decoded value + # to the type hint. + if ( + type_hint != decoded_type + and hasattr(type_hint, "model_construct") + and hasattr(decoded, "model_dump") + # + # FIXME: Enable this check when temporal's codec stuff is fixed + # + # and is_subhint(type_hint, decoded_type) + ): + try: + decoded = type_hint(**decoded.model_dump()) + except Exception as e: + logging.warning( + f"WARNING: Could not promote {decoded_type} to {type_hint}: {e}" + ) + + return decoded class PydanticEncodingPayloadConverter(EncodingPayloadConverter): - @property - def encoding(self) -> str: - return "text/pydantic-json" + encoding = "text/pickle+lz4" + b_encoding = encoding.encode() def to_payload(self, value: Any) -> Optional[Payload]: - data: str = ( - value.model_dump_json() - if hasattr(value, "model_dump_json") - else json_dumps(value) - ) - return Payload( metadata={ - "encoding": self.encoding.encode(), - "model_name": value.__class__.__name__.encode(), - "model_module": value.__class__.__module__.encode(), + "encoding": self.b_encoding, }, - data=data.encode(), + data=serialize(value), ) def from_payload(self, payload: Payload, type_hint: Optional[Type] = None) -> Any: - data = json.loads(payload.data.decode()) - - if not isinstance(data, dict): - return data - - # Otherwise, we have a model - model_name = payload.metadata["model_name"].decode() - model_module = payload.metadata["model_module"].decode() - model_class = model_class_map[model_module + "." + model_name] - - return model_class(**data) + assert payload.metadata["encoding"] == self.b_encoding + return from_payload_data(payload.data, type_hint) class PydanticPayloadConverter(CompositePayloadConverter): @@ -83,7 +104,7 @@ def __init__(self) -> None: # Use the default data converter, but change the payload converter. -pydantic_data_converter = dataclasses.replace( +pydantic_data_converter: Any = dataclasses.replace( temporalio.converter.default(), payload_converter_class=PydanticPayloadConverter, ) diff --git a/agents-api/agents_api/worker/worker.py b/agents-api/agents_api/worker/worker.py new file mode 100644 index 000000000..65e813023 --- /dev/null +++ b/agents-api/agents_api/worker/worker.py @@ -0,0 +1,60 @@ +from datetime import timedelta +from inspect import getmembers, isfunction +from typing import Any + +from temporalio.client import Client +from temporalio.worker import Worker + + +def create_worker(client: Client) -> Any: + """ + Initializes the Temporal client and worker with TLS configuration (if provided), + then create a worker to listen for tasks on the configured task queue. + """ + + from ..activities import task_steps + from ..activities.demo import demo_activity + from ..activities.embed_docs import embed_docs + from ..activities.mem_mgmt import mem_mgmt + from ..activities.mem_rating import mem_rating + from ..activities.summarization import summarization + from ..activities.truncation import truncation + from ..env import ( + temporal_task_queue, + ) + from ..workflows.demo import DemoWorkflow + from ..workflows.embed_docs import EmbedDocsWorkflow + from ..workflows.mem_mgmt import MemMgmtWorkflow + from ..workflows.mem_rating import MemRatingWorkflow + from ..workflows.summarization import SummarizationWorkflow + from ..workflows.task_execution import TaskExecutionWorkflow + from ..workflows.truncation import TruncationWorkflow + + task_activity_names, task_activities = zip(*getmembers(task_steps, isfunction)) + + # Initialize the worker with the specified task queue, workflows, and activities + worker = Worker( + client, + graceful_shutdown_timeout=timedelta(seconds=30), + task_queue=temporal_task_queue, + workflows=[ + DemoWorkflow, + SummarizationWorkflow, + MemMgmtWorkflow, + MemRatingWorkflow, + EmbedDocsWorkflow, + TaskExecutionWorkflow, + TruncationWorkflow, + ], + activities=[ + *task_activities, + demo_activity, + summarization, + mem_mgmt, + mem_rating, + embed_docs, + truncation, + ], + ) + + return worker diff --git a/agents-api/agents_api/workflows/co_density.py b/agents-api/agents_api/workflows/co_density.py deleted file mode 100644 index ffa317926..000000000 --- a/agents-api/agents_api/workflows/co_density.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - - -from datetime import timedelta - -from temporalio import workflow - -with workflow.unsafe.imports_passed_through(): - from ..activities.co_density import co_density - - -@workflow.defn -class CoDensityWorkflow: - @workflow.run - async def run(self, memory: str) -> None: - return await workflow.execute_activity( - co_density, - memory, - schedule_to_close_timeout=timedelta(seconds=600), - ) diff --git a/agents-api/agents_api/workflows/demo.py b/agents-api/agents_api/workflows/demo.py new file mode 100644 index 000000000..61ad9d4a8 --- /dev/null +++ b/agents-api/agents_api/workflows/demo.py @@ -0,0 +1,17 @@ +from datetime import timedelta + +from temporalio import workflow + +with workflow.unsafe.imports_passed_through(): + from ..activities.demo import demo_activity + + +@workflow.defn +class DemoWorkflow: + @workflow.run + async def run(self, a: int, b: int) -> int: + return await workflow.execute_activity( + demo_activity, + args=[a, b], + start_to_close_timeout=timedelta(seconds=30), + ) diff --git a/agents-api/agents_api/workflows/dialog_insights.py b/agents-api/agents_api/workflows/dialog_insights.py deleted file mode 100644 index d7e40395e..000000000 --- a/agents-api/agents_api/workflows/dialog_insights.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python3 - - -from datetime import timedelta - -from temporalio import workflow - -with workflow.unsafe.imports_passed_through(): - from ..activities.dialog_insights import dialog_insights - from ..activities.types import ChatML - - -@workflow.defn -class DialogInsightsWorkflow: - @workflow.run - async def run(self, dialog: list[ChatML], person1: str, person2: str) -> None: - return await workflow.execute_activity( - dialog_insights, - [dialog, person1, person2], - schedule_to_close_timeout=timedelta(seconds=600), - ) diff --git a/agents-api/agents_api/workflows/embed_docs.py b/agents-api/agents_api/workflows/embed_docs.py index e52921ed8..62e0e65ae 100644 --- a/agents-api/agents_api/workflows/embed_docs.py +++ b/agents-api/agents_api/workflows/embed_docs.py @@ -7,14 +7,15 @@ with workflow.unsafe.imports_passed_through(): from ..activities.embed_docs import embed_docs + from ..activities.types import EmbedDocsPayload @workflow.defn class EmbedDocsWorkflow: @workflow.run - async def run(self, doc_id: str, title: str, content: list[str]) -> None: - return await workflow.execute_activity( + async def run(self, embed_payload: EmbedDocsPayload) -> None: + await workflow.execute_activity( embed_docs, - args=[doc_id, title, content], + embed_payload, schedule_to_close_timeout=timedelta(seconds=600), ) diff --git a/agents-api/agents_api/workflows/mem_mgmt.py b/agents-api/agents_api/workflows/mem_mgmt.py index 2db9f95da..31c973741 100644 --- a/agents-api/agents_api/workflows/mem_mgmt.py +++ b/agents-api/agents_api/workflows/mem_mgmt.py @@ -7,14 +7,17 @@ with workflow.unsafe.imports_passed_through(): from ..activities.mem_mgmt import mem_mgmt - from ..activities.types import ChatML + from ..autogen.openapi_model import InputChatMLMessage @workflow.defn class MemMgmtWorkflow: @workflow.run async def run( - self, dialog: list[ChatML], session_id: str, previous_memories: list[str] + self, + dialog: list[InputChatMLMessage], + session_id: str, + previous_memories: list[str], ) -> None: return await workflow.execute_activity( mem_mgmt, diff --git a/agents-api/agents_api/workflows/relationship_summary.py b/agents-api/agents_api/workflows/relationship_summary.py deleted file mode 100644 index 0f2e5fb07..000000000 --- a/agents-api/agents_api/workflows/relationship_summary.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - - -from datetime import timedelta - -from temporalio import workflow - -with workflow.unsafe.imports_passed_through(): - from ..activities.relationship_summary import relationship_summary - - -@workflow.defn -class RelationshipSummaryWorkflow: - @workflow.run - async def run(self, statements: list[str], person1: str, person2: str) -> None: - return await workflow.execute_activity( - relationship_summary, - [statements, person1, person2], - schedule_to_close_timeout=timedelta(seconds=600), - ) diff --git a/agents-api/agents_api/workflows/salient_questions.py b/agents-api/agents_api/workflows/salient_questions.py deleted file mode 100644 index 59f30dc37..000000000 --- a/agents-api/agents_api/workflows/salient_questions.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - - -from datetime import timedelta - -from temporalio import workflow - -with workflow.unsafe.imports_passed_through(): - from ..activities.salient_questions import salient_questions - - -@workflow.defn -class SalientQuestionsWorkflow: - @workflow.run - async def run(self, statements: list[str], num: int = 3) -> None: - return await workflow.execute_activity( - salient_questions, - [statements, num], - schedule_to_close_timeout=timedelta(seconds=600), - ) diff --git a/agents-api/agents_api/workflows/task_execution.py b/agents-api/agents_api/workflows/task_execution.py index 1b888788b..72c7393ed 100644 --- a/agents-api/agents_api/workflows/task_execution.py +++ b/agents-api/agents_api/workflows/task_execution.py @@ -1,29 +1,73 @@ #!/usr/bin/env python3 +import asyncio from datetime import timedelta +from typing import Any +from pydantic import RootModel from temporalio import workflow +from temporalio.exceptions import ApplicationError with workflow.unsafe.imports_passed_through(): - from ..activities.task_steps import ( - evaluate_step, - if_else_step, - prompt_step, - tool_call_step, - transition_step, - ) - from ..common.protocol.tasks import ( + from ..activities import task_steps + from ..autogen.openapi_model import ( + CreateTransitionRequest, + ErrorWorkflowStep, EvaluateStep, - ExecutionInput, - # ErrorWorkflowStep, + ForeachDo, + ForeachStep, IfElseWorkflowStep, + LogStep, + MapReduceStep, PromptStep, - StepContext, - ToolCallStep, - TransitionInfo, + ReturnStep, + SleepFor, + SleepStep, + SwitchStep, + # ToolCallStep, + TransitionTarget, + WaitForInputStep, + Workflow, + WorkflowStep, YieldStep, ) + from ..common.protocol.tasks import ( + ExecutionInput, + PendingTransition, + StepContext, + StepOutcome, + ) + from ..env import testing + + +STEP_TO_ACTIVITY = { + PromptStep: task_steps.prompt_step, + # ToolCallStep: tool_call_step, + WaitForInputStep: task_steps.wait_for_input_step, + SwitchStep: task_steps.switch_step, + # FIXME: These should be moved to local activities + # once temporal has fixed error handling for local activities + LogStep: task_steps.log_step, + EvaluateStep: task_steps.evaluate_step, + ReturnStep: task_steps.return_step, + YieldStep: task_steps.yield_step, + IfElseWorkflowStep: task_steps.if_else_step, + ForeachStep: task_steps.for_each_step, + MapReduceStep: task_steps.map_reduce_step, +} + +# TODO: Avoid local activities for now (currently experimental) +STEP_TO_LOCAL_ACTIVITY = { + # # NOTE: local activities are directly called in the workflow executor + # # They MUST NOT FAIL, otherwise they will crash the workflow + # EvaluateStep: task_steps.evaluate_step, + # ReturnStep: task_steps.return_step, + # YieldStep: task_steps.yield_step, + # IfElseWorkflowStep: task_steps.if_else_step, +} + +GenericStep = RootModel[WorkflowStep] @workflow.defn @@ -32,113 +76,308 @@ class TaskExecutionWorkflow: async def run( self, execution_input: ExecutionInput, - start: tuple[str, int] = ("main", 0), - previous_inputs: list[dict] = [], - ) -> None: - wf_name, step_idx = start - spec = execution_input.task.spec - workflow_map = {wf.name: wf.steps for wf in spec.workflows} - current_workflow = workflow_map[wf_name] + start: TransitionTarget = TransitionTarget(workflow="main", step=0), + previous_inputs: list[Any] = [], + ) -> Any: + # 0. Prepare context previous_inputs = previous_inputs or [execution_input.arguments] - step = current_workflow[step_idx] context = StepContext( - developer_id=execution_input.developer_id, - execution=execution_input.execution, - task=execution_input.task, - agent=execution_input.agent, - user=execution_input.user, - session=execution_input.session, - tools=execution_input.tools, - arguments=execution_input.arguments, - definition=step, + execution_input=execution_input, inputs=previous_inputs, + cursor=start, ) - should_wait = False - # Run the step - match step: - case PromptStep(): - outputs = await workflow.execute_activity( - prompt_step, - context, - schedule_to_close_timeout=timedelta(seconds=600), + step_type = type(context.current_step) + + # --- + + # 1a. Set global state + # (By default, exit if last otherwise transition 'step' to the next step) + state = PendingTransition( + type="finish" if context.is_last_step else "step", + next=None + if context.is_last_step + else TransitionTarget(workflow=start.workflow, step=start.step + 1), + metadata={"__meta__": {"step_type": step_type.__name__}}, + ) + + # 1b. Prep a transition request + async def transition(**kwargs) -> None: + # NOTE: The state variable is closured from the outer scope + transition_request = CreateTransitionRequest( + current=context.cursor, + **{ + **state.model_dump(exclude_unset=True), + **kwargs, # Override with any additional kwargs + }, + ) + + await workflow.execute_activity( + task_steps.transition_step, + args=[context, transition_request], + schedule_to_close_timeout=timedelta(seconds=600), + ) + + # --- + + # 2. Execute the current step's activity if applicable + if activity := STEP_TO_ACTIVITY.get(step_type): + execute_activity = workflow.execute_activity + elif activity := STEP_TO_LOCAL_ACTIVITY.get(step_type): + execute_activity = workflow.execute_local_activity + else: + execute_activity = None + + outcome = None + if execute_activity: + outcome = await execute_activity( + activity, + context, + # + # TODO: This should be a configurable timeout everywhere based on the task + schedule_to_close_timeout=timedelta(seconds=3 if testing else 600), + ) + + # --- + + # 3. Then, based on the outcome and step type, decide what to do next + match context.current_step, outcome: + # Handle errors (activity returns None) + case step, StepOutcome(error=error) if error is not None: + raise ApplicationError( + f"{type(step).__name__} step threw error: {error}" ) - # TODO: ChatCompletion does not have tool_calls - # if outputs.tool_calls is not None: - # should_wait = True + case LogStep(), StepOutcome(output=output): + # Add the logged message to transition history + await transition(output=dict(logged=output)) - case EvaluateStep(): - outputs = await workflow.execute_activity( - evaluate_step, - context, - schedule_to_close_timeout=timedelta(seconds=600), + # Set the output to the current input + state.output = context.current_input + + case ReturnStep(), StepOutcome(output=output): + await transition(output=output, type="finish", next=None) + return output # <--- Byeeee! + + case SwitchStep(switch=switch), StepOutcome(output=index) if index >= 0: + chosen_branch = switch[index] + + # Create a faux workflow + case_wf_name = ( + f"`{context.cursor.workflow}`[{context.cursor.step}].case" ) - case YieldStep(): - outputs = await workflow.execute_child_workflow( + + case_task = execution_input.task.model_copy() + case_task.workflows = [ + Workflow(name=case_wf_name, steps=[chosen_branch.then]) + ] + + # Create a new execution input + case_execution_input = execution_input.model_copy() + case_execution_input.task = case_task + + # Set the next target to the chosen branch + case_next_target = TransitionTarget(workflow=case_wf_name, step=0) + + case_args = [ + case_execution_input, + case_next_target, + previous_inputs, + ] + + # Execute the chosen branch and come back here + state.output = await workflow.execute_child_workflow( TaskExecutionWorkflow.run, - args=[execution_input, (step.workflow, 0), previous_inputs], + args=case_args, ) - case ToolCallStep(): - outputs = await workflow.execute_activity( - tool_call_step, - context, - schedule_to_close_timeout=timedelta(seconds=600), + + case SwitchStep(), StepOutcome(output=index) if index < 0: + # If no case matched, then the output will be -1 + raise ApplicationError("Negative indices not allowed") + + case IfElseWorkflowStep(then=then_branch, else_=else_branch), StepOutcome( + output=condition + ): + # Choose the branch based on the condition + chosen_branch = then_branch if condition else else_branch + + # Create a faux workflow + if_else_wf_name = ( + f"`{context.cursor.workflow}`[{context.cursor.step}].if_else" ) - # case ErrorWorkflowStep(): - # result = await workflow.execute_activity( - # error_step, - # context, - # schedule_to_close_timeout=timedelta(seconds=600), - # ) - case IfElseWorkflowStep(): - outputs = await workflow.execute_activity( - if_else_step, - context, - schedule_to_close_timeout=timedelta(seconds=600), + if_else_wf_name += ".then" if condition else ".else" + + if_else_task = execution_input.task.model_copy() + if_else_task.workflows = [ + Workflow(name=if_else_wf_name, steps=[chosen_branch]) + ] + + # Create a new execution input + if_else_execution_input = execution_input.model_copy() + if_else_execution_input.task = if_else_task + + # Set the next target to the chosen branch + if_else_next_target = TransitionTarget(workflow=if_else_wf_name, step=0) + + if_else_args = [ + if_else_execution_input, + if_else_next_target, + previous_inputs, + ] + + # Execute the chosen branch and come back here + state.output = await workflow.execute_child_workflow( + TaskExecutionWorkflow.run, + args=if_else_args, ) - workflow_step = YieldStep(**outputs["goto_workflow"]) - outputs = await workflow.execute_child_workflow( + case ForeachStep(foreach=ForeachDo(do=do_step)), StepOutcome(output=items): + for i, item in enumerate(items): + # Create a faux workflow + foreach_wf_name = f"`{context.cursor.workflow}`[{context.cursor.step}].foreach[{i}]" + + foreach_task = execution_input.task.model_copy() + foreach_task.workflows = [ + Workflow(name=foreach_wf_name, steps=[do_step]) + ] + + # Create a new execution input + foreach_execution_input = execution_input.model_copy() + foreach_execution_input.task = foreach_task + + # Set the next target to the chosen branch + foreach_next_target = TransitionTarget( + workflow=foreach_wf_name, step=0 + ) + + foreach_args = [ + foreach_execution_input, + foreach_next_target, + previous_inputs + [{"item": item}], + ] + + # Execute the chosen branch and come back here + state.output = await workflow.execute_child_workflow( + TaskExecutionWorkflow.run, + args=foreach_args, + ) + + case MapReduceStep( + map=map_defn, reduce=reduce, initial=initial + ), StepOutcome(output=items): + initial = initial or [] + reduce = reduce or "results + [_]" + + for i, item in enumerate(items): + workflow_name = f"`{context.cursor.workflow}`[{context.cursor.step}].mapreduce[{i}]" + map_reduce_task = execution_input.task.model_copy() + + defn_dict = map_defn.model_dump() + step_defn = GenericStep(**defn_dict).root + map_reduce_task.workflows = [ + Workflow(name=workflow_name, steps=[step_defn]) + ] + + # Create a new execution input + map_reduce_execution_input = execution_input.model_copy() + map_reduce_execution_input.task = map_reduce_task + + # Set the next target to the chosen branch + map_reduce_next_target = TransitionTarget( + workflow=workflow_name, step=0 + ) + + map_reduce_args = [ + map_reduce_execution_input, + map_reduce_next_target, + previous_inputs + [item], + ] + + # TODO: We should parallelize this + # Execute the chosen branch and come back here + output = await workflow.execute_child_workflow( + TaskExecutionWorkflow.run, + args=map_reduce_args, + ) + + initial = await execute_activity( + task_steps.base_evaluate, + args=[ + reduce, + {"results": initial, "_": output}, + ], + schedule_to_close_timeout=timedelta(seconds=2), + ) + + state.output = initial + await transition() + + case SleepStep( + sleep=SleepFor( + seconds=seconds, + minutes=minutes, + hours=hours, + days=days, + ) + ), _: + seconds = seconds + minutes * 60 + hours * 60 * 60 + days * 24 * 60 * 60 + assert seconds > 0, "Sleep duration must be greater than 0" + + state.output = await asyncio.sleep( + seconds, result=context.current_input + ) + + await transition() + + case EvaluateStep(), StepOutcome(output=output): + state.output = output + await transition() + + case ErrorWorkflowStep(error=error), _: + state.output = dict(error=error) + state.type = "error" + await transition() + + raise ApplicationError(f"Error raised by ErrorWorkflowStep: {error}") + + case YieldStep(), StepOutcome( + output=output, transition_to=(yield_transition_type, yield_next_target) + ): + await transition( + output=output, type=yield_transition_type, next=yield_next_target + ) + + state.output = await workflow.execute_child_workflow( TaskExecutionWorkflow.run, - args=[ - execution_input, - (workflow_step.workflow, 0), - previous_inputs, - ], + args=[execution_input, yield_next_target, [output]], ) - is_last = step_idx + 1 == len(current_workflow) - # Transition type - transition_type = ( - "awaiting_input" if should_wait else ("finish" if is_last else "step") - ) + case WaitForInputStep(), StepOutcome(output=output): + await transition(output=output, type="wait", next=None) - # Transition to the next step - transition_info = TransitionInfo( - from_=(wf_name, step_idx), - to=None if (is_last or should_wait) else (wf_name, step_idx + 1), - type=transition_type, - ) + state.type = "resume" + state.output = await execute_activity( + task_steps.raise_complete_async, + schedule_to_close_timeout=timedelta(days=31), + ) - await workflow.execute_activity( - transition_step, - args=[ - context, - transition_info, - ], - schedule_to_close_timeout=timedelta(seconds=600), - ) + case PromptStep(), StepOutcome(output=response): + state.output = response + + case _: + raise ApplicationError("Not implemented") - # FIXME: this is just a demo, we should handle the end of the workflow properly - # ----- + # --- + # 4. Closing # End if the last step - if is_last: - return outputs + if state.type in ("finish", "cancelled"): + return state.output # Otherwise, recurse to the next step + # TODO: Should use a continue_as_new workflow ONLY if the next step is a conditional or loop + # Otherwise, we should just call the next step as a child workflow workflow.continue_as_new( - execution_input, (wf_name, step_idx + 1), previous_inputs + [outputs] + args=[execution_input, state.next, previous_inputs + [state.output]] ) diff --git a/agents-api/demo/example.py b/agents-api/demo/example.py deleted file mode 100644 index 737539c93..000000000 --- a/agents-api/demo/example.py +++ /dev/null @@ -1,89 +0,0 @@ -import asyncio -from julep import Client - -client = Client(base_url="0.0.0.0:8080", api_key="myauthkey") - -# Let's create a research assistant -name = "Research Assistant" -description = "This assistant is designed to automate the process of gathering, summarizing, and delivering research on specific topics using web searches and webhooks to integrate with other systems." - -# Let's give it some tools -web_search = { - "type": "search", - "engine": "brave", - "description": "Uses Brave search engine to find relevant information on the web.", -} -call_webhook = { - "type": "http", - "http": { - "endpoint": "http://localhost:9000", - "method": "POST", - "description": "Webhook to deliver research results", - "json": { - "summary": {"type": "string", "description": "Summary of the research"}, - "details": { - "type": "string", - "description": "Detailed search results for further analysis", - }, - }, - }, -} - -agent = client.agents.create( - name=name, - description=description, - tools=[web_search, call_webhook], -) - -# Let's create a task for this agent. -# The agent will perform these tasks: -# 1. Think about the task and make a plan using the tools (given a topic, search the web for it, then summarize it and then send the result to a webhook) -# 2. Think about step 1. And make a tool_call to search the web for the topic with a detailed query -# (the tool returns the results) -# 3. Think about step 2. And then summarize the results received. -# 4. Think about step 3. And make a tool call to the webhook - -instructions = [ - "Consider the research topic and devise a search strategy using the provided tools.", - "Use the 'search' tool to find comprehensive information on the topic from various web sources.", - "Analyze the search results and create a concise summary highlighting the key points.", - "Send the summary and the detailed search results to the specified webhook endpoint for integration into our system.", -] - -task = client.tasks.create( - agent_id=agent.id, - instructions=instructions, - inputs={"topic": {"type": "string", "description": "Topic to research"}}, -) - -# Ask the agent to run this task - -run = client.runs.create( - agent_id=agent.id, task_id=task.id, inputs={"topic": "Sam Altman"} -) - - -async def main(): - async for step in run.execution_steps(): - print(step.messages) - - -# >>> [{"role": "thought", "content": "Starting the research on Sam Altman. I'll begin by gathering information from various sources on the web."}, {"role": "assistant", "tool_calls": [{"type": "search", "inputs": {"query": "Sam Altman significant contributions and background"}}]}] -# # Wait for 3-4 seconds - -# >>> [{"role": "system", "name": "information", "content": "Found numerous articles, interviews, and resources on Sam Altman, including his role at OpenAI, investments, and insights into technology and entrepreneurship."}] -# # Wait for 1 second - -# >>> [{"role": "thought", "content": "Need to summarize this information to capture the essence of Sam Altman's impact."}, {"role": "assistant", "content": "Summary:\nSam Altman, known for his leadership at OpenAI, has been a pivotal figure in the tech industry, driving innovation and supporting startups. His insights on entrepreneurship and the future of AI have influenced a wide audience."}] -# # Wait for 2 sec - -# >>> [{"role": "thought", "content": "Now, I'll send the compiled summary and details to the webhook."}, {"role": "assistant", "tool_calls": [{"type": "http", "endpoint": "http://localhost:9000", "method": "POST", "data": {"summary": "Sam Altman, known for his leadership at OpenAI, has been a pivotal figure in the tech industry, driving innovation and supporting startups. His insights on entrepreneurship and the future of AI have influenced a wide audience.", "details": "Found numerous articles, interviews, and resources on Sam Altman, including his role at OpenAI, investments, and insights into technology and entrepreneurship."}}]}] -# # Wait for 2 sec - -# # POST call should show up on the http.server -# # Wait for 1 second - -# >>> [{"role": "system", "name": "information", "content": "Delivered data to webhook"}] - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/agents-api/demo/julep.py b/agents-api/demo/julep.py deleted file mode 100644 index aaa42f401..000000000 --- a/agents-api/demo/julep.py +++ /dev/null @@ -1,132 +0,0 @@ -import asyncio -import uuid -import httpx - - -PORT = 9000 - - -class Step: - def __init__(self, d): - self._d = d - - @property - def messages(self): - return self._d.get("content", "") - - -class Client: - def __init__(self, base_url, api_key): - self.base_url = base_url - self.api_key = api_key - self.agents = AgentManager() - self.tasks = TaskManager() - self.runs = RunManager() - - -class AgentManager: - def create(self, name, description, tools): - return Agent(name, description, tools) - - -class Agent: - def __init__(self, name, description, tools): - self.id = str(uuid.uuid4()) - self.name = name - self.description = description - self.tools = tools - - -class TaskManager: - def create(self, agent_id, instructions, inputs): - return Task(agent_id, instructions, inputs) - - -class Task: - def __init__(self, agent_id, instructions, inputs): - self.id = str(uuid.uuid4()) - self.agent_id = agent_id - self.instructions = instructions - self.inputs = inputs - - -class RunManager: - def create(self, agent_id, task_id, inputs): - return Run(agent_id, task_id, inputs) - - -class Run: - def __init__(self, agent_id, task_id, inputs): - self.agent_id = agent_id - self.task_id = task_id - self.inputs = inputs - - async def execution_steps(self): - steps = [ - { - "role": "thought", - "content": "Starting the research on Sam Altman. I'll begin by gathering information from various sources on the web.", - }, - { - "role": "assistant", - "tool_calls": [ - { - "type": "search", - "inputs": { - "query": "Sam Altman significant contributions and background" - }, - } - ], - }, - { - "role": "system", - "name": "information", - "content": "Found numerous articles, interviews, and resources on Sam Altman, including his role at OpenAI, investments, and insights into technology and entrepreneurship.", - }, - { - "role": "thought", - "content": "Need to summarize this information to capture the essence of Sam Altman's impact.", - }, - { - "role": "assistant", - "content": "Summary:\nSam Altman, known for his leadership at OpenAI, has been a pivotal figure in the tech industry, driving innovation and supporting startups. His insights on entrepreneurship and the future of AI have influenced a wide audience.", - }, - { - "role": "thought", - "content": "Now, I'll send the compiled summary and details to the webhook.", - }, - { - "role": "assistant", - "tool_calls": [ - { - "type": "http", - "endpoint": f"http://localhost:{PORT}", - "method": "POST", - "data": { - "summary": "Sam Altman, known for his leadership at OpenAI, has been a pivotal figure in the tech industry, driving innovation and supporting startups. His insights on entrepreneurship and the future of AI have influenced a wide audience.", - "details": "Found numerous articles, interviews, and resources on Sam Altman, including his role at OpenAI, investments, and insights into technology and entrepreneurship.", - }, - } - ], - }, - # TODO: @dmitry - # Add a HTTP POST call after this step to 0.0.0.0:8080 with the results - { - "role": "system", - "name": "information", - "content": "Delivered data to webhook", - }, - ] - - for step in steps: - if "tool_calls" in step: - # Simulate tool call delay - await asyncio.sleep(3 if step["role"] == "assistant" else 2) - for c in step["tool_calls"]: - if c.get("endpoint"): - async with httpx.AsyncClient() as client: - await client.post(c["endpoint"], json=c["data"]) - else: - # Simulate thought and system message delay - await asyncio.sleep(1) - yield Step(step) diff --git a/agents-api/demo/requirements.txt b/agents-api/demo/requirements.txt deleted file mode 100644 index 79228389f..000000000 --- a/agents-api/demo/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -httpx \ No newline at end of file diff --git a/agents-api/docker-compose.yml b/agents-api/docker-compose.yml index 150473c3c..96334e1e8 100644 --- a/agents-api/docker-compose.yml +++ b/agents-api/docker-compose.yml @@ -59,9 +59,9 @@ services: container_name: text-embeddings-inference environment: - DTYPE=float16 - - MODEL_ID=BAAI/bge-m3 + - MODEL_ID=Alibaba-NLP/gte-large-en-v1.5 - image: ghcr.io/huggingface/text-embeddings-inference:1.3 + image: ghcr.io/huggingface/text-embeddings-inference:1.5 ports: - "8082:80" volumes: diff --git a/agents-api/migrations/migrate_1704699172_init.py b/agents-api/migrations/migrate_1704699172_init.py index 1f131c8a1..3a427ad48 100644 --- a/agents-api/migrations/migrate_1704699172_init.py +++ b/agents-api/migrations/migrate_1704699172_init.py @@ -19,7 +19,7 @@ def up(client): => name: String, about: String, - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), } diff --git a/agents-api/migrations/migrate_1704699595_developers.py b/agents-api/migrations/migrate_1704699595_developers.py index e2c183520..d22edb393 100644 --- a/agents-api/migrations/migrate_1704699595_developers.py +++ b/agents-api/migrations/migrate_1704699595_developers.py @@ -29,7 +29,7 @@ def up(client): => name: String, about: String, - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), } @@ -99,7 +99,7 @@ def down(client): => name: String, about: String, - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), } diff --git a/agents-api/migrations/migrate_1709631202_metadata.py b/agents-api/migrations/migrate_1709631202_metadata.py index b5c220cb3..36c1c8ec4 100644 --- a/agents-api/migrations/migrate_1709631202_metadata.py +++ b/agents-api/migrations/migrate_1709631202_metadata.py @@ -22,7 +22,7 @@ => name: String, about: String, - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), metadata: Json default {}, @@ -45,7 +45,7 @@ => name: String, about: String, - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), } diff --git a/agents-api/migrations/migrate_1712405369_simplify_instructions.py b/agents-api/migrations/migrate_1712405369_simplify_instructions.py index ee3a87da1..b3f8a289a 100644 --- a/agents-api/migrations/migrate_1712405369_simplify_instructions.py +++ b/agents-api/migrations/migrate_1712405369_simplify_instructions.py @@ -24,7 +24,7 @@ name: String, about: String, instructions: [String] default [], - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), metadata: Json default {}, @@ -47,7 +47,7 @@ => name: String, about: String, - model: String default 'julep-ai/samantha-1-turbo', + model: String default 'gpt-4o', created_at: Float default now(), updated_at: Float default now(), metadata: Json default {}, diff --git a/agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py b/agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py new file mode 100644 index 000000000..a56bce674 --- /dev/null +++ b/agents-api/migrations/migrate_1722710530_unify_owner_doc_relations.py @@ -0,0 +1,204 @@ +# /usr/bin/env python3 + +MIGRATION_ID = "unify_owner_doc_relations" +CREATED_AT = 1722710530.126563 + +create_docs_relations_query = dict( + up=""" + :create docs { + owner_type: String, + owner_id: Uuid, + doc_id: Uuid, + => + title: String, + created_at: Float default now(), + metadata: Json default {}, + } + """, + down="::remove docs", +) + +remove_user_docs_table = dict( + up=""" + doc_title[doc_id, unique(title)] := + *snippets { + doc_id, + title, + } + + ?[owner_type, owner_id, doc_id, title, created_at, metadata] := + owner_type = "user", + *user_docs { + user_id: owner_id, + doc_id, + created_at, + metadata, + }, + doc_title[doc_id, title] + + :insert docs { + owner_type, + owner_id, + doc_id, + title, + created_at, + metadata, + } + + } { # <-- this is just a separator between the two queries + ::remove user_docs + """, + down=""" + :create user_docs { + user_id: Uuid, + doc_id: Uuid + => + created_at: Float default now(), + metadata: Json default {}, + } + """, +) + +remove_agent_docs_table = dict( + up=remove_user_docs_table["up"].replace("user", "agent"), + down=remove_user_docs_table["down"].replace("user", "agent"), +) + +# See: https://github.com/nmslib/hnswlib/blob/master/ALGO_PARAMS.md +snippets_hnsw_index = dict( + up=""" + ::hnsw create snippets:embedding_space { + fields: [embedding], + filter: !is_null(embedding), + dim: 1024, + distance: Cosine, + m: 64, + ef_construction: 256, + extend_candidates: true, + keep_pruned_connections: false, + } + """, + down=""" + ::hnsw drop snippets:embedding_space + """, +) + +# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts +snippets_fts_index = dict( + up=""" + ::fts create snippets:fts { + extractor: content, + tokenizer: Simple, + filters: [Lowercase, Stemmer('english'), Stopwords('en')], + } + """, + down=""" + ::fts drop snippets:fts + """, +) + +temp_rename_snippets_table = dict( + up=""" + ::rename snippets -> information_snippets + """, + down=""" + ::rename information_snippets -> snippets + """, +) + +temp_rename_snippets_table_back = dict( + up=temp_rename_snippets_table["down"], + down=temp_rename_snippets_table["up"], +) + +drop_snippets_hnsw_index = { + "up": snippets_hnsw_index["down"].replace("snippets:", "information_snippets:"), + "down": snippets_hnsw_index["up"].replace("snippets:", "information_snippets:"), +} + +drop_snippets_fts_index = dict( + up=""" + ::fts drop information_snippets:fts + """, + down=""" + ::fts create information_snippets:fts { + extractor: concat(title, ' ', snippet), + tokenizer: Simple, + filters: [Lowercase, Stemmer('english'), Stopwords('en')], + } + """, +) + + +remove_title_from_snippets_table = dict( + up=""" + ?[doc_id, index, content, embedding] := + *snippets { + doc_id, + snippet_idx: index, + snippet: content, + embedding, + } + + :replace snippets { + doc_id: Uuid, + index: Int, + => + content: String, + embedding: ? default null, + } + """, + down=""" + ?[doc_id, snippet_idx, title, snippet, embedding] := + *snippets { + doc_id, + index: snippet_idx, + content: snippet, + embedding, + }, + *docs { + doc_id, + title, + } + + :replace snippets { + doc_id: Uuid, + snippet_idx: Int, + => + title: String, + snippet: String, + embed_instruction: String default 'Encode this passage for retrieval: ', + embedding: ? default null, + } + """, +) + +queries = [ + create_docs_relations_query, + remove_user_docs_table, + remove_agent_docs_table, + temp_rename_snippets_table, # Because of a bug in Cozo + drop_snippets_hnsw_index, + drop_snippets_fts_index, + temp_rename_snippets_table_back, # Because of a bug in Cozo + remove_title_from_snippets_table, + snippets_fts_index, + snippets_hnsw_index, +] + + +def run(client, queries): + joiner = "}\n\n{" + + query = joiner.join(queries) + query = f"{{\n{query}\n}}" + + client.run(query) + + +def up(client): + run(client, [q["up"] for q in queries]) + + +def down(client): + run(client, [q["down"] for q in reversed(queries)]) diff --git a/agents-api/migrations/migrate_1722875101_add_temporal_mapping.py b/agents-api/migrations/migrate_1722875101_add_temporal_mapping.py new file mode 100644 index 000000000..b38a3717c --- /dev/null +++ b/agents-api/migrations/migrate_1722875101_add_temporal_mapping.py @@ -0,0 +1,40 @@ +# /usr/bin/env python3 + +MIGRATION_ID = "add_temporal_mapping" +CREATED_AT = 1722875101.262791 + + +def run(client, queries): + joiner = "}\n\n{" + + query = joiner.join(queries) + query = f"{{\n{query}\n}}" + client.run(query) + + +create_temporal_executions_lookup = dict( + up=""" + :create temporal_executions_lookup { + execution_id: Uuid, + id: String, + => + run_id: String?, + first_execution_run_id: String?, + result_run_id: String?, + created_at: Float default now(), + } + """, + down="::remove temporal_executions_lookup", +) + +queries = [ + create_temporal_executions_lookup, +] + + +def up(client): + run(client, [q["up"] for q in queries]) + + +def down(client): + run(client, [q["down"] for q in reversed(queries)]) diff --git a/agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py b/agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py new file mode 100644 index 000000000..01eaa8a60 --- /dev/null +++ b/agents-api/migrations/migrate_1723307805_add_lsh_index_to_docs.py @@ -0,0 +1,44 @@ +# /usr/bin/env python3 + +MIGRATION_ID = "add_lsh_index_to_docs" +CREATED_AT = 1723307805.007054 + +# See: https://docs.cozodb.org/en/latest/vector.html#full-text-search-fts +snippets_lsh_index = dict( + up=""" + ::lsh create snippets:lsh { + extractor: content, + tokenizer: Simple, + filters: [Stopwords('en')], + n_perm: 200, + target_threshold: 0.9, + n_gram: 3, + false_positive_weight: 1.0, + false_negative_weight: 1.0, + } + """, + down=""" + ::lsh drop snippets:lsh + """, +) + +queries = [ + snippets_lsh_index, +] + + +def run(client, queries): + joiner = "}\n\n{" + + query = joiner.join(queries) + query = f"{{\n{query}\n}}" + + client.run(query) + + +def up(client): + run(client, [q["up"] for q in queries]) + + +def down(client): + run(client, [q["down"] for q in reversed(queries)]) diff --git a/agents-api/migrations/migrate_1723400730_add_settings_to_developers.py b/agents-api/migrations/migrate_1723400730_add_settings_to_developers.py new file mode 100644 index 000000000..e10e71510 --- /dev/null +++ b/agents-api/migrations/migrate_1723400730_add_settings_to_developers.py @@ -0,0 +1,68 @@ +# /usr/bin/env python3 + +MIGRATION_ID = "add_settings_to_developers" +CREATED_AT = 1723400730.539554 + + +def up(client): + client.run( + """ + ?[ + developer_id, + email, + active, + tags, + settings, + created_at, + updated_at, + ] := *developers { + developer_id, + email, + active, + created_at, + updated_at, + }, + tags = [], + settings = {} + + :replace developers { + developer_id: Uuid, + => + email: String, + active: Bool default true, + tags: [String] default [], + settings: Json, + created_at: Float default now(), + updated_at: Float default now(), + } + """ + ) + + +def down(client): + client.run( + """ + ?[ + developer_id, + email, + active, + created_at, + updated_at, + ] := *developers { + developer_id, + email, + active, + created_at, + updated_at, + } + + :replace developers { + developer_id: Uuid, + => + email: String, + active: Bool default true, + created_at: Float default now(), + updated_at: Float default now(), + } + """ + ) diff --git a/agents-api/poetry.lock b/agents-api/poetry.lock index 64c74e3b6..b703a0814 100644 --- a/agents-api/poetry.lock +++ b/agents-api/poetry.lock @@ -2,104 +2,118 @@ [[package]] name = "aiohappyeyeballs" -version = "2.3.4" +version = "2.4.0" description = "Happy Eyeballs for asyncio" optional = false -python-versions = "<4.0,>=3.8" +python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.3.4-py3-none-any.whl", hash = "sha256:40a16ceffcf1fc9e142fd488123b2e218abc4188cf12ac20c67200e1579baa42"}, - {file = "aiohappyeyeballs-2.3.4.tar.gz", hash = "sha256:7e1ae8399c320a8adec76f6c919ed5ceae6edd4c3672f4d9eae2b27e37c80ff6"}, + {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, + {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, ] [[package]] name = "aiohttp" -version = "3.10.0" +version = "3.10.5" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:68ab608118e212f56feef44d4785aa90b713042da301f26338f36497b481cd79"}, - {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:64a117c16273ca9f18670f33fc7fd9604b9f46ddb453ce948262889a6be72868"}, - {file = "aiohttp-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54076a25f32305e585a3abae1f0ad10646bec539e0e5ebcc62b54ee4982ec29f"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71c76685773444d90ae83874433505ed800e1706c391fdf9e57cc7857611e2f4"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdda86ab376f9b3095a1079a16fbe44acb9ddde349634f1c9909d13631ff3bcf"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6dcd1d21da5ae1416f69aa03e883a51e84b6c803b8618cbab341ac89a85b9e"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ef0135d7ab7fb0284342fbbf8e8ddf73b7fee8ecc55f5c3a3d0a6b765e6d8b"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccab9381f38c669bb9254d848f3b41a3284193b3e274a34687822f98412097e9"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:947da3aee057010bc750b7b4bb65cbd01b0bdb7c4e1cf278489a1d4a1e9596b3"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5268b35fee7eb754fb5b3d0f16a84a2e9ed21306f5377f3818596214ad2d7714"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ff25d988fd6ce433b5c393094a5ca50df568bdccf90a8b340900e24e0d5fb45c"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:594b4b4f1dfe8378b4a0342576dc87a930c960641159f5ae83843834016dbd59"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c8820dad615cd2f296ed3fdea8402b12663ac9e5ea2aafc90ef5141eb10b50b8"}, - {file = "aiohttp-3.10.0-cp310-cp310-win32.whl", hash = "sha256:ab1d870403817c9a0486ca56ccbc0ebaf85d992277d48777faa5a95e40e5bcca"}, - {file = "aiohttp-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:563705a94ea3af43467167f3a21c665f3b847b2a0ae5544fa9e18df686a660da"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13679e11937d3f37600860de1f848e2e062e2b396d3aa79b38c89f9c8ab7e791"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c66a1aadafbc0bd7d648cb7fcb3860ec9beb1b436ce3357036a4d9284fcef9a"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7e3545b06aae925f90f06402e05cfb9c62c6409ce57041932163b09c48daad6"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:effafe5144aa32f0388e8f99b1b2692cf094ea2f6b7ceca384b54338b77b1f50"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a04f2c8d41821a2507b49b2694c40495a295b013afb0cc7355b337980b47c546"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dbfac556219d884d50edc6e1952a93545c2786193f00f5521ec0d9d464040ab"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a65472256c5232681968deeea3cd5453aa091c44e8db09f22f1a1491d422c2d9"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941366a554e566efdd3f042e17a9e461a36202469e5fd2aee66fe3efe6412aef"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:927b4aca6340301e7d8bb05278d0b6585b8633ea852b7022d604a5df920486bf"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:34adb8412e736a5d0df6d1fccdf71599dfb07a63add241a94a189b6364e997f1"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:43c60d9b332a01ee985f080f639f3e56abcfb95ec1320013c94083c3b6a2e143"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3f49edf7c5cd2987634116e1b6a0ee2438fca17f7c4ee480ff41decb76cf6158"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9784246431eaf9d651b3cc06f9c64f9a9f57299f4971c5ea778fa0b81074ef13"}, - {file = "aiohttp-3.10.0-cp311-cp311-win32.whl", hash = "sha256:bec91402df78b897a47b66b9c071f48051cea68d853d8bc1d4404896c6de41ae"}, - {file = "aiohttp-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:25a9924343bf91b0c5082cae32cfc5a1f8787ac0433966319ec07b0ed4570722"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:21dab4a704c68dc7bc2a1219a4027158e8968e2079f1444eda2ba88bc9f2895f"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:872c0dcaccebd5733d535868fe2356aa6939f5827dcea7a8b9355bb2eff6f56e"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f381424dbce313bb5a666a215e7a9dcebbc533e9a2c467a1f0c95279d24d1fa7"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca48e9f092a417c6669ee8d3a19d40b3c66dde1a2ae0d57e66c34812819b671"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbe2f6d0466f5c59c7258e0745c20d74806a1385fbb7963e5bbe2309a11cc69b"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03799a95402a7ed62671c4465e1eae51d749d5439dbc49edb6eee52ea165c50b"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5549c71c35b5f057a4eebcc538c41299826f7813f28880722b60e41c861a57ec"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6fa7a42b78d8698491dc4ad388169de54cca551aa9900f750547372de396277"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:77bbf0a2f6fefac6c0db1792c234f577d80299a33ce7125467439097cf869198"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:34eaf5cfcc979846d73571b1a4be22cad5e029d55cdbe77cdc7545caa4dcb925"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4f1de31a585344a106db43a9c3af2e15bb82e053618ff759f1fdd31d82da38eb"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3a1ea61d96146e9b9e5597069466e2e4d9e01e09381c5dd51659f890d5e29e7"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:73c01201219eb039a828bb58dcc13112eec2fed6eea718356316cd552df26e04"}, - {file = "aiohttp-3.10.0-cp312-cp312-win32.whl", hash = "sha256:33e915971eee6d2056d15470a1214e4e0f72b6aad10225548a7ab4c4f54e2db7"}, - {file = "aiohttp-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2dc75da06c35a7b47a88ceadbf993a53d77d66423c2a78de8c6f9fb41ec35687"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f1bc4d68b83966012813598fe39b35b4e6019b69d29385cf7ec1cb08e1ff829b"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b8b31c057a0b7bb822a159c490af05cb11b8069097f3236746a78315998afa"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10f0d7894ddc6ff8f369e3fdc082ef1f940dc1f5b9003cd40945d24845477220"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72de8ffba4a27e3c6e83e58a379fc4fe5548f69f9b541fde895afb9be8c31658"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd36d0f0afc2bd84f007cedd2d9a449c3cf04af471853a25eb71f28bc2e1a119"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f64d503c661864866c09806ac360b95457f872d639ca61719115a9f389b2ec90"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31616121369bc823791056c632f544c6c8f8d1ceecffd8bf3f72ef621eaabf49"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f76c12abb88b7ee64b3f9ae72f0644af49ff139067b5add142836dab405d60d4"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6c99eef30a7e98144bcf44d615bc0f445b3a3730495fcc16124cb61117e1f81e"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:39e7ec718e7a1971a5d98357e3e8c0529477d45c711d32cd91999dc8d8404e1e"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1cef548ee4e84264b78879de0c754bbe223193c6313beb242ce862f82eab184"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f98f036eab11d2f90cdd01b9d1410de9d7eb520d070debeb2edadf158b758431"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc4376ff537f7d2c1e98f97f6d548e99e5d96078b0333c1d3177c11467b972de"}, - {file = "aiohttp-3.10.0-cp38-cp38-win32.whl", hash = "sha256:ebedc51ee6d39f9ea5e26e255fd56a7f4e79a56e77d960f9bae75ef4f95ed57f"}, - {file = "aiohttp-3.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:aad87626f31a85fd4af02ba7fd6cc424b39d4bff5c8677e612882649da572e47"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1dc95c5e2a5e60095f1bb51822e3b504e6a7430c9b44bff2120c29bb876c5202"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c83977f7b6f4f4a96fab500f5a76d355f19f42675224a3002d375b3fb309174"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8cedc48d36652dd3ac40e5c7c139d528202393e341a5e3475acedb5e8d5c4c75"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b099fbb823efed3c1d736f343ac60d66531b13680ee9b2669e368280f41c2b8"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d583755ddb9c97a2da1322f17fc7d26792f4e035f472d675e2761c766f94c2ff"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a03a4407bdb9ae815f0d5a19df482b17df530cf7bf9c78771aa1c713c37ff1f"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb6e65f6ea7caa0188e36bebe9e72b259d3d525634758c91209afb5a6cbcba7"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6612c6ed3147a4a2d6463454b94b877566b38215665be4c729cd8b7bdce15b4"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b0c0148d2a69b82ffe650c2ce235b431d49a90bde7dd2629bcb40314957acf6"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0d85a173b4dbbaaad1900e197181ea0fafa617ca6656663f629a8a372fdc7d06"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:12c43dace645023583f3dd2337dfc3aa92c99fb943b64dcf2bc15c7aa0fb4a95"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:33acb0d9bf12cdc80ceec6f5fda83ea7990ce0321c54234d629529ca2c54e33d"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:91e0b76502205484a4d1d6f25f461fa60fe81a7987b90e57f7b941b0753c3ec8"}, - {file = "aiohttp-3.10.0-cp39-cp39-win32.whl", hash = "sha256:1ebd8ed91428ffbe8b33a5bd6f50174e11882d5b8e2fe28670406ab5ee045ede"}, - {file = "aiohttp-3.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:0433795c4a8bafc03deb3e662192250ba5db347c41231b0273380d2f53c9ea0b"}, - {file = "aiohttp-3.10.0.tar.gz", hash = "sha256:e8dd7da2609303e3574c95b0ec9f1fd49647ef29b94701a2862cceae76382e1d"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"}, + {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"}, + {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"}, + {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"}, + {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"}, + {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"}, + {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"}, + {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"}, + {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"}, + {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"}, + {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"}, + {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"}, + {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"}, + {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"}, + {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"}, + {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"}, + {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"}, + {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"}, + {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"}, + {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"}, + {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"}, + {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"}, + {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"}, + {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"}, + {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"}, + {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"}, + {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"}, + {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"}, + {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"}, + {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"}, + {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"}, + {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"}, ] [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" @@ -122,24 +136,6 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" -[[package]] -name = "aiosqlite" -version = "0.20.0" -description = "asyncio bridge to the standard sqlite3 module" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, - {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, -] - -[package.dependencies] -typing_extensions = ">=4.0" - -[package.extras] -dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"] -docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] - [[package]] name = "annotated-types" version = "0.7.0" @@ -163,10 +159,8 @@ files = [ ] [package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -186,13 +180,13 @@ files = [ [[package]] name = "argcomplete" -version = "3.4.0" +version = "3.5.0" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" files = [ - {file = "argcomplete-3.4.0-py3-none-any.whl", hash = "sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5"}, - {file = "argcomplete-3.4.0.tar.gz", hash = "sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f"}, + {file = "argcomplete-3.5.0-py3-none-any.whl", hash = "sha256:d4bcf3ff544f51e16e54228a7ac7f486ed70ebf2ecfe49a63a91171c76bf029b"}, + {file = "argcomplete-3.5.0.tar.gz", hash = "sha256:4349400469dccfb7950bb60334a680c58d88699bff6159df61251878dc6bf74b"}, ] [package.extras] @@ -303,48 +297,34 @@ files = [ {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -426,8 +406,6 @@ mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -453,17 +431,6 @@ webencodings = "*" [package.extras] css = ["tinycss2 (>=1.1.0,<1.3)"] -[[package]] -name = "cachetools" -version = "5.4.0" -description = "Extensible memoizing collections and decorators" -optional = false -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, -] - [[package]] name = "certifi" version = "2024.7.4" @@ -477,63 +444,78 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -685,17 +667,6 @@ click = "*" [package.extras] test = ["pytest"] -[[package]] -name = "cloudpickle" -version = "3.0.0" -description = "Pickler class to extend the standard pickle.Pickler functionality" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, - {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, -] - [[package]] name = "colorama" version = "0.4.6" @@ -772,60 +743,15 @@ files = [ [package.extras] develop = ["coverage", "invoke", "path.py", "pylint", "pytest (>=3.2)", "pytest-html (>=1.19.0)", "tox (>=2.9)"] -[[package]] -name = "dask" -version = "2024.7.1" -description = "Parallel PyData with Task Scheduling" -optional = false -python-versions = ">=3.9" -files = [ - {file = "dask-2024.7.1-py3-none-any.whl", hash = "sha256:dd046840050376c317de90629db5c6197adda820176cf3e2df10c3219d11951f"}, - {file = "dask-2024.7.1.tar.gz", hash = "sha256:dbaef2d50efee841a9d981a218cfeb50392fc9a95e0403b6d680450e4f50d531"}, -] - -[package.dependencies] -click = ">=8.1" -cloudpickle = ">=1.5.0" -distributed = {version = "2024.7.1", optional = true, markers = "extra == \"distributed\""} -fsspec = ">=2021.09.0" -importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} -packaging = ">=20.0" -partd = ">=1.4.0" -pyyaml = ">=5.3.1" -toolz = ">=0.10.0" - -[package.extras] -array = ["numpy (>=1.21)"] -complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] -dataframe = ["dask-expr (>=1.1,<1.2)", "dask[array]", "pandas (>=2.0)"] -diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] -distributed = ["distributed (==2024.7.1)"] -test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist"] - -[[package]] -name = "dataclasses-json" -version = "0.6.7" -description = "Easily serialize dataclasses to and from JSON." -optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, - {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, -] - -[package.dependencies] -marshmallow = ">=3.18.0,<4.0.0" -typing-inspect = ">=0.4.0,<1" - [[package]] name = "datamodel-code-generator" -version = "0.25.8" +version = "0.25.9" description = "Datamodel Code Generator" optional = false python-versions = "<4.0,>=3.7" files = [ - {file = "datamodel_code_generator-0.25.8-py3-none-any.whl", hash = "sha256:f9b216efad84d8dcb517273d2728875b6052b7e8dc4e5c13a597441cef236f6e"}, - {file = "datamodel_code_generator-0.25.8.tar.gz", hash = "sha256:b7838122b8133dae6e46f36a1cf25c0ccc66745da057988f490d00ab71121de7"}, + {file = "datamodel_code_generator-0.25.9-py3-none-any.whl", hash = "sha256:9e0324233123d6e39a35bc0004771956935889a974aacfd7a0651de11d2219a9"}, + {file = "datamodel_code_generator-0.25.9.tar.gz", hash = "sha256:65ca9807d8edbd88a7f7931c10f4bc1c08bd9bbc5bb0508418a2b6a16590eb65"}, ] [package.dependencies] @@ -836,9 +762,8 @@ inflect = ">=4.1.0,<6.0" isort = ">=4.3.21,<6.0" jinja2 = ">=2.10.1,<4.0" packaging = "*" -pydantic = {version = ">=1.9.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.10\" and python_version < \"3.11\""} +pydantic = {version = ">=1.10.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.11\" and python_version < \"4.0\""} pyyaml = ">=6.0.1" -toml = {version = ">=0.10.0,<1.0.0", markers = "python_version < \"3.11\""} [package.extras] debug = ["PySnooper (>=0.4.1,<2.0.0)"] @@ -848,33 +773,33 @@ validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] [[package]] name = "debugpy" -version = "1.8.2" +version = "1.8.5" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"}, - {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"}, - {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"}, - {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"}, - {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"}, - {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"}, - {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"}, - {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"}, - {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"}, - {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"}, - {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"}, - {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"}, - {file = "debugpy-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d"}, - {file = "debugpy-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02"}, - {file = "debugpy-1.8.2-cp38-cp38-win32.whl", hash = "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031"}, - {file = "debugpy-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210"}, - {file = "debugpy-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9"}, - {file = "debugpy-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1"}, - {file = "debugpy-1.8.2-cp39-cp39-win32.whl", hash = "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326"}, - {file = "debugpy-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755"}, - {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"}, - {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"}, + {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, + {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, + {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, + {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, + {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, + {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, + {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, + {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, + {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, + {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, + {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, + {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, + {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, + {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, + {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, + {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, + {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, + {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, + {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, + {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, + {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, + {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, ] [[package]] @@ -888,17 +813,6 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -[[package]] -name = "deepmerge" -version = "1.1.1" -description = "a toolset to deeply merge python dictionaries." -optional = false -python-versions = "*" -files = [ - {file = "deepmerge-1.1.1-py3-none-any.whl", hash = "sha256:7219dad9763f15be9dcd4bcb53e00f48e4eed6f5ed8f15824223eb934bb35977"}, - {file = "deepmerge-1.1.1.tar.gz", hash = "sha256:53a489dc9449636e480a784359ae2aab3191748c920649551c8e378622f0eca4"}, -] - [[package]] name = "defusedxml" version = "0.7.1" @@ -910,34 +824,6 @@ files = [ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] -[[package]] -name = "distributed" -version = "2024.7.1" -description = "Distributed scheduler for Dask" -optional = false -python-versions = ">=3.9" -files = [ - {file = "distributed-2024.7.1-py3-none-any.whl", hash = "sha256:d5ac38d9682c191e6582c86ebf37c10d7adb60bf4a95048a05ae4fb0866119bc"}, - {file = "distributed-2024.7.1.tar.gz", hash = "sha256:7bce7fa745163b55bdd67fd632b3edf57b31827640390b92d0ee3f73436429d3"}, -] - -[package.dependencies] -click = ">=8.0" -cloudpickle = ">=1.5.0" -dask = "2024.7.1" -jinja2 = ">=2.10.3" -locket = ">=1.0.0" -msgpack = ">=1.0.0" -packaging = ">=20.0" -psutil = ">=5.7.2" -pyyaml = ">=5.3.1" -sortedcontainers = ">=2.0.5" -tblib = ">=1.6.0" -toolz = ">=0.10.0" -tornado = ">=6.0.4" -urllib3 = ">=1.24.3" -zict = ">=3.0.0" - [[package]] name = "distro" version = "1.9.0" @@ -969,17 +855,6 @@ idna = ["idna (>=3.6)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] -[[package]] -name = "docstring-parser" -version = "0.16" -description = "Parse Python docstrings in reST, Google and Numpydoc format" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, - {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, -] - [[package]] name = "email-validator" version = "2.2.0" @@ -1016,20 +891,6 @@ django = ["dj-database-url", "dj-email-url", "django-cache-url"] lint = ["flake8 (==7.0.0)", "flake8-bugbear (==23.11.28)", "mypy (==1.8.0)", "pre-commit (>=3.6,<4.0)"] tests = ["environs[django]", "pytest"] -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "executing" version = "2.0.1" @@ -1044,62 +905,25 @@ files = [ [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] -[[package]] -name = "faiss-cpu" -version = "1.8.0" -description = "A library for efficient similarity search and clustering of dense vectors." -optional = false -python-versions = ">=3.8" -files = [ - {file = "faiss-cpu-1.8.0.tar.gz", hash = "sha256:3ee1549491728f37b65267c192a94661a907154a8ae0546ad50a564b8be0d82e"}, - {file = "faiss_cpu-1.8.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:134a064c7411acf7d1d863173a9d2605c5a59bd573639ab39a5ded5ca983b1b2"}, - {file = "faiss_cpu-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba8e6202d561ac57394c9d691ff17f8fa6eb9a077913a993fce0a154ec0176f1"}, - {file = "faiss_cpu-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a66e9fa7b70556a39681f06e0652f4124c8ddb0a1924afe4f0e40b6924dc845b"}, - {file = "faiss_cpu-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51aaef5a1255d0ea88ea7e52a2415f98c5dd2dd9cec10348d55136541eeec99f"}, - {file = "faiss_cpu-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:38152761242870ec7019e0397cbd0ed0b0716562029ce41a71bb38448bd6d5bc"}, - {file = "faiss_cpu-1.8.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c9e6ad94b86626be1a0faff3e53c4ca169eba88aa156d7e90c5a2e9ba30558fb"}, - {file = "faiss_cpu-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4601dbd81733bf1bc3bff690aac981289fb386dc8e60d0c4eec8a37ba6856d20"}, - {file = "faiss_cpu-1.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa943d3b5e8c5c77cdd629d9c3c6f78d7da616e586fdd1b94aecbf2e5fa9ba06"}, - {file = "faiss_cpu-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b644b366c3b239b34fa3e08bf65bfc78a24eda1e1ea5b2b6d9be3e8fc73d8179"}, - {file = "faiss_cpu-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:f85ecf3514850f93985be238351f5a70736133cfae784b372640aa17c6343a1b"}, - {file = "faiss_cpu-1.8.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:61abc0129a357ac00f17f5167f14dff41480de2cc852f306c3d4cd36b893ccbd"}, - {file = "faiss_cpu-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b788186d6eb94e6333e1aa8bb6c84b66e967458ecdd1cee22e16f04c43ee674c"}, - {file = "faiss_cpu-1.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5658d90a202c62e4a69c5b065785e9ddcaf6986cb395c16afed8dbe4c58c31a2"}, - {file = "faiss_cpu-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d460a372efce547e53d3c47d2c2a8a90b186ad245969048c10c1d7a1e5cf21b"}, - {file = "faiss_cpu-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:9e6520324f0a6764dd267b3c32c76958bf2b1ec36752950f6fab31a7295980a0"}, - {file = "faiss_cpu-1.8.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fc44be179d5b7f690484ef0d0caf817fea2698a5275a0c7fb6cbf406e5b2e4d1"}, - {file = "faiss_cpu-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbd6f0bc2e1424a12dc7e19d2cc95b53124867966b21110d26f909227e7ed1f1"}, - {file = "faiss_cpu-1.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06e7add0c8a06ce8fb0443c38fcaf49c45fb74527ea633b819e56452608e64f5"}, - {file = "faiss_cpu-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b864e23c1817fa6cfe9bbec096fd7140d596002934f71aa89b196ffb1b9cd846"}, - {file = "faiss_cpu-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:655433755845adbb6f0961e2f8980703640cb9faa96f1cd1ea190252149e0d0a"}, - {file = "faiss_cpu-1.8.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:e81fc376a3bcda213ffb395dda1018c953ce927c587731ad582f4e6c2b225363"}, - {file = "faiss_cpu-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8c6fa6b7eaf558307b4ab118a236e8d1da79a8685222928e4dd52e277dba144a"}, - {file = "faiss_cpu-1.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:652f6812ef2e8b0f9b18209828c590bc618aca82e7f1c1b1888f52928258e406"}, - {file = "faiss_cpu-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:304da4e0d19044374b63a5b6467028572eac4bd3f32bc9e8783d800a03fb1f02"}, - {file = "faiss_cpu-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:cb475d3f25f08c97ac64dfe026f113e2aeb9829b206b3b046256c3b40dd7eb62"}, -] - -[package.dependencies] -numpy = "*" - [[package]] name = "fastapi" -version = "0.110.3" +version = "0.112.1" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.110.3-py3-none-any.whl", hash = "sha256:fd7600612f755e4050beb74001310b5a7e1796d149c2ee363124abdfa0289d32"}, - {file = "fastapi-0.110.3.tar.gz", hash = "sha256:555700b0159379e94fdbfc6bb66a0f1c43f4cf7060f25239af3d84b63a656626"}, + {file = "fastapi-0.112.1-py3-none-any.whl", hash = "sha256:bcbd45817fc2a1cd5da09af66815b84ec0d3d634eb173d1ab468ae3103e183e4"}, + {file = "fastapi-0.112.1.tar.gz", hash = "sha256:b2537146f8c23389a7faa8b03d0bd38d4986e6983874557d95eed2acc46448ef"}, ] [package.dependencies] pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" +starlette = ">=0.37.2,<0.39.0" typing-extensions = ">=4.8.0" [package.extras] -all = ["email_validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +all = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email_validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "fastjsonschema" @@ -1293,468 +1117,65 @@ files = [ ] [[package]] -name = "google-api-core" -version = "2.19.1" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -grpcio-status = {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""} -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - -[[package]] -name = "google-auth" -version = "2.32.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, - {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, +name = "google-re2" +version = "1.1.20240702" +description = "RE2 Python bindings" +optional = false +python-versions = "~=3.8" +files = [ + {file = "google_re2-1.1.20240702-1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:46e7ed614ffaafccae017542d68e9bbf664c8c1e5ca37046adee640bbee4846e"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:3c8d2c0a03e9fd24f78b624cf7e40ac32aaf4837fda7339e2c22ca42e3dca512"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:7fee39772aa2e1fe91b7694acc48888ac6fa0ca51f8805464272a2089b362c96"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:b2bcf1a43853cee5a088f40c75fe48a6e3ec7addae1d3f3d47ce679e2bb8936b"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:69f9b156de6f93ea00844f6cd4f2ed5124f9f01692da7ae0fe9a9516df6c63c2"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f70db559ad768ad68a4d9897cb19fd13f7761e60208f475eb8a69b8aa4b6df20"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7bc4fa65ecec3d63ea6048ecaf8784560bbfb31191c02ffaa87771e4a2f813e1"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7480309b133c39f2afb19ff28bc30d27b364cbc56b5d46918d1b4f1fb2e13183"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-win32.whl", hash = "sha256:1950f499b277789267afee1755394cd959898d0b192b7052bb3186000aff27de"}, + {file = "google_re2-1.1.20240702-1-cp310-cp310-win_amd64.whl", hash = "sha256:2269ff8c2e1de0ee77736bd9f65b5c9f7cd43544eff825dc7b4ab2bf1f1901e4"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:9802a5a5ec585048300d5a8ec522b15057b8f758fe9f8b0ec65ac2927a36a1aa"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:757cbefbe9f998c274c94afd8bf2a4789b983287f33d4f975389c1027ed686c6"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:7e1d36bd20ce04c1198fe482b6f3ce7dd699e1276946a9a2cf31b2e53026a370"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:bb6b2e456cd0002700ad58c3474fc9e342853ff2ef9f95a1f6606c819ffaf3d9"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:71a71d7f9e616d3067e913a1432111593ee41aab2e0ed21ecbcf039451b9d899"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:16bd5389baeb98936fb05926e6a38826c473206c13f1f789f7643a29dcccccc3"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b920c1b0356f0359b35a0e6d0b5ff12fba9067d3c455a9811952fbc9a213268"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c2c39b7378e928d197e4fdf4a23c9338f29cad6d4c5c5c06a2ad7c8c2a3ebc"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-win32.whl", hash = "sha256:d7fd6b6be4f86d4b6503689b492970920f4b50a8ac02427bc975c73bcedda374"}, + {file = "google_re2-1.1.20240702-1-cp311-cp311-win_amd64.whl", hash = "sha256:22217d7c8f57bb6c5e74e171a510b12cdde4eddc2528f89aa0f50e3fc10fe17e"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:00dcb09b15f92b490ae52f328cca229de2a157c8748f10df94dfea7637d32617"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2ffc6fbe70ccf9fb66d0ab16ccad0f661031ceb0eec3d73d170cd782a93d62d5"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:652e517b6db6cbf7403bab370940718208b15e811fefe7635d4e78a8037f096b"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:449ae8286d644d24af8a6eb81eeba6995388581739920b80d9e4b063eefe0322"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:40568559b0a10240c10377fb5cdd46c9115da8a627c567db68c4dc29103a2ce9"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8139df33d61aac335726b6f32108e763ba2932569c63d2b3ebf6e36a40449223"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e4a2b88516b4140891a014c6e5b774f57db90c8bd0ccf0554e9f9b99ee1e942"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d95b1e06298299b28e23288a6bfd3c6f13e0f7a01c1f2e86e74073928676cf88"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-win32.whl", hash = "sha256:fb025d4bcd1a3032546da048a6dcb39359967f4df6b3514e76e983256235f694"}, + {file = "google_re2-1.1.20240702-1-cp312-cp312-win_amd64.whl", hash = "sha256:a7e3129d31e12d51397d603adf45bd696135a5d9d61bc33643bc5d2e4366070b"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:bc2f853ace690fb475f68b82b61e3b0ffe2a8603f052853eb21587ac7dcca537"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:974ac711ade3171004e9552d9e069cbe1a8de02c5e45a56101f8396f69a3e3c2"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:ad3dc0084cad59a298ffa52c9def2f1b5332d396d76f3828237ac7141b6e7e7d"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:4e13241b8df4096d840f98993f39c62cff0cdab9d06c86b156d2944cfb3f0814"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-macosx_14_0_arm64.whl", hash = "sha256:6887fe9c291ad42003ad84e11c0a6fac0169adbda9cbc898b8657610512e4ce5"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:82824fa71f51a269cd9bad653d53e6ba5bee9095da059455ee1c6cc7e4ba014b"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cde1453681c2ab1648b9e7aed3861ccedce52c85b24873edd1ec1e92b4b3d7d4"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7388c5aadcc5489291d2804ecc384c2e3bb64832e1b46afd44d7bca6c948b615"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-win32.whl", hash = "sha256:cb20853af1104b5180eb2daea66a481723553aa66bf5a5c4c58420c7369364cb"}, + {file = "google_re2-1.1.20240702-1-cp38-cp38-win_amd64.whl", hash = "sha256:a7f0d950ba9508ac1b2d89837f4a4c74092ae3af015a9797b80570ee87b7d7d5"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:56c2a97d3d38345939fb3ff02d154f5c6ec929e0765723cfd390720f581d2581"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:dfe657979ab96da72f55b03ecdede5467a7193266ce7a0b85013819f052d231f"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:c6e218b831dfc89f5004c1bb7ae9182ec5ddc4d46e6035f636ba96344d5b7478"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:304ed3f740541742e7ef5c162b36619efdac7345f1429ab6d70aefaae9a5658d"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:05f5683e1bcfac8adcc0dbfe3ecb0866cec6eea2c7d419271dfd72930b368ce4"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:39c642041428efaa48f35adf4475a014ce272f87a453c6dff68f2b05793d516f"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d3d1e58f374510101273cda1b6c2b45c178eb94f4c1bd17f7f750cea8d1c85a"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:386d2a8c4b10daaeda03adc7f65c457f67ee8cb18b4f9b4178a44ed62ab291df"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-win32.whl", hash = "sha256:f853c3c68bed0d127e6ef8b29ee85461d9d0a4fa407e3f97e41ecd6803d24d88"}, + {file = "google_re2-1.1.20240702-1-cp39-cp39-win_amd64.whl", hash = "sha256:5e35c8db1bf58ddf1ac28782d6dca5894a0331fc0d33b2a2ce6eb59234d74312"}, + {file = "google_re2-1.1.20240702.tar.gz", hash = "sha256:8788db69f6c93cb229df62c74b2d9aa8e64bf754e9495700f85812afa32efd2b"}, ] -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "google-cloud-aiplatform" -version = "1.60.0" -description = "Vertex AI API client library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "google-cloud-aiplatform-1.60.0.tar.gz", hash = "sha256:782c7f1ec0e77a7c7daabef3b65bfd506ed2b4b1dc2186753c43cd6faf8dd04e"}, - {file = "google_cloud_aiplatform-1.60.0-py2.py3-none-any.whl", hash = "sha256:5f14159c9575f4b46335027e3ceb8fa57bd5eaa76a07f858105b8c6c034ec0d6"}, -] - -[package.dependencies] -docstring-parser = "<1" -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" -google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" -google-cloud-storage = ">=1.32.0,<3.0.0dev" -packaging = ">=14.3" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" -pydantic = "<3" -shapely = "<3.0.0dev" - -[package.extras] -autologging = ["mlflow (>=1.27.0,<=2.1.1)"] -cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] -endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] -langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "tenacity (<=8.3)"] -langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.3)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"] -lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] -metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] -pipelines = ["pyyaml (>=5.3.1,<7)"] -prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] -preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] -private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] -rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"] -ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"] -ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] -reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"] -tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] -tokenization = ["sentencepiece (>=0.2.0)"] -vizier = ["google-vizier (>=0.1.6)"] -xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] - -[[package]] -name = "google-cloud-bigquery" -version = "3.25.0" -description = "Google BigQuery API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, - {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" -packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" - -[package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] -geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] -opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] - -[[package]] -name = "google-cloud-core" -version = "2.4.1" -description = "Google Cloud API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, - {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] - -[[package]] -name = "google-cloud-resource-manager" -version = "1.12.5" -description = "Google Cloud Resource Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175"}, - {file = "google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "google-cloud-storage" -version = "2.18.0" -description = "Google Cloud Storage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google_cloud_storage-2.18.0-py2.py3-none-any.whl", hash = "sha256:e8e1a9577952143c3fca8163005ecfadd2d70ec080fa158a8b305000e2c22fbb"}, - {file = "google_cloud_storage-2.18.0.tar.gz", hash = "sha256:0aa3f7c57f3632f81b455d91558d2b27ada96eee2de3aaa17f689db1470d9578"}, -] - -[package.dependencies] -google-api-core = ">=2.15.0,<3.0.0dev" -google-auth = ">=2.26.1,<3.0dev" -google-cloud-core = ">=2.3.0,<3.0dev" -google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.6.0" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -protobuf = ["protobuf (<6.0.0dev)"] -tracing = ["opentelemetry-api (>=1.1.0)"] - -[[package]] -name = "google-crc32c" -version = "1.5.0" -description = "A python wrapper of the C library 'Google CRC32C'" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, -] - -[package.extras] -testing = ["pytest"] - -[[package]] -name = "google-resumable-media" -version = "2.7.1" -description = "Utilities for Google Media Downloads and Resumable Uploads" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-resumable-media-2.7.1.tar.gz", hash = "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33"}, - {file = "google_resumable_media-2.7.1-py2.py3-none-any.whl", hash = "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c"}, -] - -[package.dependencies] -google-crc32c = ">=1.0,<2.0dev" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] -requests = ["requests (>=2.18.0,<3.0.0dev)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.63.2" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, -] - -[package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.1" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.1.tar.gz", hash = "sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001"}, - {file = "grpc_google_iam_v1-0.13.1-py2.py3-none-any.whl", hash = "sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" - -[[package]] -name = "grpcio" -version = "1.65.4" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"}, - {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"}, - {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"}, - {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"}, - {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"}, - {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"}, - {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"}, - {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"}, - {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"}, - {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"}, - {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"}, - {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"}, - {file = "grpcio-1.65.4-cp312-cp312-linux_armv7l.whl", hash = "sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56"}, - {file = "grpcio-1.65.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63"}, - {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a"}, - {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28"}, - {file = "grpcio-1.65.4-cp312-cp312-win32.whl", hash = "sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0"}, - {file = "grpcio-1.65.4-cp312-cp312-win_amd64.whl", hash = "sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416"}, - {file = "grpcio-1.65.4-cp38-cp38-linux_armv7l.whl", hash = "sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021"}, - {file = "grpcio-1.65.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816"}, - {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524"}, - {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7"}, - {file = "grpcio-1.65.4-cp38-cp38-win32.whl", hash = "sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a"}, - {file = "grpcio-1.65.4-cp38-cp38-win_amd64.whl", hash = "sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f"}, - {file = "grpcio-1.65.4-cp39-cp39-linux_armv7l.whl", hash = "sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733"}, - {file = "grpcio-1.65.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b"}, - {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2"}, - {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257"}, - {file = "grpcio-1.65.4-cp39-cp39-win32.whl", hash = "sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d"}, - {file = "grpcio-1.65.4-cp39-cp39-win_amd64.whl", hash = "sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9"}, - {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.65.4)"] - -[[package]] -name = "grpcio-status" -version = "1.62.2" -description = "Status proto mapping for gRPC" -optional = false -python-versions = ">=3.6" -files = [ - {file = "grpcio-status-1.62.2.tar.gz", hash = "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a"}, - {file = "grpcio_status-1.62.2-py3-none-any.whl", hash = "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f"}, -] - -[package.dependencies] -googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.62.2" -protobuf = ">=4.21.6" - [[package]] name = "h11" version = "0.14.0" @@ -1789,13 +1210,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -1813,13 +1234,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.24.5" +version = "0.24.6" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.24.5-py3-none-any.whl", hash = "sha256:d93fb63b1f1a919a22ce91a14518974e81fc4610bf344dfe7572343ce8d3aced"}, - {file = "huggingface_hub-0.24.5.tar.gz", hash = "sha256:7b45d6744dd53ce9cbf9880957de00e9d10a9ae837f1c9b7255fc8fa4e8264f3"}, + {file = "huggingface_hub-0.24.6-py3-none-any.whl", hash = "sha256:a990f3232aa985fe749bc9474060cbad75e8b2f115f6665a9fda5b9c97818970"}, + {file = "huggingface_hub-0.24.6.tar.gz", hash = "sha256:cc2579e761d070713eaa9c323e3debe39d5b464ae3a7261c39a9195b27bb8000"}, ] [package.dependencies] @@ -1847,13 +1268,13 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -1883,13 +1304,13 @@ networkx = ">=2" [[package]] name = "importlib-metadata" -version = "8.2.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -1962,7 +1383,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} @@ -1988,21 +1408,21 @@ test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "num [[package]] name = "ipywidgets" -version = "8.1.3" +version = "8.1.5" description = "Jupyter interactive widgets" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.1.3-py3-none-any.whl", hash = "sha256:efafd18f7a142248f7cb0ba890a68b96abd4d6e88ddbda483c9130d12667eaf2"}, - {file = "ipywidgets-8.1.3.tar.gz", hash = "sha256:f5f9eeaae082b1823ce9eac2575272952f40d748893972956dc09700a6392d9c"}, + {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"}, + {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.11,<3.1.0" +jupyterlab-widgets = ">=3.0.12,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.11,<4.1.0" +widgetsnbextension = ">=4.0.12,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -2085,44 +1505,86 @@ files = [ Jinja2 = ">=2.2" [[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." +name = "jiter" +version = "0.5.0" +description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - -[[package]] -name = "jsonpatch" -version = "1.33" -description = "Apply JSON-Patches (RFC 6902)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, - {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, + {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, + {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, + {file = "jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, + {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, + {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, + {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, + {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, + {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, + {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, + {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, + {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, ] -[package.dependencies] -jsonpointer = ">=1.9" - [[package]] -name = "jsonpath-ng" -version = "1.6.1" -description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." +name = "json5" +version = "0.9.25" +description = "A Python implementation of the JSON5 data format." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, - {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, + {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, + {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, ] -[package.dependencies] -ply = "*" - [[package]] name = "jsonpointer" version = "3.0.0" @@ -2177,77 +1639,6 @@ files = [ [package.dependencies] referencing = ">=0.31.0" -[[package]] -name = "julep" -version = "0.2.14" -description = "Julep is a platform for creating agents with long-term memory" -optional = false -python-versions = "<3.14,>=3.8" -files = [ - {file = "julep-0.2.14-py3-none-any.whl", hash = "sha256:bc3edab590b7942309e4c03ef3300689d58aa92fb362dc3642046f341ccebf75"}, - {file = "julep-0.2.14.tar.gz", hash = "sha256:504ab31ec6e015f9dac1c5dc0d86a7f56ab1dba0228f215191ff5b016f159c82"}, -] - -[package.dependencies] -beartype = ">=0.14.0,<1.0.0" -environs = ">=9.0.0,<11.0.0" -httpx = ">=0.20.0,<1.0.0" -openai = ">=1.0.1,<2.0.0" -pydantic = ">=2.0.1,<3.0.0" -typing-extensions = ">=4.0.0,<5.0.0" - -[[package]] -name = "jupyter-ai" -version = "2.20.0" -description = "A generative AI extension for JupyterLab" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_ai-2.20.0-py3-none-any.whl", hash = "sha256:e65fb9d3d566bd67e9846716fba0a712955f8ea9b2779dc0902c99e4d4766a3c"}, - {file = "jupyter_ai-2.20.0.tar.gz", hash = "sha256:3544c8906a1ea15aa012a862964832277bd899e9ca2e715b571ce9c3ea69fa23"}, -] - -[package.dependencies] -aiosqlite = ">=0.18" -dask = {version = "*", extras = ["distributed"]} -deepmerge = ">=1.0" -faiss-cpu = "<=1.8.0" -importlib-metadata = ">=5.2.0" -jupyter-ai-magics = ">=2.13.0" -jupyter-server = ">=1.6,<3" -jupyterlab = ">=4.0,<5.0" -traitlets = ">=5.0" -typing-extensions = ">=4.5.0" - -[package.extras] -all = ["arxiv", "jupyter-ai-magics[all]", "pypdf"] -dev = ["jupyter-ai-magics[dev]"] -test = ["coverage", "jupyter-server[test] (>=1.6,<3)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-jupyter", "pytest-tornasync", "syrupy (>=4.0.8,<4.1.0)"] - -[[package]] -name = "jupyter-ai-magics" -version = "2.18.1" -description = "Jupyter AI magics Python package. Not published on NPM." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_ai_magics-2.18.1-py3-none-any.whl", hash = "sha256:54af7a833eae553c3bccf00eff845071826b0fb84b77c8238845f058541fd70d"}, - {file = "jupyter_ai_magics-2.18.1.tar.gz", hash = "sha256:df028a0566cd04555bd59dbdb5c08cb4eaf8150157a5ec176376390c73d4ac5b"}, -] - -[package.dependencies] -click = ">=8.0,<9.0" -importlib-metadata = ">=5.2.0" -ipython = "*" -jsonpath-ng = ">=1.5.3,<2" -langchain = ">=0.1.0,<0.2.0" -typing-extensions = ">=4.5.0" - -[package.extras] -all = ["ai21", "boto3", "gpt4all", "huggingface-hub", "ipywidgets", "langchain-anthropic", "langchain-cohere", "langchain-google-genai", "langchain-mistralai", "langchain-nvidia-ai-endpoints", "langchain-openai", "pillow", "qianfan", "together"] -dev = ["pre-commit (>=3.3.3,<4)"] -test = ["coverage", "pytest", "pytest-asyncio", "pytest-cov"] - [[package]] name = "jupyter-client" version = "8.6.2" @@ -2407,7 +1798,6 @@ jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" packaging = "*" setuptools = ">=40.1.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} tornado = ">=6.2.0" traitlets = "*" @@ -2456,152 +1846,15 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v [[package]] name = "jupyterlab-widgets" -version = "3.0.11" +version = "3.0.13" description = "Jupyter interactive widgets for JupyterLab" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.11-py3-none-any.whl", hash = "sha256:78287fd86d20744ace330a61625024cf5521e1c012a352ddc0a3cdc2348becd0"}, - {file = "jupyterlab_widgets-3.0.11.tar.gz", hash = "sha256:dd5ac679593c969af29c9bed054c24f26842baa51352114736756bc035deee27"}, -] - -[[package]] -name = "langchain" -version = "0.1.20" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9"}, - {file = "langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -dataclasses-json = ">=0.5.7,<0.7" -langchain-community = ">=0.0.38,<0.1" -langchain-core = ">=0.1.52,<0.2.0" -langchain-text-splitters = ">=0.0.1,<0.1" -langsmith = ">=0.1.17,<0.2.0" -numpy = ">=1,<2" -pydantic = ">=1,<3" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<9.0.0" - -[package.extras] -azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] -clarifai = ["clarifai (>=9.1.0)"] -cli = ["typer (>=0.9.0,<0.10.0)"] -cohere = ["cohere (>=4,<6)"] -docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] -embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] -javascript = ["esprima (>=4.0.1,<5.0.0)"] -llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] -openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] -qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] -text-helpers = ["chardet (>=5.1.0,<6.0.0)"] - -[[package]] -name = "langchain-community" -version = "0.0.38" -description = "Community contributed LangChain integrations." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a"}, - {file = "langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d"}, -] - -[package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" -dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.1.52,<0.2.0" -langsmith = ">=0.1.0,<0.2.0" -numpy = ">=1,<2" -PyYAML = ">=5.3" -requests = ">=2,<3" -SQLAlchemy = ">=1.4,<3" -tenacity = ">=8.1.0,<9.0.0" - -[package.extras] -cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] - -[[package]] -name = "langchain-core" -version = "0.1.52" -description = "Building applications with LLMs through composability" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_core-0.1.52-py3-none-any.whl", hash = "sha256:62566749c92e8a1181c255c788548dc16dbc319d896cd6b9c95dc17af9b2a6db"}, - {file = "langchain_core-0.1.52.tar.gz", hash = "sha256:084c3fc452f5a6966c28ab3ec5dbc8b8d26fc3f63378073928f4e29d90b6393f"}, -] - -[package.dependencies] -jsonpatch = ">=1.33,<2.0" -langsmith = ">=0.1.0,<0.2.0" -packaging = ">=23.2,<24.0" -pydantic = ">=1,<3" -PyYAML = ">=5.3" -tenacity = ">=8.1.0,<9.0.0" - -[package.extras] -extended-testing = ["jinja2 (>=3,<4)"] - -[[package]] -name = "langchain-openai" -version = "0.1.6" -description = "An integration package connecting OpenAI and LangChain" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_openai-0.1.6-py3-none-any.whl", hash = "sha256:7f62ecb12d3cdd0d96679abea00e4e3ceb1f829f6d1f127a5f7b97c1315d157f"}, - {file = "langchain_openai-0.1.6.tar.gz", hash = "sha256:7d2e838e57ef231cb7689fd58ac5fa8a6e9e504174f8c5698c837739786e2030"}, -] - -[package.dependencies] -langchain-core = ">=0.1.46,<0.2.0" -openai = ">=1.24.0,<2.0.0" -tiktoken = ">=0.5.2,<1" - -[[package]] -name = "langchain-text-splitters" -version = "0.0.2" -description = "LangChain text splitting utilities" -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"}, - {file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"}, + {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, + {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, ] -[package.dependencies] -langchain-core = ">=0.1.28,<0.3" - -[package.extras] -extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"] - -[[package]] -name = "langsmith" -version = "0.1.96" -description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "langsmith-0.1.96-py3-none-any.whl", hash = "sha256:1e8285c3f84cffebc761ff5624647de20686dbbf659f5d1135918261f85bad13"}, - {file = "langsmith-0.1.96.tar.gz", hash = "sha256:01b7fa7d538b6409ee74bff458cc3dcdc1799fc70d329f79eb26ba54c32991ae"}, -] - -[package.dependencies] -orjson = ">=3.9.14,<4.0.0" -pydantic = {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""} -requests = ">=2,<3" - [[package]] name = "libcst" version = "1.4.0" @@ -2644,13 +1897,13 @@ dev = ["Sphinx (>=5.1.1)", "black (==23.12.1)", "build (>=0.10.0)", "coverage (> [[package]] name = "litellm" -version = "1.40.8" +version = "1.44.4" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" files = [ - {file = "litellm-1.40.8-py3-none-any.whl", hash = "sha256:cd0c313423dad49224696c45ac02c574abcaed6666c597543c2318b3521f4320"}, - {file = "litellm-1.40.8.tar.gz", hash = "sha256:8878d2437ac50bcc6f39ded1729e2113eb5fee645fcebcd32fc241c529a21c00"}, + {file = "litellm-1.44.4-py3-none-any.whl", hash = "sha256:0e2d56cc767003b30275005072ad6f7e2e37b0468719e50817cc581a055b8f1a"}, + {file = "litellm-1.44.4.tar.gz", hash = "sha256:3216424a27e6405b9a099aff166c6d2442b1013c9f9909084ab722eb9d3b8861"}, ] [package.dependencies] @@ -2658,27 +1911,68 @@ aiohttp = "*" click = "*" importlib-metadata = ">=6.8.0" jinja2 = ">=3.1.2,<4.0.0" -openai = ">=1.27.0" +jsonschema = ">=4.22.0,<5.0.0" +openai = ">=1.40.0" +pydantic = ">=2.0.0,<3.0.0" python-dotenv = ">=0.2.0" requests = ">=2.31.0,<3.0.0" -tiktoken = ">=0.4.0" +tiktoken = ">=0.7.0" tokenizers = "*" [package.extras] -extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "pynacl (>=1.5.0,<2.0.0)", "resend (>=0.8.0,<0.9.0)"] proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.111.0,<0.112.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=22.0.0,<23.0.0)", "orjson (>=3.9.7,<4.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] [[package]] -name = "locket" -version = "1.0.0" -description = "File-based locks for Python on Linux and Windows" +name = "lz4" +version = "4.3.3" +description = "LZ4 Bindings for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, - {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, + {file = "lz4-4.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b891880c187e96339474af2a3b2bfb11a8e4732ff5034be919aa9029484cd201"}, + {file = "lz4-4.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:222a7e35137d7539c9c33bb53fcbb26510c5748779364014235afc62b0ec797f"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f76176492ff082657ada0d0f10c794b6da5800249ef1692b35cf49b1e93e8ef7"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d18718f9d78182c6b60f568c9a9cec8a7204d7cb6fad4e511a2ef279e4cb05"}, + {file = "lz4-4.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cdc60e21ec70266947a48839b437d46025076eb4b12c76bd47f8e5eb8a75dcc"}, + {file = "lz4-4.3.3-cp310-cp310-win32.whl", hash = "sha256:c81703b12475da73a5d66618856d04b1307e43428a7e59d98cfe5a5d608a74c6"}, + {file = "lz4-4.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:43cf03059c0f941b772c8aeb42a0813d68d7081c009542301637e5782f8a33e2"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6"}, + {file = "lz4-4.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7b1839f795315e480fb87d9bc60b186a98e3e5d17203c6e757611ef7dcef61"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563"}, + {file = "lz4-4.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2507ee9c99dbddd191c86f0e0c8b724c76d26b0602db9ea23232304382e1f21"}, + {file = "lz4-4.3.3-cp311-cp311-win32.whl", hash = "sha256:f180904f33bdd1e92967923a43c22899e303906d19b2cf8bb547db6653ea6e7d"}, + {file = "lz4-4.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d"}, + {file = "lz4-4.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:31ea4be9d0059c00b2572d700bf2c1bc82f241f2c3282034a759c9a4d6ca4dc2"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf"}, + {file = "lz4-4.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d84b479ddf39fe3ea05387f10b779155fc0990125f4fb35d636114e1c63a2e"}, + {file = "lz4-4.3.3-cp312-cp312-win32.whl", hash = "sha256:337cb94488a1b060ef1685187d6ad4ba8bc61d26d631d7ba909ee984ea736be1"}, + {file = "lz4-4.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:363ab65bf31338eb364062a15f302fc0fab0a49426051429866d71c793c23394"}, + {file = "lz4-4.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a136e44a16fc98b1abc404fbabf7f1fada2bdab6a7e970974fb81cf55b636d0"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abc197e4aca8b63f5ae200af03eb95fb4b5055a8f990079b5bdf042f568469dd"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56f4fe9c6327adb97406f27a66420b22ce02d71a5c365c48d6b656b4aaeb7775"}, + {file = "lz4-4.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0e822cd7644995d9ba248cb4b67859701748a93e2ab7fc9bc18c599a52e4604"}, + {file = "lz4-4.3.3-cp38-cp38-win32.whl", hash = "sha256:24b3206de56b7a537eda3a8123c644a2b7bf111f0af53bc14bed90ce5562d1aa"}, + {file = "lz4-4.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:b47839b53956e2737229d70714f1d75f33e8ac26e52c267f0197b3189ca6de24"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6756212507405f270b66b3ff7f564618de0606395c0fe10a7ae2ffcbbe0b1fba"}, + {file = "lz4-4.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee9ff50557a942d187ec85462bb0960207e7ec5b19b3b48949263993771c6205"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b901c7784caac9a1ded4555258207d9e9697e746cc8532129f150ffe1f6ba0d"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d9ec061b9eca86e4dcc003d93334b95d53909afd5a32c6e4f222157b50c071"}, + {file = "lz4-4.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4c7bf687303ca47d69f9f0133274958fd672efaa33fb5bcde467862d6c621f0"}, + {file = "lz4-4.3.3-cp39-cp39-win32.whl", hash = "sha256:054b4631a355606e99a42396f5db4d22046a3397ffc3269a348ec41eaebd69d2"}, + {file = "lz4-4.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:eac9af361e0d98335a02ff12fb56caeb7ea1196cf1a49dbf6f17828a131da807"}, + {file = "lz4-4.3.3.tar.gz", hash = "sha256:01fe674ef2889dbb9899d8a67361e0c4a2c833af5aeb37dd505727cf5d2a131e"}, ] +[package.extras] +docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] +flake8 = ["flake8"] +tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -2774,13 +2068,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -2788,7 +2082,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -2827,71 +2121,6 @@ files = [ {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, ] -[[package]] -name = "msgpack" -version = "1.0.8" -description = "MessagePack serializer" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, -] - [[package]] name = "msgspec" version = "0.18.6" @@ -3209,132 +2438,89 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.26.4" +version = "2.1.0" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6326ab99b52fafdcdeccf602d6286191a79fe2fda0ae90573c5814cd2b0bc1b8"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0937e54c09f7a9a68da6889362ddd2ff584c02d015ec92672c099b61555f8911"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:30014b234f07b5fec20f4146f69e13cfb1e33ee9a18a1879a0142fbb00d47673"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:899da829b362ade41e1e7eccad2cf274035e1cb36ba73034946fccd4afd8606b"}, + {file = "numpy-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08801848a40aea24ce16c2ecde3b756f9ad756586fb2d13210939eb69b023f5b"}, + {file = "numpy-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:398049e237d1aae53d82a416dade04defed1a47f87d18d5bd615b6e7d7e41d1f"}, + {file = "numpy-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0abb3916a35d9090088a748636b2c06dc9a6542f99cd476979fb156a18192b84"}, + {file = "numpy-2.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10e2350aea18d04832319aac0f887d5fcec1b36abd485d14f173e3e900b83e33"}, + {file = "numpy-2.1.0-cp310-cp310-win32.whl", hash = "sha256:f6b26e6c3b98adb648243670fddc8cab6ae17473f9dc58c51574af3e64d61211"}, + {file = "numpy-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:f505264735ee074250a9c78247ee8618292091d9d1fcc023290e9ac67e8f1afa"}, + {file = "numpy-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:76368c788ccb4f4782cf9c842b316140142b4cbf22ff8db82724e82fe1205dce"}, + {file = "numpy-2.1.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8e93a01a35be08d31ae33021e5268f157a2d60ebd643cfc15de6ab8e4722eb1"}, + {file = "numpy-2.1.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9523f8b46485db6939bd069b28b642fec86c30909cea90ef550373787f79530e"}, + {file = "numpy-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54139e0eb219f52f60656d163cbe67c31ede51d13236c950145473504fa208cb"}, + {file = "numpy-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebbf9fbdabed208d4ecd2e1dfd2c0741af2f876e7ae522c2537d404ca895c3"}, + {file = "numpy-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:378cb4f24c7d93066ee4103204f73ed046eb88f9ad5bb2275bb9fa0f6a02bd36"}, + {file = "numpy-2.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8f699a709120b220dfe173f79c73cb2a2cab2c0b88dd59d7b49407d032b8ebd"}, + {file = "numpy-2.1.0-cp311-cp311-win32.whl", hash = "sha256:ffbd6faeb190aaf2b5e9024bac9622d2ee549b7ec89ef3a9373fa35313d44e0e"}, + {file = "numpy-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0af3a5987f59d9c529c022c8c2a64805b339b7ef506509fba7d0556649b9714b"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe76d75b345dc045acdbc006adcb197cc680754afd6c259de60d358d60c93736"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f358ea9e47eb3c2d6eba121ab512dfff38a88db719c38d1e67349af210bc7529"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:dd94ce596bda40a9618324547cfaaf6650b1a24f5390350142499aa4e34e53d1"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b47c551c6724960479cefd7353656498b86e7232429e3a41ab83be4da1b109e8"}, + {file = "numpy-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0756a179afa766ad7cb6f036de622e8a8f16ffdd55aa31f296c870b5679d745"}, + {file = "numpy-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24003ba8ff22ea29a8c306e61d316ac74111cebf942afbf692df65509a05f111"}, + {file = "numpy-2.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b34fa5e3b5d6dc7e0a4243fa0f81367027cb6f4a7215a17852979634b5544ee0"}, + {file = "numpy-2.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4f982715e65036c34897eb598d64aef15150c447be2cfc6643ec7a11af06574"}, + {file = "numpy-2.1.0-cp312-cp312-win32.whl", hash = "sha256:c4cd94dfefbefec3f8b544f61286584292d740e6e9d4677769bc76b8f41deb02"}, + {file = "numpy-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0cdef204199278f5c461a0bed6ed2e052998276e6d8ab2963d5b5c39a0500bc"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8ab81ccd753859ab89e67199b9da62c543850f819993761c1e94a75a814ed667"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:442596f01913656d579309edcd179a2a2f9977d9a14ff41d042475280fc7f34e"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:848c6b5cad9898e4b9ef251b6f934fa34630371f2e916261070a4eb9092ffd33"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:54c6a63e9d81efe64bfb7bcb0ec64332a87d0b87575f6009c8ba67ea6374770b"}, + {file = "numpy-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:652e92fc409e278abdd61e9505649e3938f6d04ce7ef1953f2ec598a50e7c195"}, + {file = "numpy-2.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab32eb9170bf8ffcbb14f11613f4a0b108d3ffee0832457c5d4808233ba8977"}, + {file = "numpy-2.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8fb49a0ba4d8f41198ae2d52118b050fd34dace4b8f3fb0ee34e23eb4ae775b1"}, + {file = "numpy-2.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44e44973262dc3ae79e9063a1284a73e09d01b894b534a769732ccd46c28cc62"}, + {file = "numpy-2.1.0-cp313-cp313-win32.whl", hash = "sha256:ab83adc099ec62e044b1fbb3a05499fa1e99f6d53a1dde102b2d85eff66ed324"}, + {file = "numpy-2.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:de844aaa4815b78f6023832590d77da0e3b6805c644c33ce94a1e449f16d6ab5"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:343e3e152bf5a087511cd325e3b7ecfd5b92d369e80e74c12cd87826e263ec06"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f07fa2f15dabe91259828ce7d71b5ca9e2eb7c8c26baa822c825ce43552f4883"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5474dad8c86ee9ba9bb776f4b99ef2d41b3b8f4e0d199d4f7304728ed34d0300"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1f817c71683fd1bb5cff1529a1d085a57f02ccd2ebc5cd2c566f9a01118e3b7d"}, + {file = "numpy-2.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a3336fbfa0d38d3deacd3fe7f3d07e13597f29c13abf4d15c3b6dc2291cbbdd"}, + {file = "numpy-2.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a894c51fd8c4e834f00ac742abad73fc485df1062f1b875661a3c1e1fb1c2f6"}, + {file = "numpy-2.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:9156ca1f79fc4acc226696e95bfcc2b486f165a6a59ebe22b2c1f82ab190384a"}, + {file = "numpy-2.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:624884b572dff8ca8f60fab591413f077471de64e376b17d291b19f56504b2bb"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:15ef8b2177eeb7e37dd5ef4016f30b7659c57c2c0b57a779f1d537ff33a72c7b"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e5f0642cdf4636198a4990de7a71b693d824c56a757862230454629cf62e323d"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15976718c004466406342789f31b6673776360f3b1e3c575f25302d7e789575"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6c1de77ded79fef664d5098a66810d4d27ca0224e9051906e634b3f7ead134c2"}, + {file = "numpy-2.1.0.tar.gz", hash = "sha256:7dc90da0081f7e1da49ec4e398ede6a8e9cc4f5ebe5f9e06b443ed889ee9aaa2"}, ] [[package]] name = "openai" -version = "1.38.0" +version = "1.42.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.38.0-py3-none-any.whl", hash = "sha256:a19ef052f1676320f52183ae6f9775da6d888fbe3aec57886117163c095d9f7c"}, - {file = "openai-1.38.0.tar.gz", hash = "sha256:30fb324bf452ecb1194ca7dbc64566a4d7aa054c6a5da857937ede7d517a220b"}, + {file = "openai-1.42.0-py3-none-any.whl", hash = "sha256:dc91e0307033a4f94931e5d03cc3b29b9717014ad5e73f9f2051b6cb5eda4d80"}, + {file = "openai-1.42.0.tar.gz", hash = "sha256:c9d31853b4e0bc2dc8bd08003b462a006035655a701471695d0bfdc08529cde3"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tqdm = ">4" -typing-extensions = ">=4.7,<5" +typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -[[package]] -name = "orjson" -version = "3.10.6" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, - {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, - {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, - {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, - {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, - {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, - {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, - {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, - {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, - {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, - {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, - {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, - {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, - {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, - {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, - {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, - {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, - {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, - {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, - {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, - {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, - {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, - {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, - {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, - {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, - {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, -] - [[package]] name = "overrides" version = "7.7.0" @@ -3348,13 +2534,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -3396,7 +2582,7 @@ files = [ ] [package.dependencies] -numpy = {version = ">=1.22.4", markers = "python_version < \"3.11\""} +numpy = {version = ">=1.23.2", markers = "python_version == \"3.11\""} python-dateutil = ">=2.8.2" pytz = ">=2020.1" tzdata = ">=2022.7" @@ -3452,24 +2638,6 @@ files = [ qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["docopt", "pytest"] -[[package]] -name = "partd" -version = "1.4.2" -description = "Appendable key-value storage" -optional = false -python-versions = ">=3.9" -files = [ - {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, - {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, -] - -[package.dependencies] -locket = "*" -toolz = "*" - -[package.extras] -complete = ["blosc", "numpy (>=1.20.0)", "pandas (>=1.3)", "pyzmq"] - [[package]] name = "pastel" version = "0.2.1" @@ -3537,17 +2705,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "ply" -version = "3.11" -description = "Python Lex & Yacc" -optional = false -python-versions = "*" -files = [ - {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, - {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, -] - [[package]] name = "poethepoet" version = "0.25.1" @@ -3605,41 +2762,24 @@ files = [ [package.dependencies] wcwidth = "*" -[[package]] -name = "proto-plus" -version = "1.24.0" -description = "Beautiful, Pythonic protocol buffers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, -] - -[package.dependencies] -protobuf = ">=3.19.0,<6.0.0dev" - -[package.extras] -testing = ["google-api-core (>=1.31.5)"] - [[package]] name = "protobuf" -version = "4.25.4" +version = "5.27.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, + {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, + {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, + {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, + {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, + {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, + {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, + {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, + {file = "protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = "sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, + {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, + {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, ] [[package]] @@ -3696,31 +2836,6 @@ files = [ [package.extras] tests = ["pytest"] -[[package]] -name = "pyasn1" -version = "0.6.0" -description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, -] - -[[package]] -name = "pyasn1-modules" -version = "0.4.0" -description = "A collection of ASN.1-based protocols modules" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - [[package]] name = "pycnite" version = "2024.7.31" @@ -3961,6 +3076,41 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "python-box" +version = "7.2.0" +description = "Advanced Python dictionaries with dot notation access" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_box-7.2.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:6bdeec791e25258351388b3029a3ec5da302bb9ed3be175493c43cdc6c47f5e3"}, + {file = "python_box-7.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c449f7b3756a71479fa9c61a86e344ac00ed782a66d7662590f0afa294249d18"}, + {file = "python_box-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6b0d61f182d394106d963232854e495b51edc178faa5316a797be1178212d7e0"}, + {file = "python_box-7.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e2d752de8c1204255bf7b0c814c59ef48293c187a7e9fdcd2fefa28024b72032"}, + {file = "python_box-7.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a6c35ea356a386077935958a5debcd5b229b9a1b3b26287a52dfe1a7e65d99"}, + {file = "python_box-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:32ed58ec4d9e5475efe69f9c7d773dfea90a6a01979e776da93fd2b0a5d04429"}, + {file = "python_box-7.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a2d664c6a27f7515469b6f1e461935a2038ee130b7d194b4b4db4e85d363618"}, + {file = "python_box-7.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5a7365db1aaf600d3e8a2747fcf6833beb5d45439a54318548f02e302e3ec"}, + {file = "python_box-7.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:739f827056ea148cbea3122d4617c994e829b420b1331183d968b175304e3a4f"}, + {file = "python_box-7.2.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:2617ef3c3d199f55f63c908f540a4dc14ced9b18533a879e6171c94a6a436f23"}, + {file = "python_box-7.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd866bed03087b1d8340014da8c3aaae19135767580641df1b4ae6fff6ac0aa"}, + {file = "python_box-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:9681f059e7e92bdf20782cd9ea6e533d4711fc7b8c57a462922a025d46add4d0"}, + {file = "python_box-7.2.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:6b59b1e2741c9ceecdf5a5bd9b90502c24650e609cd824d434fed3b6f302b7bb"}, + {file = "python_box-7.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23fae825d809ae7520fdeac88bb52be55a3b63992120a00e381783669edf589"}, + {file = "python_box-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:573b1abdcb7bd745fa404444f060ee62fc35a74f067181e55dcb43cfe92f2827"}, + {file = "python_box-7.2.0-py3-none-any.whl", hash = "sha256:a3c90832dd772cb0197fdb5bc06123b6e1b846899a1b53d9c39450d27a584829"}, + {file = "python_box-7.2.0.tar.gz", hash = "sha256:551af20bdab3a60a2a21e3435120453c4ca32f7393787c3a5036e1d9fc6a0ede"}, +] + +[package.extras] +all = ["msgpack", "ruamel.yaml (>=0.17)", "toml"] +msgpack = ["msgpack"] +pyyaml = ["PyYAML"] +ruamel-yaml = ["ruamel.yaml (>=0.17)"] +toml = ["toml"] +tomli = ["tomli", "tomli-w"] +yaml = ["ruamel.yaml (>=0.17)"] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -4088,158 +3238,182 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyzmq" -version = "26.0.3" +version = "26.2.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, ] [package.dependencies] @@ -4414,279 +3588,143 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.19.1" +version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aaf71f95b21f9dc708123335df22e5a2fef6307e3e6f9ed773b2e0938cc4d491"}, - {file = "rpds_py-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca0dda0c5715efe2ab35bb83f813f681ebcd2840d8b1b92bfc6fe3ab382fae4a"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81db2e7282cc0487f500d4db203edc57da81acde9e35f061d69ed983228ffe3b"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a8dfa125b60ec00c7c9baef945bb04abf8ac772d8ebefd79dae2a5f316d7850"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271accf41b02687cef26367c775ab220372ee0f4925591c6796e7c148c50cab5"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9bc4161bd3b970cd6a6fcda70583ad4afd10f2750609fb1f3ca9505050d4ef3"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0cf2a0dbb5987da4bd92a7ca727eadb225581dd9681365beba9accbe5308f7d"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5e28e56143750808c1c79c70a16519e9bc0a68b623197b96292b21b62d6055c"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c7af6f7b80f687b33a4cdb0a785a5d4de1fb027a44c9a049d8eb67d5bfe8a687"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e429fc517a1c5e2a70d576077231538a98d59a45dfc552d1ac45a132844e6dfb"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d2dbd8f4990d4788cb122f63bf000357533f34860d269c1a8e90ae362090ff3a"}, - {file = "rpds_py-0.19.1-cp310-none-win32.whl", hash = "sha256:e0f9d268b19e8f61bf42a1da48276bcd05f7ab5560311f541d22557f8227b866"}, - {file = "rpds_py-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:df7c841813f6265e636fe548a49664c77af31ddfa0085515326342a751a6ba51"}, - {file = "rpds_py-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:902cf4739458852fe917104365ec0efbea7d29a15e4276c96a8d33e6ed8ec137"}, - {file = "rpds_py-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3d73022990ab0c8b172cce57c69fd9a89c24fd473a5e79cbce92df87e3d9c48"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3837c63dd6918a24de6c526277910e3766d8c2b1627c500b155f3eecad8fad65"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cdb7eb3cf3deb3dd9e7b8749323b5d970052711f9e1e9f36364163627f96da58"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26ab43b6d65d25b1a333c8d1b1c2f8399385ff683a35ab5e274ba7b8bb7dc61c"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75130df05aae7a7ac171b3b5b24714cffeabd054ad2ebc18870b3aa4526eba23"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c34f751bf67cab69638564eee34023909380ba3e0d8ee7f6fe473079bf93f09b"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2671cb47e50a97f419a02cd1e0c339b31de017b033186358db92f4d8e2e17d8"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c73254c256081704dba0a333457e2fb815364018788f9b501efe7c5e0ada401"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4383beb4a29935b8fa28aca8fa84c956bf545cb0c46307b091b8d312a9150e6a"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dbceedcf4a9329cc665452db1aaf0845b85c666e4885b92ee0cddb1dbf7e052a"}, - {file = "rpds_py-0.19.1-cp311-none-win32.whl", hash = "sha256:f0a6d4a93d2a05daec7cb885157c97bbb0be4da739d6f9dfb02e101eb40921cd"}, - {file = "rpds_py-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:c149a652aeac4902ecff2dd93c3b2681c608bd5208c793c4a99404b3e1afc87c"}, - {file = "rpds_py-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:56313be667a837ff1ea3508cebb1ef6681d418fa2913a0635386cf29cff35165"}, - {file = "rpds_py-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d1d7539043b2b31307f2c6c72957a97c839a88b2629a348ebabe5aa8b626d6b"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1dc59a5e7bc7f44bd0c048681f5e05356e479c50be4f2c1a7089103f1621d5"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8f78398e67a7227aefa95f876481485403eb974b29e9dc38b307bb6eb2315ea"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef07a0a1d254eeb16455d839cef6e8c2ed127f47f014bbda64a58b5482b6c836"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8124101e92c56827bebef084ff106e8ea11c743256149a95b9fd860d3a4f331f"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08ce9c95a0b093b7aec75676b356a27879901488abc27e9d029273d280438505"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b02dd77a2de6e49078c8937aadabe933ceac04b41c5dde5eca13a69f3cf144e"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4dd02e29c8cbed21a1875330b07246b71121a1c08e29f0ee3db5b4cfe16980c4"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9c7042488165f7251dc7894cd533a875d2875af6d3b0e09eda9c4b334627ad1c"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f809a17cc78bd331e137caa25262b507225854073fd319e987bd216bed911b7c"}, - {file = "rpds_py-0.19.1-cp312-none-win32.whl", hash = "sha256:3ddab996807c6b4227967fe1587febade4e48ac47bb0e2d3e7858bc621b1cace"}, - {file = "rpds_py-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:32e0db3d6e4f45601b58e4ac75c6f24afbf99818c647cc2066f3e4b192dabb1f"}, - {file = "rpds_py-0.19.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:747251e428406b05fc86fee3904ee19550c4d2d19258cef274e2151f31ae9d38"}, - {file = "rpds_py-0.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dc733d35f861f8d78abfaf54035461e10423422999b360966bf1c443cbc42705"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbda75f245caecff8faa7e32ee94dfaa8312a3367397975527f29654cd17a6ed"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd04d8cab16cab5b0a9ffc7d10f0779cf1120ab16c3925404428f74a0a43205a"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2d66eb41ffca6cc3c91d8387509d27ba73ad28371ef90255c50cb51f8953301"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdf4890cda3b59170009d012fca3294c00140e7f2abe1910e6a730809d0f3f9b"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1fa67ef839bad3815124f5f57e48cd50ff392f4911a9f3cf449d66fa3df62a5"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b82c9514c6d74b89a370c4060bdb80d2299bc6857e462e4a215b4ef7aa7b090e"}, - {file = "rpds_py-0.19.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c7b07959866a6afb019abb9564d8a55046feb7a84506c74a6f197cbcdf8a208e"}, - {file = "rpds_py-0.19.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4f580ae79d0b861dfd912494ab9d477bea535bfb4756a2269130b6607a21802e"}, - {file = "rpds_py-0.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c6d20c8896c00775e6f62d8373aba32956aa0b850d02b5ec493f486c88e12859"}, - {file = "rpds_py-0.19.1-cp313-none-win32.whl", hash = "sha256:afedc35fe4b9e30ab240b208bb9dc8938cb4afe9187589e8d8d085e1aacb8309"}, - {file = "rpds_py-0.19.1-cp313-none-win_amd64.whl", hash = "sha256:1d4af2eb520d759f48f1073ad3caef997d1bfd910dc34e41261a595d3f038a94"}, - {file = "rpds_py-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:34bca66e2e3eabc8a19e9afe0d3e77789733c702c7c43cd008e953d5d1463fde"}, - {file = "rpds_py-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:24f8ae92c7fae7c28d0fae9b52829235df83f34847aa8160a47eb229d9666c7b"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71157f9db7f6bc6599a852852f3389343bea34315b4e6f109e5cbc97c1fb2963"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d494887d40dc4dd0d5a71e9d07324e5c09c4383d93942d391727e7a40ff810b"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3661e6d4ba63a094138032c1356d557de5b3ea6fd3cca62a195f623e381c76"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97fbb77eaeb97591efdc654b8b5f3ccc066406ccfb3175b41382f221ecc216e8"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cc4bc73e53af8e7a42c8fd7923bbe35babacfa7394ae9240b3430b5dcf16b2a"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:35af5e4d5448fa179fd7fff0bba0fba51f876cd55212f96c8bbcecc5c684ae5c"}, - {file = "rpds_py-0.19.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3511f6baf8438326e351097cecd137eb45c5f019944fe0fd0ae2fea2fd26be39"}, - {file = "rpds_py-0.19.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:57863d16187995c10fe9cf911b897ed443ac68189179541734502353af33e693"}, - {file = "rpds_py-0.19.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9e318e6786b1e750a62f90c6f7fa8b542102bdcf97c7c4de2a48b50b61bd36ec"}, - {file = "rpds_py-0.19.1-cp38-none-win32.whl", hash = "sha256:53dbc35808c6faa2ce3e48571f8f74ef70802218554884787b86a30947842a14"}, - {file = "rpds_py-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:8df1c283e57c9cb4d271fdc1875f4a58a143a2d1698eb0d6b7c0d7d5f49c53a1"}, - {file = "rpds_py-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e76c902d229a3aa9d5ceb813e1cbcc69bf5bda44c80d574ff1ac1fa3136dea71"}, - {file = "rpds_py-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de1f7cd5b6b351e1afd7568bdab94934d656abe273d66cda0ceea43bbc02a0c2"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fc5a84777cb61692d17988989690d6f34f7f95968ac81398d67c0d0994a897"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:74129d5ffc4cde992d89d345f7f7d6758320e5d44a369d74d83493429dad2de5"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e360188b72f8080fefa3adfdcf3618604cc8173651c9754f189fece068d2a45"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13e6d4840897d4e4e6b2aa1443e3a8eca92b0402182aafc5f4ca1f5e24f9270a"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f09529d2332264a902688031a83c19de8fda5eb5881e44233286b9c9ec91856d"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d4b52811dcbc1aba08fd88d475f75b4f6db0984ba12275d9bed1a04b2cae9b5"}, - {file = "rpds_py-0.19.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd635c2c4043222d80d80ca1ac4530a633102a9f2ad12252183bcf338c1b9474"}, - {file = "rpds_py-0.19.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f35b34a5184d5e0cc360b61664c1c06e866aab077b5a7c538a3e20c8fcdbf90b"}, - {file = "rpds_py-0.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d4ec0046facab83012d821b33cead742a35b54575c4edfb7ed7445f63441835f"}, - {file = "rpds_py-0.19.1-cp39-none-win32.whl", hash = "sha256:f5b8353ea1a4d7dfb59a7f45c04df66ecfd363bb5b35f33b11ea579111d4655f"}, - {file = "rpds_py-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:1fb93d3486f793d54a094e2bfd9cd97031f63fcb5bc18faeb3dd4b49a1c06523"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d5c7e32f3ee42f77d8ff1a10384b5cdcc2d37035e2e3320ded909aa192d32c3"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:89cc8921a4a5028d6dd388c399fcd2eef232e7040345af3d5b16c04b91cf3c7e"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca34e913d27401bda2a6f390d0614049f5a95b3b11cd8eff80fe4ec340a1208"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5953391af1405f968eb5701ebbb577ebc5ced8d0041406f9052638bafe52209d"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:840e18c38098221ea6201f091fc5d4de6128961d2930fbbc96806fb43f69aec1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d8b735c4d162dc7d86a9cf3d717f14b6c73637a1f9cd57fe7e61002d9cb1972"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce757c7c90d35719b38fa3d4ca55654a76a40716ee299b0865f2de21c146801c"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9421b23c85f361a133aa7c5e8ec757668f70343f4ed8fdb5a4a14abd5437244"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3b823be829407393d84ee56dc849dbe3b31b6a326f388e171555b262e8456cc1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:5e58b61dcbb483a442c6239c3836696b79f2cd8e7eec11e12155d3f6f2d886d1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39d67896f7235b2c886fb1ee77b1491b77049dcef6fbf0f401e7b4cbed86bbd4"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8b32cd4ab6db50c875001ba4f5a6b30c0f42151aa1fbf9c2e7e3674893fb1dc4"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c32e41de995f39b6b315d66c27dea3ef7f7c937c06caab4c6a79a5e09e2c415"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a129c02b42d46758c87faeea21a9f574e1c858b9f358b6dd0bbd71d17713175"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:346557f5b1d8fd9966059b7a748fd79ac59f5752cd0e9498d6a40e3ac1c1875f"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31e450840f2f27699d014cfc8865cc747184286b26d945bcea6042bb6aa4d26e"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01227f8b3e6c8961490d869aa65c99653df80d2f0a7fde8c64ebddab2b9b02fd"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69084fd29bfeff14816666c93a466e85414fe6b7d236cfc108a9c11afa6f7301"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d2b88efe65544a7d5121b0c3b003ebba92bfede2ea3577ce548b69c5235185"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ea961a674172ed2235d990d7edf85d15d8dfa23ab8575e48306371c070cda67"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5beffdbe766cfe4fb04f30644d822a1080b5359df7db3a63d30fa928375b2720"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:720f3108fb1bfa32e51db58b832898372eb5891e8472a8093008010911e324c5"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c2087dbb76a87ec2c619253e021e4fb20d1a72580feeaa6892b0b3d955175a71"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ddd50f18ebc05ec29a0d9271e9dbe93997536da3546677f8ca00b76d477680c"}, - {file = "rpds_py-0.19.1.tar.gz", hash = "sha256:31dd5794837f00b46f4096aa8ccaa5972f73a938982e32ed817bb520c465e520"}, -] - -[[package]] -name = "rsa" -version = "4.9" -description = "Pure-Python RSA implementation" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] -[package.dependencies] -pyasn1 = ">=0.1.3" - [[package]] name = "ruff" -version = "0.5.6" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, - {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, - {file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, - {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, - {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, - {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, - {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, -] - -[[package]] -name = "safetensors" -version = "0.4.3" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "safetensors-0.4.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dcf5705cab159ce0130cd56057f5f3425023c407e170bca60b4868048bae64fd"}, - {file = "safetensors-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb4f8c5d0358a31e9a08daeebb68f5e161cdd4018855426d3f0c23bb51087055"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a5319ef409e7f88686a46607cbc3c428271069d8b770076feaf913664a07ac"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb9c65bd82f9ef3ce4970dc19ee86be5f6f93d032159acf35e663c6bea02b237"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edb5698a7bc282089f64c96c477846950358a46ede85a1c040e0230344fdde10"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efcc860be094b8d19ac61b452ec635c7acb9afa77beb218b1d7784c6d41fe8ad"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d88b33980222085dd6001ae2cad87c6068e0991d4f5ccf44975d216db3b57376"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5fc6775529fb9f0ce2266edd3e5d3f10aab068e49f765e11f6f2a63b5367021d"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9c6ad011c1b4e3acff058d6b090f1da8e55a332fbf84695cf3100c649cc452d1"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c496c5401c1b9c46d41a7688e8ff5b0310a3b9bae31ce0f0ae870e1ea2b8caf"}, - {file = "safetensors-0.4.3-cp310-none-win32.whl", hash = "sha256:38e2a8666178224a51cca61d3cb4c88704f696eac8f72a49a598a93bbd8a4af9"}, - {file = "safetensors-0.4.3-cp310-none-win_amd64.whl", hash = "sha256:393e6e391467d1b2b829c77e47d726f3b9b93630e6a045b1d1fca67dc78bf632"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:22f3b5d65e440cec0de8edaa672efa888030802e11c09b3d6203bff60ebff05a"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c4fa560ebd4522adddb71dcd25d09bf211b5634003f015a4b815b7647d62ebe"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9afd5358719f1b2cf425fad638fc3c887997d6782da317096877e5b15b2ce93"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8c5093206ef4b198600ae484230402af6713dab1bd5b8e231905d754022bec7"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0b2104df1579d6ba9052c0ae0e3137c9698b2d85b0645507e6fd1813b70931a"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cf18888606dad030455d18f6c381720e57fc6a4170ee1966adb7ebc98d4d6a3"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bf4f9d6323d9f86eef5567eabd88f070691cf031d4c0df27a40d3b4aaee755b"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:585c9ae13a205807b63bef8a37994f30c917ff800ab8a1ca9c9b5d73024f97ee"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faefeb3b81bdfb4e5a55b9bbdf3d8d8753f65506e1d67d03f5c851a6c87150e9"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:befdf0167ad626f22f6aac6163477fcefa342224a22f11fdd05abb3995c1783c"}, - {file = "safetensors-0.4.3-cp311-none-win32.whl", hash = "sha256:a7cef55929dcbef24af3eb40bedec35d82c3c2fa46338bb13ecf3c5720af8a61"}, - {file = "safetensors-0.4.3-cp311-none-win_amd64.whl", hash = "sha256:840b7ac0eff5633e1d053cc9db12fdf56b566e9403b4950b2dc85393d9b88d67"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:22d21760dc6ebae42e9c058d75aa9907d9f35e38f896e3c69ba0e7b213033856"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d22c1a10dff3f64d0d68abb8298a3fd88ccff79f408a3e15b3e7f637ef5c980"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1648568667f820b8c48317c7006221dc40aced1869908c187f493838a1362bc"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:446e9fe52c051aeab12aac63d1017e0f68a02a92a027b901c4f8e931b24e5397"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fef5d70683643618244a4f5221053567ca3e77c2531e42ad48ae05fae909f542"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a1f4430cc0c9d6afa01214a4b3919d0a029637df8e09675ceef1ca3f0dfa0df"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d603846a8585b9432a0fd415db1d4c57c0f860eb4aea21f92559ff9902bae4d"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a844cdb5d7cbc22f5f16c7e2a0271170750763c4db08381b7f696dbd2c78a361"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:88887f69f7a00cf02b954cdc3034ffb383b2303bc0ab481d4716e2da51ddc10e"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ee463219d9ec6c2be1d331ab13a8e0cd50d2f32240a81d498266d77d07b7e71e"}, - {file = "safetensors-0.4.3-cp312-none-win32.whl", hash = "sha256:d0dd4a1db09db2dba0f94d15addc7e7cd3a7b0d393aa4c7518c39ae7374623c3"}, - {file = "safetensors-0.4.3-cp312-none-win_amd64.whl", hash = "sha256:d14d30c25897b2bf19b6fb5ff7e26cc40006ad53fd4a88244fdf26517d852dd7"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d1456f814655b224d4bf6e7915c51ce74e389b413be791203092b7ff78c936dd"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:455d538aa1aae4a8b279344a08136d3f16334247907b18a5c3c7fa88ef0d3c46"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf476bca34e1340ee3294ef13e2c625833f83d096cfdf69a5342475602004f95"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02ef3a24face643456020536591fbd3c717c5abaa2737ec428ccbbc86dffa7a4"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7de32d0d34b6623bb56ca278f90db081f85fb9c5d327e3c18fd23ac64f465768"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a0deb16a1d3ea90c244ceb42d2c6c276059616be21a19ac7101aa97da448faf"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c59d51f182c729f47e841510b70b967b0752039f79f1de23bcdd86462a9b09ee"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f598b713cc1a4eb31d3b3203557ac308acf21c8f41104cdd74bf640c6e538e3"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5757e4688f20df083e233b47de43845d1adb7e17b6cf7da5f8444416fc53828d"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fe746d03ed8d193674a26105e4f0fe6c726f5bb602ffc695b409eaf02f04763d"}, - {file = "safetensors-0.4.3-cp37-none-win32.whl", hash = "sha256:0d5ffc6a80f715c30af253e0e288ad1cd97a3d0086c9c87995e5093ebc075e50"}, - {file = "safetensors-0.4.3-cp37-none-win_amd64.whl", hash = "sha256:a11c374eb63a9c16c5ed146457241182f310902bd2a9c18255781bb832b6748b"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1e31be7945f66be23f4ec1682bb47faa3df34cb89fc68527de6554d3c4258a4"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:03a4447c784917c9bf01d8f2ac5080bc15c41692202cd5f406afba16629e84d6"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d244bcafeb1bc06d47cfee71727e775bca88a8efda77a13e7306aae3813fa7e4"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53c4879b9c6bd7cd25d114ee0ef95420e2812e676314300624594940a8d6a91f"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74707624b81f1b7f2b93f5619d4a9f00934d5948005a03f2c1845ffbfff42212"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d52c958dc210265157573f81d34adf54e255bc2b59ded6218500c9b15a750eb"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f9568f380f513a60139971169c4a358b8731509cc19112369902eddb33faa4d"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9cd8e1560dfc514b6d7859247dc6a86ad2f83151a62c577428d5102d872721"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:89f9f17b0dacb913ed87d57afbc8aad85ea42c1085bd5de2f20d83d13e9fc4b2"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1139eb436fd201c133d03c81209d39ac57e129f5e74e34bb9ab60f8d9b726270"}, - {file = "safetensors-0.4.3-cp38-none-win32.whl", hash = "sha256:d9c289f140a9ae4853fc2236a2ffc9a9f2d5eae0cb673167e0f1b8c18c0961ac"}, - {file = "safetensors-0.4.3-cp38-none-win_amd64.whl", hash = "sha256:622afd28968ef3e9786562d352659a37de4481a4070f4ebac883f98c5836563e"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8651c7299cbd8b4161a36cd6a322fa07d39cd23535b144d02f1c1972d0c62f3c"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e375d975159ac534c7161269de24ddcd490df2157b55c1a6eeace6cbb56903f0"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084fc436e317f83f7071fc6a62ca1c513b2103db325cd09952914b50f51cf78f"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41a727a7f5e6ad9f1db6951adee21bbdadc632363d79dc434876369a17de6ad6"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7dbbde64b6c534548696808a0e01276d28ea5773bc9a2dfb97a88cd3dffe3df"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbae3b4b9d997971431c346edbfe6e41e98424a097860ee872721e176040a893"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01e4b22e3284cd866edeabe4f4d896229495da457229408d2e1e4810c5187121"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dd37306546b58d3043eb044c8103a02792cc024b51d1dd16bd3dd1f334cb3ed"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8815b5e1dac85fc534a97fd339e12404db557878c090f90442247e87c8aeaea"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e011cc162503c19f4b1fd63dfcddf73739c7a243a17dac09b78e57a00983ab35"}, - {file = "safetensors-0.4.3-cp39-none-win32.whl", hash = "sha256:01feb3089e5932d7e662eda77c3ecc389f97c0883c4a12b5cfdc32b589a811c3"}, - {file = "safetensors-0.4.3-cp39-none-win_amd64.whl", hash = "sha256:3f9cdca09052f585e62328c1c2923c70f46814715c795be65f0b93f57ec98a02"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1b89381517891a7bb7d1405d828b2bf5d75528299f8231e9346b8eba092227f9"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cd6fff9e56df398abc5866b19a32124815b656613c1c5ec0f9350906fd798aac"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840caf38d86aa7014fe37ade5d0d84e23dcfbc798b8078015831996ecbc206a3"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9650713b2cfa9537a2baf7dd9fee458b24a0aaaa6cafcea8bdd5fb2b8efdc34"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4119532cd10dba04b423e0f86aecb96cfa5a602238c0aa012f70c3a40c44b50"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e066e8861eef6387b7c772344d1fe1f9a72800e04ee9a54239d460c400c72aab"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:90964917f5b0fa0fa07e9a051fbef100250c04d150b7026ccbf87a34a54012e0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c41e1893d1206aa7054029681778d9a58b3529d4c807002c156d58426c225173"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae7613a119a71a497d012ccc83775c308b9c1dab454806291427f84397d852fd"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9bac020faba7f5dc481e881b14b6425265feabb5bfc552551d21189c0eddc3"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:420a98f593ff9930f5822560d14c395ccbc57342ddff3b463bc0b3d6b1951550"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f5e6883af9a68c0028f70a4c19d5a6ab6238a379be36ad300a22318316c00cb0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:cdd0a3b5da66e7f377474599814dbf5cbf135ff059cc73694de129b58a5e8a2c"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9bfb92f82574d9e58401d79c70c716985dc049b635fef6eecbb024c79b2c46ad"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3615a96dd2dcc30eb66d82bc76cda2565f4f7bfa89fcb0e31ba3cea8a1a9ecbb"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868ad1b6fc41209ab6bd12f63923e8baeb1a086814cb2e81a65ed3d497e0cf8f"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffba80aa49bd09195145a7fd233a7781173b422eeb995096f2b30591639517"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0acbe31340ab150423347e5b9cc595867d814244ac14218932a5cf1dd38eb39"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19bbdf95de2cf64f25cd614c5236c8b06eb2cfa47cbf64311f4b5d80224623a3"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b852e47eb08475c2c1bd8131207b405793bfc20d6f45aff893d3baaad449ed14"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d07cbca5b99babb692d76d8151bec46f461f8ad8daafbfd96b2fca40cadae65"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ab6527a20586d94291c96e00a668fa03f86189b8a9defa2cdd34a1a01acc7d5"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02318f01e332cc23ffb4f6716e05a492c5f18b1d13e343c49265149396284a44"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4b52ce9a396260eb9731eb6aea41a7320de22ed73a1042c2230af0212758ce"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:018b691383026a2436a22b648873ed11444a364324e7088b99cd2503dd828400"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:309b10dbcab63269ecbf0e2ca10ce59223bb756ca5d431ce9c9eeabd446569da"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b277482120df46e27a58082df06a15aebda4481e30a1c21eefd0921ae7e03f65"}, - {file = "safetensors-0.4.3.tar.gz", hash = "sha256:2f85fc50c4e07a21e95c24e07460fe6f7e2859d0ce88092838352b798ce711c2"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] -[package.extras] -all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] -dev = ["safetensors[all]"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] -mlx = ["mlx (>=0.0.9)"] -numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] -pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] -quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] -torch = ["safetensors[numpy]", "torch (>=1.10)"] - [[package]] name = "send2trash" version = "1.8.3" @@ -4705,22 +3743,23 @@ win32 = ["pywin32"] [[package]] name = "sentry-sdk" -version = "1.45.1" +version = "2.13.0" description = "Python client for Sentry (https://sentry.io)" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "sentry_sdk-1.45.1-py2.py3-none-any.whl", hash = "sha256:608887855ccfe39032bfd03936e3a1c4f4fc99b3a4ac49ced54a4220de61c9c1"}, - {file = "sentry_sdk-1.45.1.tar.gz", hash = "sha256:a16c997c0f4e3df63c0fc5e4207ccb1ab37900433e0f72fef88315d317829a26"}, + {file = "sentry_sdk-2.13.0-py2.py3-none-any.whl", hash = "sha256:6beede8fc2ab4043da7f69d95534e320944690680dd9a963178a49de71d726c6"}, + {file = "sentry_sdk-2.13.0.tar.gz", hash = "sha256:8d4a576f7a98eb2fdb40e13106e41f330e5c79d72a68be1316e7852cf4995260"}, ] [package.dependencies] certifi = "*" fastapi = {version = ">=0.79.0", optional = true, markers = "extra == \"fastapi\""} -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} +urllib3 = ">=1.26.11" [package.extras] aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] arq = ["arq (>=0.23)"] asyncpg = ["asyncpg (>=0.23)"] beam = ["apache-beam (>=2.12)"] @@ -4733,13 +3772,16 @@ django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] fastapi = ["fastapi (>=0.79.0)"] flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] +huggingface-hub = ["huggingface-hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +litestar = ["litestar (>=2.0.0)"] loguru = ["loguru (>=0.5)"] openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] +opentelemetry-experimental = ["opentelemetry-distro"] pure-eval = ["asttokens", "executing", "pure-eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] @@ -4749,75 +3791,23 @@ sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=5)"] +tornado = ["tornado (>=6)"] [[package]] name = "setuptools" -version = "72.1.0" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, -] - -[package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "shapely" -version = "2.0.5" -description = "Manipulation and analysis of geometric objects" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"}, - {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"}, - {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"}, - {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"}, - {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"}, - {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"}, - {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"}, - {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"}, - {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"}, - {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"}, - {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"}, - {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"}, - {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"}, - {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"}, - {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"}, - {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"}, - {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"}, - {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"}, - {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"}, - {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"}, - {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"}, - {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"}, - {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"}, - {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = "sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"}, - {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"}, - {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] -[package.dependencies] -numpy = ">=1.14,<3" - [package.extras] -docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] -test = ["pytest", "pytest-cov"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "shellingham" @@ -4863,115 +3853,17 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.31" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, - {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, - {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = "sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, - {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - [[package]] name = "stack-data" version = "0.6.3" @@ -4993,13 +3885,13 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "starlette" -version = "0.37.2" +version = "0.38.2" description = "The little ASGI library that shines." optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, + {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"}, + {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"}, ] [package.dependencies] @@ -5022,35 +3914,23 @@ files = [ [package.extras] widechars = ["wcwidth"] -[[package]] -name = "tblib" -version = "3.0.0" -description = "Traceback serialization library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "tblib-3.0.0-py3-none-any.whl", hash = "sha256:80a6c77e59b55e83911e1e607c649836a69c103963c5f28a46cbeef44acf8129"}, - {file = "tblib-3.0.0.tar.gz", hash = "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6"}, -] - [[package]] name = "temporalio" -version = "1.6.0" +version = "1.7.0" description = "Temporal.io Python SDK" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "temporalio-1.6.0-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:50207806c5b9d701226ed2aed1fce44c688225ab9a370b014b06e51872b98ea7"}, - {file = "temporalio-1.6.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:499253385dd3ca1827d34a05ae61350d54040e0d6a11502f04cbafa7b35be114"}, - {file = "temporalio-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8fb097b97f833483cd500af2460a0996f812e8019327d893844a21b1c7cd9868"}, - {file = "temporalio-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6b25d451170ecdf8443f1ed09f75ea708e8679c26636e7aa326bc89bd6bd0c84"}, - {file = "temporalio-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:b5ae0bea0665a0bc87d80e7d18870b32eec631694abc0610ee39235e99cc304b"}, - {file = "temporalio-1.6.0.tar.gz", hash = "sha256:a6f24ea91eb1dd1345c68f4ceb21dd2a11a84cda0d6d963d6e570a0c156a80f0"}, + {file = "temporalio-1.7.0-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:92ec0a1af8d4b41245df339a422f1f87367742d9638d2dba7bb7d3ab934e7f5d"}, + {file = "temporalio-1.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:8b4bb77d766a2ac1d85f3e9b682658fee67d77e87f73bd256d46cd79ecf767f6"}, + {file = "temporalio-1.7.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a38dd43061666700500d5808c18ec0b0f569504a2f22b99d7c38dc4dc50b21fd"}, + {file = "temporalio-1.7.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e0087fb6cdb9e9b8aa62c1705526947cb00a91159435d294f3da0d92b501ed56"}, + {file = "temporalio-1.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:eb45b751c6f7946dccba29260922f0e7192b28b8fb9e2aa5afc2aaf5157891d9"}, + {file = "temporalio-1.7.0.tar.gz", hash = "sha256:5057b74df644bd4f5f4eb0e95e730a0a36a16f7ee926d36fcd479c223a7c63cd"}, ] [package.dependencies] protobuf = ">=3.20" -python-dateutil = {version = ">=2.8.2,<3.0.0", markers = "python_version < \"3.11\""} types-protobuf = ">=3.20" typing-extensions = ">=4.2.0,<5.0.0" @@ -5060,13 +3940,13 @@ opentelemetry = ["opentelemetry-api (>=1.11.1,<2.0.0)", "opentelemetry-sdk (>=1. [[package]] name = "tenacity" -version = "8.5.0" +version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" files = [ - {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, - {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, + {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, ] [package.extras] @@ -5110,47 +3990,47 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] [[package]] name = "tiktoken" -version = "0.6.0" +version = "0.7.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.8" files = [ - {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"}, - {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"}, - {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"}, - {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"}, - {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"}, - {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"}, - {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"}, - {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"}, - {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"}, - {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"}, - {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"}, - {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"}, - {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"}, - {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"}, - {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"}, - {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"}, - {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"}, - {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"}, - {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"}, - {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"}, - {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"}, - {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"}, - {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"}, - {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"}, - {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"}, - {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"}, - {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"}, - {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"}, - {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"}, - {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"}, - {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"}, - {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"}, - {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"}, - {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"}, - {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"}, - {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"}, + {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, + {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, + {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, + {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, + {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, + {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, + {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, + {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, + {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, + {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, + {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, + {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, + {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, + {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, + {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"}, + {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"}, + {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"}, + {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"}, + {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"}, + {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"}, + {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"}, + {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"}, + {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"}, + {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"}, + {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"}, + {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"}, + {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"}, + {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"}, + {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, + {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, + {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, + {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, + {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, + {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, + {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, + {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, ] [package.dependencies] @@ -5180,111 +4060,111 @@ test = ["pytest", "ruff"] [[package]] name = "tokenizers" -version = "0.19.1" +version = "0.20.0" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, - {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, - {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, - {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, - {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, - {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, - {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, - {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, - {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, - {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, - {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, - {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, - {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, - {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, - {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, - {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, - {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, - {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, - {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, - {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, - {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, - {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, - {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, - {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, - {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, - {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, - {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, - {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, - {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, - {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, - {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, - {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, - {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, - {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, - {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, - {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, + {file = "tokenizers-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6cff5c5e37c41bc5faa519d6f3df0679e4b37da54ea1f42121719c5e2b4905c0"}, + {file = "tokenizers-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:62a56bf75c27443432456f4ca5ca055befa95e25be8a28141cc495cac8ae4d6d"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc7de6a63f09c4a86909c2597b995aa66e19df852a23aea894929c74369929"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:053c37ecee482cc958fdee53af3c6534286a86f5d35aac476f7c246830e53ae5"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7074aaabc151a6363fa03db5493fc95b423b2a1874456783989e96d541c7b6"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a11435780f2acd89e8fefe5e81cecf01776f6edb9b3ac95bcb76baee76b30b90"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a81cd2712973b007d84268d45fc3f6f90a79c31dfe7f1925e6732f8d2959987"}, + {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7dfd796ab9d909f76fb93080e1c7c8309f196ecb316eb130718cd5e34231c69"}, + {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8029ad2aa8cb00605c9374566034c1cc1b15130713e0eb5afcef6cface8255c9"}, + {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca4d54260ebe97d59dfa9a30baa20d0c4dd9137d99a8801700055c561145c24e"}, + {file = "tokenizers-0.20.0-cp310-none-win32.whl", hash = "sha256:95ee16b57cec11b86a7940174ec5197d506439b0f415ab3859f254b1dffe9df0"}, + {file = "tokenizers-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:0a61a11e93eeadbf02aea082ffc75241c4198e0608bbbac4f65a9026851dcf37"}, + {file = "tokenizers-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6636b798b3c4d6c9b1af1a918bd07c867808e5a21c64324e95318a237e6366c3"}, + {file = "tokenizers-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ec603e42eaf499ffd58b9258162add948717cf21372458132f14e13a6bc7172"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce124264903a8ea6f8f48e1cc7669e5ef638c18bd4ab0a88769d5f92debdf7f"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07bbeba0231cf8de07aa6b9e33e9779ff103d47042eeeb859a8c432e3292fb98"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06c0ca8397b35d38b83a44a9c6929790c1692957d88541df061cb34d82ebbf08"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca6557ac3b83d912dfbb1f70ab56bd4b0594043916688e906ede09f42e192401"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5ad94c9e80ac6098328bee2e3264dbced4c6faa34429994d473f795ec58ef4"}, + {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5c7f906ee6bec30a9dc20268a8b80f3b9584de1c9f051671cb057dc6ce28f6"}, + {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:31e087e9ee1b8f075b002bfee257e858dc695f955b43903e1bb4aa9f170e37fe"}, + {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c3124fb6f3346cb3d8d775375d3b429bf4dcfc24f739822702009d20a4297990"}, + {file = "tokenizers-0.20.0-cp311-none-win32.whl", hash = "sha256:a4bb8b40ba9eefa621fdcabf04a74aa6038ae3be0c614c6458bd91a4697a452f"}, + {file = "tokenizers-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:2b709d371f1fe60a28ef0c5c67815952d455ca7f34dbe7197eaaed3cc54b658e"}, + {file = "tokenizers-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:15c81a17d0d66f4987c6ca16f4bea7ec253b8c7ed1bb00fdc5d038b1bb56e714"}, + {file = "tokenizers-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a531cdf1fb6dc41c984c785a3b299cb0586de0b35683842a3afbb1e5207f910"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06caabeb4587f8404e0cd9d40f458e9cba3e815c8155a38e579a74ff3e2a4301"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8768f964f23f5b9f50546c0369c75ab3262de926983888bbe8b98be05392a79c"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:626403860152c816f97b649fd279bd622c3d417678c93b4b1a8909b6380b69a8"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c1b88fa9e5ff062326f4bf82681da5a96fca7104d921a6bd7b1e6fcf224af26"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7e559436a07dc547f22ce1101f26d8b2fad387e28ec8e7e1e3b11695d681d8"}, + {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48afb75e50449848964e4a67b0da01261dd3aa8df8daecf10db8fd7f5b076eb"}, + {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:baf5d0e1ff44710a95eefc196dd87666ffc609fd447c5e5b68272a7c3d342a1d"}, + {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e5e56df0e8ed23ba60ae3848c3f069a0710c4b197218fe4f89e27eba38510768"}, + {file = "tokenizers-0.20.0-cp312-none-win32.whl", hash = "sha256:ec53e5ecc142a82432f9c6c677dbbe5a2bfee92b8abf409a9ecb0d425ee0ce75"}, + {file = "tokenizers-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:f18661ece72e39c0dfaa174d6223248a15b457dbd4b0fc07809b8e6d3ca1a234"}, + {file = "tokenizers-0.20.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f7065b1084d8d1a03dc89d9aad69bcbc8415d4bc123c367063eb32958cd85054"}, + {file = "tokenizers-0.20.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e5d4069e4714e3f7ba0a4d3d44f9d84a432cd4e4aa85c3d7dd1f51440f12e4a1"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799b808529e54b7e1a36350bda2aeb470e8390e484d3e98c10395cee61d4e3c6"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f9baa027cc8a281ad5f7725a93c204d7a46986f88edbe8ef7357f40a23fb9c7"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010ec7f3f7a96adc4c2a34a3ada41fa14b4b936b5628b4ff7b33791258646c6b"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d88f06155335b14fd78e32ee28ca5b2eb30fced4614e06eb14ae5f7fba24ed"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e13eb000ef540c2280758d1b9cfa5fe424b0424ae4458f440e6340a4f18b2638"}, + {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab3cf066ff426f7e6d70435dc28a9ff01b2747be83810e397cba106f39430b0"}, + {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:39fa3761b30a89368f322e5daf4130dce8495b79ad831f370449cdacfb0c0d37"}, + {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c8da0fba4d179ddf2607821575998df3c294aa59aa8df5a6646dc64bc7352bce"}, + {file = "tokenizers-0.20.0-cp37-none-win32.whl", hash = "sha256:fada996d6da8cf213f6e3c91c12297ad4f6cdf7a85c2fadcd05ec32fa6846fcd"}, + {file = "tokenizers-0.20.0-cp37-none-win_amd64.whl", hash = "sha256:7d29aad702279e0760c265fcae832e89349078e3418dd329732d4503259fd6bd"}, + {file = "tokenizers-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:099c68207f3ef0227ecb6f80ab98ea74de559f7b124adc7b17778af0250ee90a"}, + {file = "tokenizers-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:68012d8a8cddb2eab3880870d7e2086cb359c7f7a2b03f5795044f5abff4e850"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253bdd209c6aee168deca7d0e780581bf303e0058f268f9bb06859379de19b6"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f868600ddbcb0545905ed075eb7218a0756bf6c09dae7528ea2f8436ebd2c93"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9643d9c8c5f99b6aba43fd10034f77cc6c22c31f496d2f0ee183047d948fa0"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c375c6a889aeab44734028bc65cc070acf93ccb0f9368be42b67a98e1063d3f6"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e359f852328e254f070bbd09a19a568421d23388f04aad9f2fb7da7704c7228d"}, + {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d98b01a309d4387f3b1c1dd68a8b8136af50376cf146c1b7e8d8ead217a5be4b"}, + {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:459f7537119554c2899067dec1ac74a00d02beef6558f4ee2e99513bf6d568af"}, + {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:392b87ec89452628c045c9f2a88bc2a827f4c79e7d84bc3b72752b74c2581f70"}, + {file = "tokenizers-0.20.0-cp38-none-win32.whl", hash = "sha256:55a393f893d2ed4dd95a1553c2e42d4d4086878266f437b03590d3f81984c4fe"}, + {file = "tokenizers-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:30ffe33c5c2f2aab8e9a3340d0110dd9f7ace7eec7362e20a697802306bd8068"}, + {file = "tokenizers-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aa2d4a6fed2a7e3f860c7fc9d48764bb30f2649d83915d66150d6340e06742b8"}, + {file = "tokenizers-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5ef0f814084a897e9071fc4a868595f018c5c92889197bdc4bf19018769b148"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1e1b791e8c3bf4c4f265f180dadaff1c957bf27129e16fdd5e5d43c2d3762c"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b69e55e481459c07885263743a0d3c18d52db19bae8226a19bcca4aaa213fff"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806b4d82e27a2512bc23057b2986bc8b85824914286975b84d8105ff40d03d9"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9859e9ef13adf5a473ccab39d31bff9c550606ae3c784bf772b40f615742a24f"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef703efedf4c20488a8eb17637b55973745b27997ff87bad88ed499b397d1144"}, + {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eec0061bab94b1841ab87d10831fdf1b48ebaed60e6d66d66dbe1d873f92bf5"}, + {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:980f3d0d7e73f845b69087f29a63c11c7eb924c4ad6b358da60f3db4cf24bdb4"}, + {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c157550a2f3851b29d7fdc9dc059fcf81ff0c0fc49a1e5173a89d533ed043fa"}, + {file = "tokenizers-0.20.0-cp39-none-win32.whl", hash = "sha256:8a3d2f4d08608ec4f9895ec25b4b36a97f05812543190a5f2c3cd19e8f041e5a"}, + {file = "tokenizers-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:d90188d12afd0c75e537f9a1d92f9c7375650188ee4f48fdc76f9e38afbd2251"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d68e15f1815357b059ec266062340c343ea7f98f7f330602df81ffa3474b6122"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:23f9ecec637b9bc80da5f703808d29ed5329e56b5aa8d791d1088014f48afadc"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f830b318ee599e3d0665b3e325f85bc75ee2d2ca6285f52e439dc22b64691580"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3dc750def789cb1de1b5a37657919545e1d9ffa667658b3fa9cb7862407a1b8"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e26e6c755ae884c2ea6135cd215bdd0fccafe4ee62405014b8c3cd19954e3ab9"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a1158c7174f427182e08baa2a8ded2940f2b4a3e94969a85cc9cfd16004cbcea"}, + {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6324826287a3fc198898d3dcf758fe4a8479e42d6039f4c59e2cedd3cf92f64e"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d8653149405bb0c16feaf9cfee327fdb6aaef9dc2998349fec686f35e81c4e2"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a2dc1e402a155e97309287ca085c80eb1b7fab8ae91527d3b729181639fa51"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bef67b20aa6e5f7868c42c7c5eae4d24f856274a464ae62e47a0f2cccec3da"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da06e397182ff53789c506c7833220c192952c57e1581a53f503d8d953e2d67e"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:302f7e11a14814028b7fc88c45a41f1bbe9b5b35fd76d6869558d1d1809baa43"}, + {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:055ec46e807b875589dfbe3d9259f9a6ee43394fb553b03b3d1e9541662dbf25"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3144b8acebfa6ae062e8f45f7ed52e4b50fb6c62f93afc8871b525ab9fdcab3"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b52aa3fd14b2a07588c00a19f66511cff5cca8f7266ca3edcdd17f3512ad159f"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b8cf52779ffc5d4d63a0170fbeb512372bad0dd014ce92bbb9149756c831124"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:983a45dd11a876124378dae71d6d9761822199b68a4c73f32873d8cdaf326a5b"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6b819c9a19831ebec581e71a7686a54ab45d90faf3842269a10c11d746de0c"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e738cfd80795fcafcef89c5731c84b05638a4ab3f412f97d5ed7765466576eb1"}, + {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8842c7be2fadb9c9edcee233b1b7fe7ade406c99b0973f07439985c1c1d0683"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e47a82355511c373a4a430c4909dc1e518e00031207b1fec536c49127388886b"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9afbf359004551179a5db19424180c81276682773cff2c5d002f6eaaffe17230"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07eaa8799a92e6af6f472c21a75bf71575de2af3c0284120b7a09297c0de2f3"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0994b2e5fc53a301071806bc4303e4bc3bdc3f490e92a21338146a36746b0872"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6466e0355b603d10e3cc3d282d350b646341b601e50969464a54939f9848d0"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1e86594c2a433cb1ea09cfbe596454448c566e57ee8905bd557e489d93e89986"}, + {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3e14cdef1efa96ecead6ea64a891828432c3ebba128bdc0596e3059fea104ef3"}, + {file = "tokenizers-0.20.0.tar.gz", hash = "sha256:39d7acc43f564c274085cafcd1dae9d36f332456de1a31970296a6b8da4eac8d"}, ] [package.dependencies] @@ -5317,17 +4197,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "toolz" -version = "0.12.1" -description = "List processing tools and functional utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, -] - [[package]] name = "tornado" version = "6.4.1" @@ -5350,13 +4219,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -5383,83 +4252,15 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] -[[package]] -name = "transformers" -version = "4.43.3" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "transformers-4.43.3-py3-none-any.whl", hash = "sha256:6552beada5d826c25ff9b79139d237ab9050c6ea96b73d7fd2f8a8ba23ee76a4"}, - {file = "transformers-4.43.3.tar.gz", hash = "sha256:820c5b192bb1bf47250802901a8f0bf581e06b8fded89179d4ef08a1e903ee1c"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.23.2,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -safetensors = ">=0.4.1" -tokenizers = ">=0.19,<0.20" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.21.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -benchmark = ["optimum-benchmark (>=0.2.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "scipy (<1.13.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1,<0.14.0)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.19,<0.20)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm (<=0.9.16)", "tokenizers (>=0.19,<0.20)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)", "scipy (<1.13.0)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6,<0.15.0)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "isort (>=5.5.4)", "ruff (==0.4.4)", "urllib3 (<2.0.0)"] -ray = ["ray[tune] (>=2.7.0)"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -ruff = ["ruff (==0.4.4)"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "nltk", "parameterized", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-rich", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.4.4)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow (>2.9,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-cpu = ["keras (>2.9,<2.16)", "keras-nlp (>=0.3.1,<0.14.0)", "onnxconverter-common", "tensorflow-cpu (>2.9,<2.16)", "tensorflow-probability (<0.24)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm (<=0.9.16)"] -tokenizers = ["tokenizers (>=0.19,<0.20)"] -torch = ["accelerate (>=0.21.0)", "torch"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.23.2,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.19,<0.20)", "torch", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (>=10.0.1,<=15.0)"] - [[package]] name = "typer" -version = "0.12.3" +version = "0.12.4" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, - {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, + {file = "typer-0.12.4-py3-none-any.whl", hash = "sha256:819aa03699f438397e876aa12b0d63766864ecba1b579092cc9fe35d886e34b6"}, + {file = "typer-0.12.4.tar.gz", hash = "sha256:c9c1613ed6a166162705b3347b8d10b661ccc5d95692654d0fb628118f2c34e6"}, ] [package.dependencies] @@ -5481,13 +4282,13 @@ files = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20240316" +version = "2.9.0.20240821" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, + {file = "types-python-dateutil-2.9.0.20240821.tar.gz", hash = "sha256:9649d1dcb6fef1046fb18bebe9ea2aa0028b160918518c34589a46045f6ebd98"}, + {file = "types_python_dateutil-2.9.0.20240821-py3-none-any.whl", hash = "sha256:f5889fcb4e63ed4aaa379b44f93c32593d50b9a94c9a60a0c854d8cc3511cd57"}, ] [[package]] @@ -5501,21 +4302,6 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing module." -optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, -] - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - [[package]] name = "tzdata" version = "2024.1" @@ -5560,19 +4346,18 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.23.2" +version = "0.30.6" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.23.2-py3-none-any.whl", hash = "sha256:1f9be6558f01239d4fdf22ef8126c39cb1ad0addf76c40e760549d2c2f43ab53"}, - {file = "uvicorn-0.23.2.tar.gz", hash = "sha256:4d3cc12d7727ba72b64d12d3cc7743124074c0a69f7b201512fc50c3e3f1569a"}, + {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, + {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, ] [package.dependencies] click = ">=7.0" h11 = ">=0.8" -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] @@ -5622,13 +4407,13 @@ files = [ [[package]] name = "webcolors" -version = "24.6.0" +version = "24.8.0" description = "A library for working with the color formats defined by HTML and CSS." optional = false python-versions = ">=3.8" files = [ - {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, - {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, + {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, + {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, ] [package.extras] @@ -5664,130 +4449,13 @@ test = ["websockets"] [[package]] name = "widgetsnbextension" -version = "4.0.11" +version = "4.0.13" description = "Jupyter interactive widgets for Jupyter Notebook" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.11-py3-none-any.whl", hash = "sha256:55d4d6949d100e0d08b94948a42efc3ed6dfdc0e9468b2c4b128c9a2ce3a7a36"}, - {file = "widgetsnbextension-4.0.11.tar.gz", hash = "sha256:8b22a8f1910bfd188e596fe7fc05dcbd87e810c8a4ba010bdb3da86637398474"}, -] - -[[package]] -name = "xxhash" -version = "3.4.1" -description = "Python binding for xxHash" -optional = false -python-versions = ">=3.7" -files = [ - {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, - {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, - {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, - {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, - {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, - {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, - {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, - {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, - {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, - {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, - {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, - {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, - {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, - {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, - {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, - {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, - {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, - {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, - {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, - {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, + {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, + {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, ] [[package]] @@ -5893,26 +4561,15 @@ files = [ idna = ">=2.0" multidict = ">=4.0" -[[package]] -name = "zict" -version = "3.0.0" -description = "Mutable mapping tools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zict-3.0.0-py2.py3-none-any.whl", hash = "sha256:5796e36bd0e0cc8cf0fbc1ace6a68912611c1dbd74750a3f3026b9b9d6a327ae"}, - {file = "zict-3.0.0.tar.gz", hash = "sha256:e321e263b6a97aafc0790c3cfb3c04656b7066e6738c37fffcca95d803c9fba5"}, -] - [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] @@ -5921,5 +4578,5 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" -python-versions = ">=3.10,<3.11" -content-hash = "0d21d885d24128fea87c662550775ee8df4f6af419cd7c844351b8b202b3efd1" +python-versions = ">=3.11,<3.12" +content-hash = "ce6fb9e63ff83a508ad9660217393b84e7174493f6b30f21f6a23fe9b4262fc8" diff --git a/agents-api/pyproject.toml b/agents-api/pyproject.toml index 701279b2f..eceeba341 100644 --- a/agents-api/pyproject.toml +++ b/agents-api/pyproject.toml @@ -7,48 +7,45 @@ readme = "README.md" packages = [{include = "agents_api"}] [tool.poetry.dependencies] -python = ">=3.10,<3.11" -fastapi = "^0.110.1" +python = ">=3.11,<3.12" +fastapi = "^0.112.1" pycozo = {extras = ["embedded"], version = "^0.7.6"} -uvicorn = "^0.23.2" +uvicorn = "^0.30.6" fire = "^0.5.0" environs = "^10.3.0" -google-cloud-aiplatform = "^1.33.0" -pandas = "^2.1.0" -openai = "^1.12.0" -httpx = "^0.26.0" -async-lru = "^2.0.4" -sentry-sdk = {extras = ["fastapi"], version = "^1.38.0"} -temporalio = "^1.4.0" -pydantic = "^2.5.3" +pandas = "^2.2.2" +openai = "^1.41.0" +httpx = "^0.27.0" +sentry-sdk = {extras = ["fastapi"], version = "^2.13.0"} +temporalio = "^1.6.0" +pydantic = "^2.8.2" arrow = "^1.3.0" -jinja2 = "^3.1.3" +jinja2 = "^3.1.4" jinja2schema = "^0.1.4" jsonschema = "^4.21.1" -litellm = "^1.35.32" -numpy = "^1.26.4" -transformers = "^4.40.1" -tiktoken = "^0.6.0" -xxhash = "^3.4.1" -tenacity = "^8.3.0" +litellm = "^1.43.18" +numpy = "^2.1.0" +tiktoken = "^0.7.0" +tenacity = "^9.0.0" beartype = "^0.18.5" pydantic-partial = "^0.5.5" simpleeval = "^0.9.13" +lz4 = "^4.3.3" +pyyaml = "^6.0.2" +google-re2 = "^1.1.20240702" +python-box = "^7.2.0" [tool.poetry.group.dev.dependencies] -ipython = "^8.18.1" +ipython = "^8.26.0" ruff = "^0.5.5" -datamodel-code-generator = "^0.25.8" +datamodel-code-generator = "^0.25.9" cozo-migrate = "^0.2.0" poethepoet = "^0.25.1" pytype = ">=2024.4.11" -julep = "^0.2.4" pyjwt = "^2.8.0" ward = "^0.68.0b0" -jupyterlab = "^4.1.8" -ipywidgets = "^8.1.2" -jupyter-ai = "^2.14.1" -langchain-openai = "^0.1.6" +jupyterlab = "^4.2.4" +ipywidgets = "^8.1.3" wat-inspector = "^0.2.1" [build-system] @@ -67,19 +64,20 @@ check = [ "format", "typecheck", ] -test = "ward" codegen = """ datamodel-codegen \ --input ../openapi.yaml \ --input-file-type openapi \ --output agents_api/autogen/ \ --output-model-type pydantic_v2.BaseModel \ + --strict-types bool \ --strict-nullable \ --allow-population-by-field-name \ --field-include-all-keys \ + --reuse-model \ + --snake-case-field \ --enum-field-as-literal all \ --field-constraints \ - --reuse-model \ --use-operation-id-as-name \ --use-schema-description \ --use-field-description \ @@ -93,8 +91,12 @@ datamodel-codegen \ --use-exact-imports \ --use-standard-collections \ --use-non-positive-negative-number-constrained-types \ + --target-python-version 3.11 \ --collapse-root-models \ - --target-python-version 3.10 \ --openapi-scopes schemas \ --keep-model-order \ - --disable-timestamp""" \ No newline at end of file + --disable-timestamp""" + +[tool.poe.tasks.test] +env = { AGENTS_API_TESTING = "true" } +cmd = "ward test" diff --git a/agents-api/pytype.toml b/agents-api/pytype.toml index 1b95217a6..2371cea58 100644 --- a/agents-api/pytype.toml +++ b/agents-api/pytype.toml @@ -4,13 +4,11 @@ # Space-separated list of files or directories to exclude. exclude = [ - '**/*_test.py', - '**/test_*.py', ] # Space-separated list of files or directories to process. inputs = [ - 'agents_api', + '.', ] # Keep going past errors to analyze as many files as possible. @@ -30,62 +28,37 @@ platform = 'linux' pythonpath = '.' # Python version (major.minor) of the target code. -python_version = '3.10' - -# Bind 'self' in methods with non-transparent decorators. This flag is temporary -# and will be removed once this behavior is enabled by default. -bind_decorated_methods = true +python_version = '3.11' # Don't allow None to match bool. This flag is temporary and will be removed # once this behavior is enabled by default. -none_is_not_bool = false - -# Enable parameter count checks for overriding methods with renamed arguments. -# This flag is temporary and will be removed once this behavior is enabled by -# default. -overriding_renamed_parameter_count_checks = true +none_is_not_bool = true # Variables initialized as None retain their None binding. This flag is # temporary and will be removed once this behavior is enabled by default. strict_none_binding = true -# Support the third-party fiddle library. This flag is temporary and will be -# removed once this behavior is enabled by default. -use_fiddle_overlay = true +# Space-separated list of error names to ignore. +disable = [ + 'pyi-error', +] + +# -------------- +# Optional flags +# -------------- + +# Bind 'self' in methods with non-transparent decorators. This flag is temporary +# and will be removed once this behavior is enabled by default. +bind_decorated_methods = false + +# Enable parameter count checks for overriding methods with renamed arguments. +# This flag is temporary and will be removed once this behavior is enabled by +# default. +overriding_renamed_parameter_count_checks = false # Opt-in: Do not allow Any as a return type. no_return_any = false # Opt-in: Require decoration with @typing.override when overriding a method or # nested class attribute of a parent class. -require_override_decorator = false - -# Experimental: Infer precise return types even for invalid function calls. -precise_return = true - -# Experimental: Solve unknown types to label with structural types. -protocols = false - -# Experimental: Only load submodules that are explicitly imported. -strict_import = true - -# Experimental: Enable exhaustive checking of function parameter types. -strict_parameter_checks = false - -# Experimental: Emit errors for comparisons between incompatible primitive -# types. -strict_primitive_comparisons = false - -# Experimental: Check that variables are defined in all possible code paths. -strict_undefined_checks = false - -# Experimental: FOR TESTING ONLY. Use pytype/rewrite/. -use_rewrite = false - -# Space-separated list of error names to ignore. -disable = [ - 'pyi-error', -] - -# Don't report errors. -report_errors = true +require_override_decorator = false \ No newline at end of file diff --git a/agents-api/tests/fixtures.py b/agents-api/tests/fixtures.py index 2f7cfbffe..c6a98ee99 100644 --- a/agents-api/tests/fixtures.py +++ b/agents-api/tests/fixtures.py @@ -1,94 +1,356 @@ from uuid import uuid4 -from julep import AsyncClient, Client +from cozo_migrate.api import apply, init +from fastapi.testclient import TestClient +from pycozo import Client as CozoClient +from temporalio.client import WorkflowHandle from ward import fixture -from agents_api.routers.sessions.session import BaseSession +from agents_api.autogen.openapi_model import ( + CreateAgentRequest, + CreateDocRequest, + CreateExecutionRequest, + CreateSessionRequest, + CreateTaskRequest, + CreateToolRequest, + CreateTransitionRequest, + CreateUserRequest, +) +from agents_api.env import api_key, api_key_header_name +from agents_api.models.agent.create_agent import create_agent +from agents_api.models.agent.delete_agent import delete_agent +from agents_api.models.developer.get_developer import get_developer +from agents_api.models.docs.create_doc import create_doc +from agents_api.models.docs.delete_doc import delete_doc +from agents_api.models.execution.create_execution import create_execution +from agents_api.models.execution.create_execution_transition import ( + create_execution_transition, +) +from agents_api.models.execution.create_temporal_lookup import create_temporal_lookup +from agents_api.models.session.create_session import create_session +from agents_api.models.session.delete_session import delete_session +from agents_api.models.task.create_task import create_task +from agents_api.models.task.delete_task import delete_task +from agents_api.models.tools.create_tools import create_tools +from agents_api.models.tools.delete_tool import delete_tool +from agents_api.models.user.create_user import create_user +from agents_api.models.user.delete_user import delete_user +from agents_api.web import app -# TODO: make clients connect to real service +from .utils import patch_embed_acompletion as patch_embed_acompletion_ctx + +EMBEDDING_SIZE: int = 1024 @fixture(scope="global") -def base_session(): - return BaseSession(uuid4(), uuid4()) +def cozo_client(migrations_dir: str = "./migrations"): + # Create a new client for each test + # and initialize the schema. + client = CozoClient() + + init(client) + apply(client, migrations_dir=migrations_dir, all_=True) + + return client @fixture(scope="global") -def client(): - # Mock server base url - base_url = "http://localhost:8080" - client = Client(api_key="thisisnotarealapikey", base_url=base_url) +def test_developer_id(cozo_client=cozo_client): + developer_id = uuid4() - return client + cozo_client.run( + f""" + ?[developer_id, email, settings] <- [["{str(developer_id)}", "developers@julep.ai", {{}}]] + :insert developers {{ developer_id, email, settings }} + """ + ) + yield developer_id -@fixture -def async_client(): - # Mock server base url - base_url = "http://localhost:8080" - client = AsyncClient(api_key="thisisnotarealapikey", base_url=base_url) + cozo_client.run( + f""" + ?[developer_id, email] <- [["{str(developer_id)}", "developers@julep.ai"]] + :delete developers {{ developer_id, email }} + """ + ) - return client + +@fixture(scope="global") +def test_developer(cozo_client=cozo_client, developer_id=test_developer_id): + return get_developer( + developer_id=developer_id, + client=cozo_client, + ) -@fixture -def agent(client=client): - return client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - functions=[ - { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - - -@fixture -def user(client=client): - return client.users.create( - name="test user", - about="test user about", - ) - - -@fixture -def session(user=user, agent=agent, client=client): - return client.sessions.create( +@fixture(scope="test") +def patch_embed_acompletion(): + output = {"role": "assistant", "content": "Hello, world!"} + + with patch_embed_acompletion_ctx(output) as (embed, acompletion): + yield embed, acompletion + + +@fixture(scope="global") +def test_agent(cozo_client=cozo_client, developer_id=test_developer_id): + agent = create_agent( + developer_id=developer_id, + data=CreateAgentRequest( + model="gpt-4o", + name="test agent", + about="test agent about", + metadata={"test": "test"}, + ), + client=cozo_client, + ) + + yield agent + + delete_agent( + developer_id=developer_id, + agent_id=agent.id, + client=cozo_client, + ) + + +@fixture(scope="global") +def test_user(cozo_client=cozo_client, developer_id=test_developer_id): + user = create_user( + developer_id=developer_id, + data=CreateUserRequest( + name="test user", + about="test user about", + ), + client=cozo_client, + ) + + yield user + + delete_user( + developer_id=developer_id, user_id=user.id, + client=cozo_client, + ) + + +@fixture(scope="global") +def test_session( + cozo_client=cozo_client, + developer_id=test_developer_id, + test_user=test_user, + test_agent=test_agent, +): + session = create_session( + developer_id=developer_id, + data=CreateSessionRequest( + agent=test_agent.id, + user=test_user.id, + ), + client=cozo_client, + ) + + yield session + + delete_session( + developer_id=developer_id, + session_id=session.id, + client=cozo_client, + ) + + +@fixture(scope="global") +def test_doc( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + doc = create_doc( + developer_id=developer_id, + owner_type="agent", + owner_id=agent.id, + data=CreateDocRequest(title="Hello", content=["World"]), + client=client, + ) + + yield doc + + delete_doc( + developer_id=developer_id, + doc_id=doc.id, + owner_type="agent", + owner_id=agent.id, + client=client, + ) + + +@fixture(scope="global") +def test_user_doc( + client=cozo_client, + developer_id=test_developer_id, + user=test_user, +): + doc = create_doc( + developer_id=developer_id, + owner_type="user", + owner_id=user.id, + data=CreateDocRequest(title="Hello", content=["World"]), + client=client, + ) + + yield doc + + delete_doc( + developer_id=developer_id, + doc_id=doc.id, + owner_type="user", + owner_id=user.id, + client=client, + ) + + +@fixture(scope="global") +def test_task( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + task = create_task( + developer_id=developer_id, agent_id=agent.id, - situation="test situation", + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [{"evaluate": {"hello": '"world"'}}], + } + ), + client=client, + ) + + yield task + + delete_task( + developer_id=developer_id, + task_id=task.id, + client=client, ) -@fixture -def task(agent=agent, client=client): - return client.tasks.create( +@fixture(scope="global") +def test_execution( + client=cozo_client, + developer_id=test_developer_id, + task=test_task, +): + workflow_handle = WorkflowHandle( + client=None, + id="blah", + ) + + execution = create_execution( + developer_id=developer_id, + task_id=task.id, + data=CreateExecutionRequest(input={"test": "test"}), + client=client, + ) + create_temporal_lookup( + developer_id=developer_id, + task_id=task.id, + workflow_handle=workflow_handle, + client=client, + ) + + yield execution + + client.run( + f""" + ?[execution_id] <- ["{str(execution.id)}"] + :delete executions {{ execution_id }} + """ + ) + + +@fixture(scope="global") +def test_transition( + client=cozo_client, + developer_id=test_developer_id, + execution=test_execution, +): + transition = create_execution_transition( + developer_id=developer_id, + execution_id=execution.id, + data=CreateTransitionRequest( + type="step", + output={}, + current={"workflow": "main", "step": 0}, + next={"workflow": "wf1", "step": 1}, + ), + client=client, + ) + + yield transition + + client.run( + f""" + ?[transition_id] <- ["{str(transition.id)}"] + :delete transitions {{ transition_id }} + """ + ) + + +@fixture(scope="global") +def test_tool( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + function = { + "description": "A function that prints hello world", + "parameters": {"type": "object", "properties": {}}, + } + + tool = { + "function": function, + "name": "hello_world1", + "type": "function", + } + + [tool, *_] = create_tools( + developer_id=developer_id, + agent_id=agent.id, + data=[CreateToolRequest(**tool)], + client=client, + ) + + yield tool + + delete_tool( + developer_id=developer_id, agent_id=agent.id, - name="task1", - description="task 1", - tools_available=["tool1"], - input_schema={}, - main=[], + tool_id=tool.id, + client=client, ) + + +@fixture(scope="global") +def client(cozo_client=cozo_client): + client = TestClient(app=app) + app.state.cozo_client = cozo_client + + return client + + +@fixture(scope="global") +def make_request(client=client, developer_id=test_developer_id): + def _make_request(method, url, **kwargs): + headers = kwargs.pop("headers", {}) + headers = { + **headers, + "X-Developer-Id": str(developer_id), + api_key_header_name: api_key, + } + + return client.request(method, url, headers=headers, **kwargs) + + return _make_request diff --git a/model-serving/model_api/__init__.py b/agents-api/tests/sample_tasks/__init__.py similarity index 100% rename from model-serving/model_api/__init__.py rename to agents-api/tests/sample_tasks/__init__.py diff --git a/agents-api/tests/sample_tasks/find_selector.yaml b/agents-api/tests/sample_tasks/find_selector.yaml new file mode 100644 index 000000000..465a2b578 --- /dev/null +++ b/agents-api/tests/sample_tasks/find_selector.yaml @@ -0,0 +1,86 @@ +name: Find request and selector for identity provider + +input_schema: + type: object + properties: + screenshot_base64: + type: string + network_requests: + type: array + items: + type: object + properties: + request: + type: object + properties: + url: + type: string + method: + type: string + headers: + type: object + additionalProperties: + type: string + body: + type: string + response: + type: object + properties: + status: + type: integer + headers: + type: object + additionalProperties: + type: string + body: + type: string + parameters: + type: array + items: + type: string + + # Shortcut to require all props + additionalProperties: false + minProperties: 3 + +main: + - map: + prompt: + - role: system + content: |- + From the screenshot below, can you identify if the page has {{_}} for the user? + Write your answer in the following yaml format: + + found: true|false + value: |null + + Make sure to end your answer in the above format only. + Please do not include any other information or explanation after it. + + - role: user + content: + - type: image_url + image_url: + url: "{{inputs[0].screenshot_base64}}" + + over: _["parameters"] + reduce: >- + results + + [ + load_yaml(_["choices"][0]["message"].content.strip()) + ] + + - evaluate: + result: >- + [ + {"value": result["value"], "network_request": request} + for request in inputs[0]["network_requests"] + for result in _ + if result["found"] and result["value"] in request["response"]["body"] + ] + + - if: len(_["result"]) > 0 + then: + log: list(zip(_, inputs[0]["network_requests"])) + else: + error: "Could not find the selector in any of the network requests" \ No newline at end of file diff --git a/agents-api/tests/sample_tasks/test_find_selector.py b/agents-api/tests/sample_tasks/test_find_selector.py new file mode 100644 index 000000000..67ad88607 --- /dev/null +++ b/agents-api/tests/sample_tasks/test_find_selector.py @@ -0,0 +1,121 @@ +# Tests for task queries + +import os +from uuid import uuid4 + +from ward import raises, test + +from ..fixtures import cozo_client, test_agent, test_developer_id +from ..utils import patch_embed_acompletion, patch_http_client_with_temporal + +this_dir = os.path.dirname(__file__) + + +@test("workflow sample: find-selector create task") +async def _( + cozo_client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + agent_id = str(agent.id) + task_id = str(uuid4()) + + with patch_embed_acompletion(), open( + f"{this_dir}/find_selector.yaml", "r" + ) as sample_file: + task_def = sample_file.read() + + async with patch_http_client_with_temporal( + cozo_client=cozo_client, developer_id=developer_id + ) as ( + make_request, + _, + ): + make_request( + method="POST", + url=f"/agents/{agent_id}/tasks/{task_id}", + headers={"Content-Type": "application/x-yaml"}, + data=task_def, + ).raise_for_status() + + +@test("workflow sample: find-selector start with bad input should fail") +async def _( + cozo_client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + agent_id = str(agent.id) + task_id = str(uuid4()) + + with patch_embed_acompletion(), open( + f"{this_dir}/find_selector.yaml", "r" + ) as sample_file: + task_def = sample_file.read() + + async with patch_http_client_with_temporal( + cozo_client=cozo_client, developer_id=developer_id + ) as ( + make_request, + temporal_client, + ): + make_request( + method="POST", + url=f"/agents/{agent_id}/tasks/{task_id}", + headers={"Content-Type": "application/x-yaml"}, + data=task_def, + ).raise_for_status() + + execution_data = dict(input={"test": "input"}) + + with raises(BaseException): + make_request( + method="POST", + url=f"/tasks/{task_id}/executions", + json=execution_data, + ).raise_for_status() + + +@test("workflow sample: find-selector start with correct input") +async def _( + cozo_client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + agent_id = str(agent.id) + task_id = str(uuid4()) + + with patch_embed_acompletion( + output={"role": "assistant", "content": "found: true\nvalue: 'Gaga'"} + ), open(f"{this_dir}/find_selector.yaml", "r") as sample_file: + task_def = sample_file.read() + + async with patch_http_client_with_temporal( + cozo_client=cozo_client, developer_id=developer_id + ) as ( + make_request, + temporal_client, + ): + make_request( + method="POST", + url=f"/agents/{agent_id}/tasks/{task_id}", + headers={"Content-Type": "application/x-yaml"}, + data=task_def, + ).raise_for_status() + + input = dict( + screenshot_base64="iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA", + network_requests=[{"request": {}, "response": {"body": "Lady Gaga"}}], + parameters=["name"], + ) + execution_data = dict(input=input) + + execution_created = make_request( + method="POST", + url=f"/tasks/{task_id}/executions", + json=execution_data, + ).json() + + handle = temporal_client.get_workflow_handle(execution_created["jobs"][0]) + + await handle.result() diff --git a/agents-api/tests/test_activities.py b/agents-api/tests/test_activities.py index 5ebf49d5b..98dfc97b5 100644 --- a/agents-api/tests/test_activities.py +++ b/agents-api/tests/test_activities.py @@ -1,92 +1,55 @@ -import time -import uuid +from uuid import uuid4 from ward import test -from agents_api.activities.truncation import get_extra_entries -from agents_api.autogen.openapi_model import Role -from agents_api.common.protocol.entries import Entry - - -@test("get extra entries, do not strip system message") -def _(): - session_ids = [uuid.uuid4()] * 3 - entry_ids = [uuid.uuid4()] * 3 - now = time.time() - messages = [ - Entry( - entry_id=entry_ids[0], - session_id=session_ids[0], - role=Role.system, - content="content 1", - created_at=now, - timestamp=now, - ), - Entry( - entry_id=entry_ids[1], - session_id=session_ids[1], - role=Role.assistant, - content="content 2", - created_at=now, - timestamp=now, - ), - Entry( - entry_id=entry_ids[2], - session_id=session_ids[2], - role=Role.user, - content="content 3", - created_at=now, - timestamp=now, - ), - ] - - threshold = sum([m.token_count for m in messages]) - 1 - result = get_extra_entries(messages, threshold) - - assert result == [messages[1].id] - - -@test("get extra entries") -def _(): - session_ids = [uuid.uuid4()] * 3 - entry_ids = [uuid.uuid4()] * 3 - now = time.time() - messages = [ - Entry( - entry_id=entry_ids[0], - session_id=session_ids[0], - role=Role.user, - content="content 1", - created_at=now, - timestamp=now, +from agents_api.activities.embed_docs import embed_docs +from agents_api.activities.types import EmbedDocsPayload +from agents_api.clients import temporal +from agents_api.env import temporal_task_queue +from agents_api.workflows.demo import DemoWorkflow + +from .fixtures import ( + cozo_client, + test_developer_id, + test_doc, +) +from .utils import patch_testing_temporal + + +@test("activity: call direct embed_docs") +async def _( + cozo_client=cozo_client, + developer_id=test_developer_id, + doc=test_doc, +): + title = "title" + content = ["content 1"] + include_title = True + + await embed_docs( + EmbedDocsPayload( + developer_id=developer_id, + doc_id=doc.id, + title=title, + content=content, + include_title=include_title, + embed_instruction=None, ), - Entry( - entry_id=entry_ids[1], - session_id=session_ids[1], - role=Role.assistant, - content="content 2", - created_at=now, - timestamp=now, - ), - Entry( - entry_id=entry_ids[2], - session_id=session_ids[2], - role=Role.user, - content="content 3", - created_at=now, - timestamp=now, - ), - ] - - threshold = sum([m.token_count for m in messages]) - 1 - result = get_extra_entries(messages, threshold) + cozo_client, + ) - assert result == [messages[0].id] +@test("activity: call demo workflow via temporal client") +async def _(): + async with patch_testing_temporal() as (_, mock_get_client): + client = await temporal.get_client() -@test("get extra entries, no change if empty") -def _(): - messages = [] - result = get_extra_entries(messages, 1) + result = await client.execute_workflow( + DemoWorkflow.run, + args=[1, 2], + id=str(uuid4()), + task_queue=temporal_task_queue, + ) - assert result == [] + assert result == 3 + mock_get_client.assert_called_once() diff --git a/agents-api/tests/test_agent_queries.py b/agents-api/tests/test_agent_queries.py new file mode 100644 index 000000000..f074a768a --- /dev/null +++ b/agents-api/tests/test_agent_queries.py @@ -0,0 +1,163 @@ +# Tests for agent queries +from uuid import uuid4 + +from ward import raises, test + +from agents_api.autogen.openapi_model import ( + Agent, + CreateAgentRequest, + CreateOrUpdateAgentRequest, + PatchAgentRequest, + ResourceUpdatedResponse, + UpdateAgentRequest, +) +from agents_api.models.agent.create_agent import create_agent +from agents_api.models.agent.create_or_update_agent import create_or_update_agent +from agents_api.models.agent.delete_agent import delete_agent +from agents_api.models.agent.get_agent import get_agent +from agents_api.models.agent.list_agents import list_agents +from agents_api.models.agent.patch_agent import patch_agent +from agents_api.models.agent.update_agent import update_agent +from tests.fixtures import cozo_client, test_agent, test_developer_id + + +@test("model: create agent") +def _(client=cozo_client, developer_id=test_developer_id): + create_agent( + developer_id=developer_id, + data=CreateAgentRequest( + name="test agent", + about="test agent about", + model="gpt-4o", + ), + client=client, + ) + + +@test("model: create agent with instructions") +def _(client=cozo_client, developer_id=test_developer_id): + create_agent( + developer_id=developer_id, + data=CreateAgentRequest( + name="test agent", + about="test agent about", + model="gpt-4o", + instructions=["test instruction"], + ), + client=client, + ) + + +@test("model: create or update agent") +def _(client=cozo_client, developer_id=test_developer_id): + create_or_update_agent( + developer_id=developer_id, + agent_id=uuid4(), + data=CreateOrUpdateAgentRequest( + name="test agent", + about="test agent about", + model="gpt-4o", + instructions=["test instruction"], + ), + client=client, + ) + + +@test("model: get agent not exists") +def _(client=cozo_client, developer_id=test_developer_id): + agent_id = uuid4() + + with raises(Exception): + get_agent(agent_id=agent_id, developer_id=developer_id, client=client) + + +@test("model: get agent exists") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + result = get_agent(agent_id=agent.id, developer_id=developer_id, client=client) + + assert result is not None + assert isinstance(result, Agent) + + +@test("model: delete agent") +def _(client=cozo_client, developer_id=test_developer_id): + temp_agent = create_agent( + developer_id=developer_id, + data=CreateAgentRequest( + name="test agent", + about="test agent about", + model="gpt-4o", + instructions=["test instruction"], + ), + client=client, + ) + + # Delete the agent + delete_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client) + + # Check that the agent is deleted + with raises(Exception): + get_agent(agent_id=temp_agent.id, developer_id=developer_id, client=client) + + +@test("model: update agent") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + result = update_agent( + agent_id=agent.id, + developer_id=developer_id, + data=UpdateAgentRequest( + name="updated agent", + about="updated agent about", + model="gpt-4o", + default_settings={"temperature": 1.0}, + metadata={"hello": "world"}, + ), + client=client, + ) + + assert result is not None + assert isinstance(result, ResourceUpdatedResponse) + + agent = get_agent( + agent_id=agent.id, + developer_id=developer_id, + client=client, + ) + + assert "test" not in agent.metadata + + +@test("model: patch agent") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + result = patch_agent( + agent_id=agent.id, + developer_id=developer_id, + data=PatchAgentRequest( + name="patched agent", + about="patched agent about", + default_settings={"temperature": 1.0}, + metadata={"something": "else"}, + ), + client=client, + ) + + assert result is not None + assert isinstance(result, ResourceUpdatedResponse) + + agent = get_agent( + agent_id=agent.id, + developer_id=developer_id, + client=client, + ) + + assert "hello" in agent.metadata + + +@test("model: list agents") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + """Tests listing all agents associated with a developer in the database. Verifies that the correct list of agents is retrieved.""" + + result = list_agents(developer_id=developer_id, client=client) + + assert isinstance(result, list) + assert all(isinstance(agent, Agent) for agent in result) diff --git a/agents-api/tests/test_agent_routes.py b/agents-api/tests/test_agent_routes.py new file mode 100644 index 000000000..0bd6d36df --- /dev/null +++ b/agents-api/tests/test_agent_routes.py @@ -0,0 +1,212 @@ +# Tests for agent queries +from uuid import uuid4 + +from ward import test + +from tests.fixtures import client, make_request, test_agent + + +@test("route: unauthorized should fail") +def _(client=client): + data = dict( + name="test agent", + about="test agent about", + model="gpt-4o", + ) + + response = client.request( + method="POST", + url="/agents", + json=data, + ) + + assert response.status_code == 403 + + +@test("route: create agent") +def _(make_request=make_request): + data = dict( + name="test agent", + about="test agent about", + model="gpt-4o", + ) + + response = make_request( + method="POST", + url="/agents", + json=data, + ) + + assert response.status_code == 201 + + +@test("route: create agent with instructions") +def _(make_request=make_request): + data = dict( + name="test agent", + about="test agent about", + model="gpt-4o", + instructions=["test instruction"], + ) + + response = make_request( + method="POST", + url="/agents", + json=data, + ) + + assert response.status_code == 201 + + +@test("route: create or update agent") +def _(make_request=make_request): + agent_id = str(uuid4()) + + data = dict( + name="test agent", + about="test agent about", + model="gpt-4o", + instructions=["test instruction"], + ) + + response = make_request( + method="POST", + url=f"/agents/{agent_id}", + json=data, + ) + + assert response.status_code == 201 + + +@test("route: get agent not exists") +def _(make_request=make_request): + agent_id = str(uuid4()) + + response = make_request( + method="GET", + url=f"/agents/{agent_id}", + ) + + assert response.status_code == 404 + + +@test("route: get agent exists") +def _(make_request=make_request, agent=test_agent): + agent_id = str(agent.id) + + response = make_request( + method="GET", + url=f"/agents/{agent_id}", + ) + + assert response.status_code != 404 + + +@test("route: delete agent") +def _(make_request=make_request): + data = dict( + name="test agent", + about="test agent about", + model="gpt-4o", + instructions=["test instruction"], + ) + + response = make_request( + method="POST", + url="/agents", + json=data, + ) + agent_id = response.json()["id"] + + response = make_request( + method="DELETE", + url=f"/agents/{agent_id}", + ) + + assert response.status_code == 202 + + response = make_request( + method="GET", + url=f"/agents/{agent_id}", + ) + + assert response.status_code == 404 + + +@test("route: update agent") +def _(make_request=make_request, agent=test_agent): + data = dict( + name="updated agent", + about="updated agent about", + default_settings={"temperature": 1.0}, + model="gpt-4o", + metadata={"hello": "world"}, + ) + + agent_id = str(agent.id) + response = make_request( + method="PUT", + url=f"/agents/{agent_id}", + json=data, + ) + + assert response.status_code == 200 + + agent_id = response.json()["id"] + + response = make_request( + method="GET", + url=f"/agents/{agent_id}", + ) + + assert response.status_code == 200 + agent = response.json() + + assert "test" not in agent["metadata"] + + +@test("route: patch agent") +def _(make_request=make_request, agent=test_agent): + agent_id = str(agent.id) + + data = dict( + name="patched agent", + about="patched agent about", + default_settings={"temperature": 1.0}, + metadata={"something": "else"}, + ) + + response = make_request( + method="PATCH", + url=f"/agents/{agent_id}", + json=data, + ) + + assert response.status_code == 200 + + agent_id = response.json()["id"] + + response = make_request( + method="GET", + url=f"/agents/{agent_id}", + ) + + assert response.status_code == 200 + agent = response.json() + + assert "hello" in agent["metadata"] + + +@test("route: list agents") +def _(make_request=make_request): + response = make_request( + method="GET", + url="/agents", + ) + + assert response.status_code == 200 + response = response.json() + agents = response["items"] + + assert isinstance(agents, list) + assert len(agents) > 0 diff --git a/agents-api/tests/test_agents.py b/agents-api/tests/test_agents.py deleted file mode 100644 index e5076e37d..000000000 --- a/agents-api/tests/test_agents.py +++ /dev/null @@ -1,433 +0,0 @@ -import uuid - -from julep.api import Agent, ResourceCreatedResponse, ResourceUpdatedResponse -from julep.api.core import ApiError -from ward import test - -from tests.fixtures import agent, async_client, client - - -@test("create new agent with tools") -def _(client=client): - agent = client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - tools=[ - { - "type": "function", - "function": { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - }, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - - assert isinstance(agent, ResourceCreatedResponse) - assert agent.created_at - assert bool(uuid.UUID(str(agent.id), version=4)) - - -@test("async create new agent with tools") -async def _(client=async_client): - agent = await client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - tools=[ - { - "type": "function", - "function": { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - }, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - - assert isinstance(agent, ResourceCreatedResponse) - assert agent.created_at - assert bool(uuid.UUID(str(agent.id), version=4)) - - -@test("create new agent with functions") -def _(client=client): - agent = client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - functions=[ - { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - - assert isinstance(agent, ResourceCreatedResponse) - assert agent.created_at - assert bool(uuid.UUID(str(agent.id), version=4)) - - -@test("async create new agent with functions") -async def _(client=async_client): - agent = await client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - functions=[ - { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - - assert isinstance(agent, ResourceCreatedResponse) - assert agent.created_at - assert bool(uuid.UUID(str(agent.id), version=4)) - - -@test("create new agent with functions and tools") -def _(client=client): - try: - client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - tools=[ - { - "type": "function", - "function": { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - }, - } - ], - functions=[ - { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - except Exception: - assert True - else: - assert False - - -@test("async create new agent with functions and tools") -async def _(client=async_client): - try: - await client.agents.create( - name="Samantha", - about="about Samantha", - instructions=[ - "non-important content", - "important content", - ], - tools=[ - { - "type": "function", - "function": { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - }, - } - ], - functions=[ - { - "description": "func desc", - "name": "some_func", - "parameters": {"param1": "string"}, - } - ], - default_settings={ - "frequency_penalty": 0.1, - "length_penalty": 0.9, - "presence_penalty": 0.8, - "repetition_penalty": 0.7, - "temperature": 0.6, - "top_p": 0.5, - }, - model="julep-ai/samantha-1-turbo", - docs=[ - { - "title": "some titie", - "content": "some content", - }, - ], - ) - except Exception: - assert True - else: - assert False - - -@test("update existing agent") -def _(client=client, existing_agent=agent): - response = client.agents.update( - agent_id=agent.id, - name="test user", - about="test user about", - instructions=["test agent instructions"], - default_settings={"temperature": 0.5}, - model="some model", - ) - - assert isinstance(response, ResourceUpdatedResponse) - assert response.updated_at != existing_agent.updated_at - assert response.id == existing_agent.id - - -@test("async update existing agent") -async def _(client=async_client, existing_agent=agent): - response = await client.agents.update( - agent_id=agent.id, - name="test user", - about="test user about", - instructions=["test agent instructions"], - default_settings={"temperature": 0.5}, - model="some model", - ) - - assert isinstance(response, ResourceUpdatedResponse) - assert response.updated_at != existing_agent.updated_at - assert response.id == existing_agent.id - - -@test("update non-existing agent") -def _(client=client): - try: - client.agents.update( - agent_id=uuid.uuid4(), - name="test user", - about="test user about", - instructions=["test agent instructions"], - default_settings={"temperature": 0.5}, - model="some model", - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async update non-existing agent") -async def _(client=async_client): - try: - await client.agents.update( - agent_id=uuid.uuid4(), - name="test user", - about="test user about", - instructions=["test agent instructions"], - default_settings={"temperature": 0.5}, - model="some model", - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("delete existing agent") -def _(client=client, existing_agent=agent): - response = client.agents.delete( - existing_agent.id, - ) - - assert response is None - - -@test("async delete existing agent") -async def _(client=async_client, existing_agent=agent): - response = await client.agents.delete( - existing_agent.id, - ) - - assert response is None - - -@test("delete non-existing agent") -def _(client=client): - try: - client.agents.delete( - uuid.uuid4(), - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async delete non-existing agent") -async def _(client=async_client): - try: - await client.agents.delete( - uuid.uuid4(), - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("get existing agent") -def _(client=client, existing_agent=agent): - response = client.agents.get(existing_agent.id) - assert isinstance(response, Agent) - assert response.id == existing_agent.id - - -@test("async get existing agent") -async def _(client=async_client, existing_agent=agent): - response = await client.agents.get(existing_agent.id) - assert isinstance(response, Agent) - assert response.id == existing_agent.id - - -@test("get non-existing agent") -def _(client=client): - try: - client.agents.get(uuid.uuid4()) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async get non-existing agent") -async def _(client=async_client): - try: - await client.agents.get(uuid.uuid4()) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("list agents") -def _(client=client, existing_agent=agent): - response = client.agents.list() - assert len(response) > 0 - assert isinstance(response[0], Agent) - assert response[0].id == existing_agent.id - - -@test("async list agents") -async def _(client=async_client, existing_agent=agent): - response = await client.agents.list() - assert len(response) > 0 - assert isinstance(response[0], Agent) - assert response[0].id == existing_agent.id diff --git a/agents-api/tests/test_chat_routes.py b/agents-api/tests/test_chat_routes.py new file mode 100644 index 000000000..674703bfe --- /dev/null +++ b/agents-api/tests/test_chat_routes.py @@ -0,0 +1,156 @@ +# Tests for session queries + +from ward import test + +from agents_api.autogen.openapi_model import ChatInput, CreateSessionRequest +from agents_api.clients import embed, litellm +from agents_api.common.protocol.sessions import ChatContext +from agents_api.models.chat.gather_messages import gather_messages +from agents_api.models.chat.prepare_chat_context import prepare_chat_context +from agents_api.models.session.create_session import create_session +from tests.fixtures import ( + cozo_client, + make_request, + patch_embed_acompletion, + test_agent, + test_developer, + test_developer_id, + test_session, + test_tool, + test_user, +) + + +@test("chat: check that patching libs works") +async def _( + _=patch_embed_acompletion, +): + assert (await litellm.acompletion(model="gpt-4o", messages=[])).id == "fake_id" + assert (await embed.embed())[0][0] == 1.0 # pytype: disable=missing-parameter + + +@test("chat: check that non-recall gather_messages works") +async def _( + developer=test_developer, + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, + session=test_session, + tool=test_tool, + user=test_user, + mocks=patch_embed_acompletion, +): + (embed, _) = mocks + + chat_context = prepare_chat_context( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + session_id = session.id + + messages = [{"role": "user", "content": "hello"}] + + past_messages, doc_references = await gather_messages( + developer=developer, + session_id=session_id, + chat_context=chat_context, + chat_input=ChatInput(messages=messages, recall=False), + ) + + assert isinstance(past_messages, list) + assert len(past_messages) >= 0 + assert isinstance(doc_references, list) + assert len(doc_references) == 0 + + # Check that embed was not called + embed.assert_not_called() + + +@test("chat: check that gather_messages works") +async def _( + developer=test_developer, + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, + session=test_session, + tool=test_tool, + user=test_user, + mocks=patch_embed_acompletion, +): + (embed, _) = mocks + + chat_context = prepare_chat_context( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + session_id = session.id + + messages = [{"role": "user", "content": "hello"}] + + past_messages, doc_references = await gather_messages( + developer=developer, + session_id=session_id, + chat_context=chat_context, + chat_input=ChatInput(messages=messages, recall=True), + ) + + assert isinstance(past_messages, list) + assert isinstance(doc_references, list) + + # Check that embed was called at least once + embed.assert_called() + + +@test("chat: check that chat route calls both mocks") +async def _( + make_request=make_request, + developer_id=test_developer_id, + agent=test_agent, + mocks=patch_embed_acompletion, + client=cozo_client, +): + session = create_session( + developer_id=developer_id, + data=CreateSessionRequest( + agent=agent.id, + situation="test session about", + ), + client=client, + ) + + (embed, acompletion) = mocks + + response = make_request( + method="POST", + url=f"/sessions/{session.id}/chat", + json={"messages": [{"role": "user", "content": "hello"}]}, + ) + + response.raise_for_status() + + # Check that both mocks were called at least once + embed.assert_called() + acompletion.assert_called() + + +@test("model: prepare chat context") +def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, + session=test_session, + tool=test_tool, + user=test_user, +): + context = prepare_chat_context( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + assert isinstance(context, ChatContext) + assert len(context.toolsets) > 0 diff --git a/agents-api/tests/test_developer_queries.py b/agents-api/tests/test_developer_queries.py new file mode 100644 index 000000000..569733fa5 --- /dev/null +++ b/agents-api/tests/test_developer_queries.py @@ -0,0 +1,36 @@ +# Tests for agent queries +from uuid import uuid4 + +from ward import raises, test + +from agents_api.common.protocol.developers import Developer +from agents_api.models.developer.get_developer import get_developer, verify_developer +from tests.fixtures import cozo_client, test_developer_id + + +@test("model: get developer") +def _(client=cozo_client, developer_id=test_developer_id): + developer = get_developer( + developer_id=developer_id, + client=client, + ) + + assert isinstance(developer, Developer) + assert developer.id + + +@test("model: verify developer exists") +def _(client=cozo_client, developer_id=test_developer_id): + verify_developer( + developer_id=developer_id, + client=client, + ) + + +@test("model: verify developer not exists") +def _(client=cozo_client): + with raises(Exception): + verify_developer( + developer_id=uuid4(), + client=client, + ) diff --git a/agents-api/tests/test_docs_queries.py b/agents-api/tests/test_docs_queries.py new file mode 100644 index 000000000..fcf7f9bd6 --- /dev/null +++ b/agents-api/tests/test_docs_queries.py @@ -0,0 +1,154 @@ +# Tests for entry queries + +from ward import test + +from agents_api.autogen.openapi_model import CreateDocRequest +from agents_api.models.docs.create_doc import create_doc +from agents_api.models.docs.delete_doc import delete_doc +from agents_api.models.docs.embed_snippets import embed_snippets +from agents_api.models.docs.get_doc import get_doc +from agents_api.models.docs.list_docs import list_docs +from agents_api.models.docs.search_docs_by_embedding import search_docs_by_embedding +from agents_api.models.docs.search_docs_by_text import search_docs_by_text +from tests.fixtures import ( + EMBEDDING_SIZE, + cozo_client, + test_agent, + test_developer_id, + test_doc, + test_user, +) + + +@test("model: create docs") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user +): + create_doc( + developer_id=developer_id, + owner_type="agent", + owner_id=agent.id, + data=CreateDocRequest(title="Hello", content=["World"]), + client=client, + ) + + create_doc( + developer_id=developer_id, + owner_type="user", + owner_id=user.id, + data=CreateDocRequest(title="Hello", content=["World"]), + client=client, + ) + + +@test("model: get docs") +def _(client=cozo_client, doc=test_doc, developer_id=test_developer_id): + get_doc( + developer_id=developer_id, + doc_id=doc.id, + client=client, + ) + + +@test("model: delete doc") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + doc = create_doc( + developer_id=developer_id, + owner_type="agent", + owner_id=agent.id, + data=CreateDocRequest(title="Hello", content=["World"]), + client=client, + ) + + delete_doc( + developer_id=developer_id, + doc_id=doc.id, + owner_type="agent", + owner_id=agent.id, + client=client, + ) + + +@test("model: list docs") +def _( + client=cozo_client, developer_id=test_developer_id, doc=test_doc, agent=test_agent +): + result = list_docs( + developer_id=developer_id, + owner_type="agent", + owner_id=agent.id, + client=client, + ) + + assert len(result) >= 1 + + +@test("model: search docs by text") +def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id): + create_doc( + developer_id=developer_id, + owner_type="agent", + owner_id=agent.id, + data=CreateDocRequest( + title="Hello", content=["The world is a funny little thing"] + ), + client=client, + ) + + result = search_docs_by_text( + developer_id=developer_id, + owners=[("agent", agent.id)], + query="funny", + client=client, + ) + + assert len(result) >= 1 + + +@test("model: search docs by embedding") +def _(client=cozo_client, agent=test_agent, developer_id=test_developer_id): + doc = create_doc( + developer_id=developer_id, + owner_type="agent", + owner_id=agent.id, + data=CreateDocRequest(title="Hello", content=["World"]), + client=client, + ) + + ### Add embedding to the snippet + embed_snippets( + developer_id=developer_id, + doc_id=doc.id, + snippet_indices=[0], + embeddings=[[1.0] * EMBEDDING_SIZE], + client=client, + ) + + ### Search + query_embedding = [0.99] * EMBEDDING_SIZE + + result = search_docs_by_embedding( + developer_id=developer_id, + owners=[("agent", agent.id)], + query_embedding=query_embedding, + client=client, + ) + + assert len(result) >= 1 + + +@test("model: embed snippets") +def _(client=cozo_client, developer_id=test_developer_id, doc=test_doc): + snippet_indices = [0] + embeddings = [[1.0] * EMBEDDING_SIZE] + + result = embed_snippets( + developer_id=developer_id, + doc_id=doc.id, + snippet_indices=snippet_indices, + embeddings=embeddings, + client=client, + ) + + assert result is not None + assert result.id == doc.id diff --git a/agents-api/tests/test_docs_routes.py b/agents-api/tests/test_docs_routes.py new file mode 100644 index 000000000..168211fc6 --- /dev/null +++ b/agents-api/tests/test_docs_routes.py @@ -0,0 +1,195 @@ +from ward import test + +from .fixtures import ( + make_request, + patch_embed_acompletion, + test_agent, + test_doc, + test_user, + test_user_doc, +) +from .utils import patch_testing_temporal + + +@test("route: create user doc") +async def _(make_request=make_request, user=test_user): + async with patch_testing_temporal(): + data = dict( + title="Test User Doc", + content=["This is a test user document."], + ) + + response = make_request( + method="POST", + url=f"/users/{user.id}/docs", + json=data, + ) + + assert response.status_code == 201 + + result = response.json() + assert len(result["jobs"]) > 0 + + +@test("route: create agent doc") +async def _(make_request=make_request, agent=test_agent): + async with patch_testing_temporal(): + data = dict( + title="Test Agent Doc", + content=["This is a test agent document."], + ) + + response = make_request( + method="POST", + url=f"/agents/{agent.id}/docs", + json=data, + ) + + assert response.status_code == 201 + + result = response.json() + assert len(result["jobs"]) > 0 + + +@test("route: delete doc") +async def _(make_request=make_request, agent=test_agent): + async with patch_testing_temporal(): + data = dict( + title="Test Agent Doc", + content=["This is a test agent document."], + ) + + response = make_request( + method="POST", + url=f"/agents/{agent.id}/docs", + json=data, + ) + doc_id = response.json()["id"] + + response = make_request( + method="DELETE", + url=f"/agents/{agent.id}/docs/{doc_id}", + ) + + assert response.status_code == 202 + + response = make_request( + method="GET", + url=f"/docs/{doc_id}", + ) + + assert response.status_code == 404 + + +@test("route: get doc") +async def _(make_request=make_request, agent=test_agent): + async with patch_testing_temporal(): + data = dict( + title="Test Agent Doc", + content=["This is a test agent document."], + ) + + response = make_request( + method="POST", + url=f"/agents/{agent.id}/docs", + json=data, + ) + doc_id = response.json()["id"] + + response = make_request( + method="GET", + url=f"/docs/{doc_id}", + ) + + assert response.status_code == 200 + + +@test("route: list user docs") +def _(make_request=make_request, user=test_user): + response = make_request( + method="GET", + url=f"/users/{user.id}/docs", + ) + + assert response.status_code == 200 + response = response.json() + docs = response["items"] + + assert isinstance(docs, list) + + +@test("route: list agent docs") +def _(make_request=make_request, agent=test_agent): + response = make_request( + method="GET", + url=f"/agents/{agent.id}/docs", + ) + + assert response.status_code == 200 + response = response.json() + docs = response["items"] + + assert isinstance(docs, list) + + +@test("route: search agent docs") +def _(make_request=make_request, agent=test_agent, doc=test_doc): + search_params = dict( + text=doc.content[0], + limit=1, + ) + + response = make_request( + method="POST", + url=f"/agents/{agent.id}/search", + json=search_params, + ) + + assert response.status_code == 200 + response = response.json() + docs = response["docs"] + + assert isinstance(docs, list) + assert len(docs) >= 1 + + +@test("route: search user docs") +def _(make_request=make_request, user=test_user, doc=test_user_doc): + search_params = dict( + text=doc.content[0], + limit=1, + ) + + response = make_request( + method="POST", + url=f"/users/{user.id}/search", + json=search_params, + ) + + assert response.status_code == 200 + response = response.json() + docs = response["docs"] + + assert isinstance(docs, list) + + # FIXME: This test is failing because the search is not returning the expected results + # assert len(docs) >= 1 + + +@test("routes: embed route") +async def _( + make_request=make_request, + mocks=patch_embed_acompletion, +): + (embed, _) = mocks + + response = make_request( + method="POST", + url="/embed", + json={"text": "blah blah"}, + ) + + result = response.json() + assert "vectors" in result + + embed.assert_called() diff --git a/agents-api/tests/test_entry_queries.py b/agents-api/tests/test_entry_queries.py new file mode 100644 index 000000000..c6b7150b6 --- /dev/null +++ b/agents-api/tests/test_entry_queries.py @@ -0,0 +1,201 @@ +""" +This module contains tests for entry queries against the CozoDB database. +It verifies the functionality of adding, retrieving, and processing entries as defined in the schema. +""" + +# Tests for entry queries + +import time + +from ward import test + +from agents_api.autogen.openapi_model import CreateEntryRequest +from agents_api.models.entry.create_entries import create_entries +from agents_api.models.entry.delete_entries import delete_entries +from agents_api.models.entry.get_history import get_history +from agents_api.models.entry.list_entries import list_entries +from agents_api.models.session.get_session import get_session +from tests.fixtures import cozo_client, test_developer_id, test_session + +MODEL = "gpt-4o" + + +@test("model: create entry") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + """ + Tests the addition of a new entry to the database. + Verifies that the entry can be successfully added using the create_entries function. + """ + + test_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + source="internal", + content="test entry content", + ) + + create_entries( + developer_id=developer_id, + session_id=session.id, + data=[test_entry], + mark_session_as_updated=False, + client=client, + ) + + +@test("model: create entry, update session") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + """ + Tests the addition of a new entry to the database. + Verifies that the entry can be successfully added using the create_entries function. + """ + + test_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + source="internal", + content="test entry content", + ) + + # FIXME: We should make sessions.updated_at also a updated_at_ms field to avoid this sleep + time.sleep(1) + + create_entries( + developer_id=developer_id, + session_id=session.id, + data=[test_entry], + mark_session_as_updated=True, + client=client, + ) + + updated_session = get_session( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + assert updated_session.updated_at > session.updated_at + + +@test("model: get entries") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + """ + Tests the retrieval of entries from the database. + Verifies that entries matching specific criteria can be successfully retrieved. + """ + + test_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + source="api_request", + content="test entry content", + ) + + internal_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + content="test entry content", + source="internal", + ) + + create_entries( + developer_id=developer_id, + session_id=session.id, + data=[test_entry, internal_entry], + client=client, + ) + + result = list_entries( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + # Asserts that only one entry is retrieved, matching the session_id. + assert len(result) == 1 + + +@test("model: get history") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + """ + Tests the retrieval of entries from the database. + Verifies that entries matching specific criteria can be successfully retrieved. + """ + + test_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + source="api_request", + content="test entry content", + ) + + internal_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + content="test entry content", + source="internal", + ) + + create_entries( + developer_id=developer_id, + session_id=session.id, + data=[test_entry, internal_entry], + client=client, + ) + + result = get_history( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + # Asserts that only one entry is retrieved, matching the session_id. + assert len(result.entries) > 0 + assert result.entries[0].id + + +@test("model: delete entries") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + """ + Tests the deletion of entries from the database. + Verifies that entries can be successfully deleted using the delete_entries function. + """ + + test_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + source="api_request", + content="test entry content", + ) + + internal_entry = CreateEntryRequest.from_model_input( + model=MODEL, + role="user", + content="internal entry content", + source="internal", + ) + + created_entries = create_entries( + developer_id=developer_id, + session_id=session.id, + data=[test_entry, internal_entry], + client=client, + ) + + entry_ids = [entry.id for entry in created_entries] + + delete_entries( + developer_id=developer_id, + session_id=session.id, + entry_ids=entry_ids, + client=client, + ) + + result = list_entries( + developer_id=developer_id, + session_id=session.id, + client=client, + ) + + # Asserts that no entries are retrieved after deletion. + assert all(id not in [entry.id for entry in result] for id in entry_ids) diff --git a/agents-api/tests/test_execution_queries.py b/agents-api/tests/test_execution_queries.py new file mode 100644 index 000000000..70fef5bb8 --- /dev/null +++ b/agents-api/tests/test_execution_queries.py @@ -0,0 +1,115 @@ +# Tests for execution queries + +from temporalio.client import WorkflowHandle +from ward import test + +from agents_api.autogen.Executions import ( + CreateExecutionRequest, +) +from agents_api.autogen.openapi_model import CreateTransitionRequest, Execution +from agents_api.models.execution.create_execution import create_execution +from agents_api.models.execution.create_execution_transition import ( + create_execution_transition, +) +from agents_api.models.execution.create_temporal_lookup import create_temporal_lookup +from agents_api.models.execution.get_execution import get_execution +from agents_api.models.execution.list_executions import list_executions +from tests.fixtures import cozo_client, test_developer_id, test_execution, test_task + +MODEL = "gpt-4o" + + +@test("model: create execution") +def _(client=cozo_client, developer_id=test_developer_id, task=test_task): + workflow_handle = WorkflowHandle( + client=None, + id="blah", + ) + + create_execution( + developer_id=developer_id, + task_id=task.id, + data=CreateExecutionRequest(input={"test": "test"}), + client=client, + ) + create_temporal_lookup( + developer_id=developer_id, + task_id=task.id, + workflow_handle=workflow_handle, + client=client, + ) + + +@test("model: get execution") +def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution): + result = get_execution( + execution_id=execution.id, + client=client, + ) + + assert result is not None + assert isinstance(result, Execution) + assert result.status == "queued" + + +@test("model: list executions") +def _( + client=cozo_client, + developer_id=test_developer_id, + execution=test_execution, + task=test_task, +): + result = list_executions( + developer_id=developer_id, + task_id=task.id, + client=client, + ) + + assert isinstance(result, list) + assert len(result) >= 1 + assert result[0].status == "queued" + + +@test("model: create execution transition") +def _(client=cozo_client, developer_id=test_developer_id, execution=test_execution): + result = create_execution_transition( + developer_id=developer_id, + execution_id=execution.id, + data=CreateTransitionRequest( + type="step", + output={"result": "test"}, + current={"workflow": "main", "step": 0}, + next={"workflow": "main", "step": 1}, + ), + client=client, + ) + + assert result is not None + assert result.type == "step" + assert result.output == {"result": "test"} + + +@test("model: create execution transition with execution update") +def _( + client=cozo_client, + developer_id=test_developer_id, + task=test_task, + execution=test_execution, +): + result = create_execution_transition( + developer_id=developer_id, + execution_id=execution.id, + data=CreateTransitionRequest( + type="cancelled", + output={"result": "test"}, + current={"workflow": "main", "step": 0}, + next=None, + ), + task_id=task.id, + update_execution_status=True, + client=client, + ) + + assert result is not None + assert result.type == "cancelled" + assert result.output == {"result": "test"} diff --git a/agents-api/tests/test_execution_workflow.py b/agents-api/tests/test_execution_workflow.py new file mode 100644 index 000000000..2dd79e017 --- /dev/null +++ b/agents-api/tests/test_execution_workflow.py @@ -0,0 +1,760 @@ +# Tests for task queries + +import asyncio +from unittest.mock import patch + +import yaml +from google.protobuf.json_format import MessageToDict +from litellm.types.utils import Choices, ModelResponse +from ward import raises, test + +from agents_api.autogen.openapi_model import ( + CreateExecutionRequest, + CreateTaskRequest, +) +from agents_api.models.task.create_task import create_task +from agents_api.routers.tasks.create_task_execution import start_execution + +from .fixtures import cozo_client, test_agent, test_developer_id +from .utils import patch_testing_temporal + +EMBEDDING_SIZE: int = 1024 + + +@test("workflow: evaluate step single") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [{"evaluate": {"hello": '"world"'}}], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == "world" + + +@test("workflow: evaluate step multiple") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + {"evaluate": {"hello": '"nope"'}}, + {"evaluate": {"hello": '"world"'}}, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == "world" + + +@test("workflow: variable access in expressions") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + # Testing that we can access the input + {"evaluate": {"hello": '_["test"]'}}, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == data.input["test"] + + +@test("workflow: yield step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "other_workflow": [ + # Testing that we can access the input + {"evaluate": {"hello": '_["test"]'}}, + ], + "main": [ + # Testing that we can access the input + { + "workflow": "other_workflow", + "arguments": {"test": '_["test"]'}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == data.input["test"] + + +@test("workflow: sleep step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "other_workflow": [ + # Testing that we can access the input + {"evaluate": {"hello": '_["test"]'}}, + {"sleep": {"days": 5}}, + ], + "main": [ + # Testing that we can access the input + { + "workflow": "other_workflow", + "arguments": {"test": '_["test"]'}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == data.input["test"] + + +@test("workflow: return step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "other_workflow": [ + # Testing that we can access the input + {"evaluate": {"hello": '_["test"]'}}, + {"return": {"value": '_["hello"]'}}, + {"return": {"value": '"banana"'}}, + ], + "main": [ + # Testing that we can access the input + { + "workflow": "other_workflow", + "arguments": {"test": '_["test"]'}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["value"] == data.input["test"] + + +@test("workflow: log step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "other_workflow": [ + # Testing that we can access the input + {"evaluate": {"hello": '_["test"]'}}, + {"log": '_["hello"]'}, + ], + "main": [ + # Testing that we can access the input + { + "workflow": "other_workflow", + "arguments": {"test": '_["test"]'}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == data.input["test"] + + +@test("workflow: log step expression fail") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "other_workflow": [ + # Testing that we can access the input + {"evaluate": {"hello": '_["test"]'}}, + {"log": '_["hell"]'}, # <--- The "hell" key does not exist + ], + "main": [ + # Testing that we can access the input + { + "workflow": "other_workflow", + "arguments": {"test": '_["test"]'}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + with raises(BaseException): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == data.input["test"] + + +@test("workflow: wait for input step start") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + {"wait_for_input": {"info": {"hi": '"bye"'}}}, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + mock_run_task_execution_workflow.assert_called_once() + + # Let it run for a bit + await asyncio.sleep(1) + + # Get the history + history = await handle.fetch_history() + events = [MessageToDict(e) for e in history.events] + assert len(events) > 0 + + activities_scheduled = [ + event.get("activityTaskScheduledEventAttributes", {}) + .get("activityType", {}) + .get("name") + for event in events + if "ACTIVITY_TASK_SCHEDULED" in event["eventType"] + ] + activities_scheduled = [ + activity for activity in activities_scheduled if activity + ] + + assert "wait_for_input_step" in activities_scheduled + + +@test("workflow: if-else step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + { + "if": "True", + "then": {"evaluate": {"hello": '"world"'}}, + "else": {"evaluate": {"hello": '"nope"'}}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == "world" + + +@test("workflow: switch step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + { + "switch": [ + { + "case": "False", + "then": {"evaluate": {"hello": '"bubbles"'}}, + }, + { + "case": "True", + "then": {"evaluate": {"hello": '"world"'}}, + }, + { + "case": "True", + "then": {"evaluate": {"hello": '"bye"'}}, + }, + ] + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == "world" + + +@test("workflow: for each step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + { + "foreach": { + "in": "'a b c'.split()", + "do": {"evaluate": {"hello": '"world"'}}, + }, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["hello"] == "world" + + +@test("workflow: map reduce step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + data = CreateExecutionRequest(input={"test": "input"}) + + map_step = { + "over": "'a b c'.split()", + "map": { + "evaluate": {"res": "_"}, + }, + } + + task_def = { + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [map_step], + } + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest(**task_def), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert [r["res"] for r in result] == ["a", "b", "c"] + + +@test("workflow: prompt step") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + mock_model_response = ModelResponse( + id="fake_id", + choices=[Choices(message={"role": "assistant", "content": "Hello, world!"})], + created=0, + object="text_completion", + ) + + with patch("agents_api.clients.litellm.acompletion") as acompletion: + acompletion.return_value = mock_model_response + data = CreateExecutionRequest(input={"test": "input"}) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [ + { + "prompt": [ + { + "role": "user", + "content": "message", + }, + ], + "settings": {}, + }, + ], + } + ), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + + mock_run_task_execution_workflow.assert_called_once() + + result = await handle.result() + assert result["content"] == "Hello, world!" + assert result["role"] == "assistant" + + +@test("workflow: execute yaml task") +async def _( + client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + mock_model_response = ModelResponse( + id="fake_id", + choices=[Choices(message={"role": "assistant", "content": "found: true\nvalue: 'Gaga'"})], + created=0, + object="text_completion", + ) + + with patch("agents_api.clients.litellm.acompletion") as acompletion, open( + "./tests/sample_tasks/find_selector.yaml", "r" + ) as task_file: + input = dict( + screenshot_base64="iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA", + network_requests=[{"request": {}, "response": {"body": "Lady Gaga"}}], + parameters=["name"], + ) + task_definition = yaml.safe_load(task_file) + acompletion.return_value = mock_model_response + data = CreateExecutionRequest(input=input) + + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest(**task_definition), + client=client, + ) + + async with patch_testing_temporal() as (_, mock_run_task_execution_workflow): + execution, handle = await start_execution( + developer_id=developer_id, + task_id=task.id, + data=data, + client=client, + ) + + assert handle is not None + assert execution.task_id == task.id + assert execution.input == data.input + + mock_run_task_execution_workflow.assert_called_once() + + await handle.result() diff --git a/agents-api/tests/test_messages_truncation.py b/agents-api/tests/test_messages_truncation.py index 2edd5610e..97516617a 100644 --- a/agents-api/tests/test_messages_truncation.py +++ b/agents-api/tests/test_messages_truncation.py @@ -1,316 +1,316 @@ -from uuid import uuid4 +# from uuid import uuid4 -from ward import raises, test +# from ward import raises, test -from agents_api.autogen.openapi_model import Role -from agents_api.common.protocol.entries import Entry -from agents_api.routers.sessions.exceptions import InputTooBigError -from tests.fixtures import base_session +# from agents_api.autogen.openapi_model import Role +# from agents_api.common.protocol.entries import Entry +# from agents_api.routers.sessions.exceptions import InputTooBigError +# from tests.fixtures import base_session -@test("truncate empty messages list", tags=["messages_truncate"]) -def _(session=base_session): - messages: list[Entry] = [] - result = session.truncate(messages, 10) +# @test("truncate empty messages list", tags=["messages_truncate"]) +# def _(session=base_session): +# messages: list[Entry] = [] +# result = session.truncate(messages, 10) - assert messages == result +# assert messages == result -@test("do not truncate", tags=["messages_truncate"]) -def _(session=base_session): - contents = [ - "content1", - "content2", - "content3", - ] - threshold = sum([len(c) // 3.5 for c in contents]) +# @test("do not truncate", tags=["messages_truncate"]) +# def _(session=base_session): +# contents = [ +# "content1", +# "content2", +# "content3", +# ] +# threshold = sum([len(c) // 3.5 for c in contents]) - messages: list[Entry] = [ - Entry(session_id=uuid4(), role=Role.user, content=contents[0][0]), - Entry(session_id=uuid4(), role=Role.assistant, content=contents[1][0]), - Entry(session_id=uuid4(), role=Role.user, content=contents[2][0]), - ] - result = session.truncate(messages, threshold) +# messages: list[Entry] = [ +# Entry(session_id=uuid4(), role=Role.user, content=contents[0][0]), +# Entry(session_id=uuid4(), role=Role.assistant, content=contents[1][0]), +# Entry(session_id=uuid4(), role=Role.user, content=contents[2][0]), +# ] +# result = session.truncate(messages, threshold) - assert messages == result +# assert messages == result -@test("truncate thoughts partially", tags=["messages_truncate"]) -def _(session=base_session): - contents = [ - ("content1", True), - ("content2", True), - ("content3", False), - ("content4", True), - ("content5", True), - ("content6", True), - ] - session_ids = [uuid4()] * len(contents) - threshold = sum([len(c) // 3.5 for c, i in contents if i]) +# @test("truncate thoughts partially", tags=["messages_truncate"]) +# def _(session=base_session): +# contents = [ +# ("content1", True), +# ("content2", True), +# ("content3", False), +# ("content4", True), +# ("content5", True), +# ("content6", True), +# ] +# session_ids = [uuid4()] * len(contents) +# threshold = sum([len(c) // 3.5 for c, i in contents if i]) - messages: list[Entry] = [ - Entry( - session_id=session_ids[0], - role=Role.system, - name="thought", - content=contents[0][0], - ), - Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), - Entry( - session_id=session_ids[2], - role=Role.system, - name="thought", - content=contents[2][0], - ), - Entry( - session_id=session_ids[3], - role=Role.system, - name="thought", - content=contents[3][0], - ), - Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), - Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), - ] - result = session.truncate(messages, threshold) - [ - messages[0], - messages[1], - messages[3], - messages[4], - messages[5], - ] +# messages: list[Entry] = [ +# Entry( +# session_id=session_ids[0], +# role=Role.system, +# name="thought", +# content=contents[0][0], +# ), +# Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), +# Entry( +# session_id=session_ids[2], +# role=Role.system, +# name="thought", +# content=contents[2][0], +# ), +# Entry( +# session_id=session_ids[3], +# role=Role.system, +# name="thought", +# content=contents[3][0], +# ), +# Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), +# Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), +# ] +# result = session.truncate(messages, threshold) +# [ +# messages[0], +# messages[1], +# messages[3], +# messages[4], +# messages[5], +# ] - assert result == [ - messages[0], - messages[1], - messages[3], - messages[4], - messages[5], - ] +# assert result == [ +# messages[0], +# messages[1], +# messages[3], +# messages[4], +# messages[5], +# ] -@test("truncate thoughts partially 2", tags=["messages_truncate"]) -def _(session=base_session): - contents = [ - ("content1", True), - ("content2", True), - ("content3", False), - ("content4", False), - ("content5", True), - ("content6", True), - ] - session_ids = [uuid4()] * len(contents) - threshold = sum([len(c) // 3.5 for c, i in contents if i]) +# @test("truncate thoughts partially 2", tags=["messages_truncate"]) +# def _(session=base_session): +# contents = [ +# ("content1", True), +# ("content2", True), +# ("content3", False), +# ("content4", False), +# ("content5", True), +# ("content6", True), +# ] +# session_ids = [uuid4()] * len(contents) +# threshold = sum([len(c) // 3.5 for c, i in contents if i]) - messages: list[Entry] = [ - Entry( - session_id=session_ids[0], - role=Role.system, - name="thought", - content=contents[0][0], - ), - Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), - Entry( - session_id=session_ids[2], - role=Role.system, - name="thought", - content=contents[2][0], - ), - Entry( - session_id=session_ids[3], - role=Role.system, - name="thought", - content=contents[3][0], - ), - Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), - Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), - ] - result = session.truncate(messages, threshold) +# messages: list[Entry] = [ +# Entry( +# session_id=session_ids[0], +# role=Role.system, +# name="thought", +# content=contents[0][0], +# ), +# Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), +# Entry( +# session_id=session_ids[2], +# role=Role.system, +# name="thought", +# content=contents[2][0], +# ), +# Entry( +# session_id=session_ids[3], +# role=Role.system, +# name="thought", +# content=contents[3][0], +# ), +# Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), +# Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), +# ] +# result = session.truncate(messages, threshold) - assert result == [ - messages[0], - messages[1], - messages[4], - messages[5], - ] +# assert result == [ +# messages[0], +# messages[1], +# messages[4], +# messages[5], +# ] -@test("truncate all thoughts", tags=["messages_truncate"]) -def _(session=base_session): - contents = [ - ("content1", False), - ("content2", True), - ("content3", False), - ("content4", False), - ("content5", True), - ("content6", True), - ("content7", False), - ] - session_ids = [uuid4()] * len(contents) - threshold = sum([len(c) // 3.5 for c, i in contents if i]) +# @test("truncate all thoughts", tags=["messages_truncate"]) +# def _(session=base_session): +# contents = [ +# ("content1", False), +# ("content2", True), +# ("content3", False), +# ("content4", False), +# ("content5", True), +# ("content6", True), +# ("content7", False), +# ] +# session_ids = [uuid4()] * len(contents) +# threshold = sum([len(c) // 3.5 for c, i in contents if i]) - messages: list[Entry] = [ - Entry( - session_id=session_ids[0], - role=Role.system, - name="thought", - content=contents[0][0], - ), - Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), - Entry( - session_id=session_ids[2], - role=Role.system, - name="thought", - content=contents[2][0], - ), - Entry( - session_id=session_ids[3], - role=Role.system, - name="thought", - content=contents[3][0], - ), - Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), - Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), - Entry( - session_id=session_ids[6], - role=Role.system, - name="thought", - content=contents[6][0], - ), - ] - result = session.truncate(messages, threshold) +# messages: list[Entry] = [ +# Entry( +# session_id=session_ids[0], +# role=Role.system, +# name="thought", +# content=contents[0][0], +# ), +# Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), +# Entry( +# session_id=session_ids[2], +# role=Role.system, +# name="thought", +# content=contents[2][0], +# ), +# Entry( +# session_id=session_ids[3], +# role=Role.system, +# name="thought", +# content=contents[3][0], +# ), +# Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), +# Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), +# Entry( +# session_id=session_ids[6], +# role=Role.system, +# name="thought", +# content=contents[6][0], +# ), +# ] +# result = session.truncate(messages, threshold) - assert result == [ - messages[1], - messages[4], - messages[5], - ] +# assert result == [ +# messages[1], +# messages[4], +# messages[5], +# ] -@test("truncate user assistant pairs", tags=["messages_truncate"]) -def _(session=base_session): - contents = [ - ("content1", False), - ("content2", True), - ("content3", False), - ("content4", False), - ("content5", True), - ("content6", True), - ("content7", True), - ("content8", False), - ("content9", True), - ("content10", True), - ("content11", True), - ("content12", True), - ("content13", False), - ] - session_ids = [uuid4()] * len(contents) - threshold = sum([len(c) // 3.5 for c, i in contents if i]) +# @test("truncate user assistant pairs", tags=["messages_truncate"]) +# def _(session=base_session): +# contents = [ +# ("content1", False), +# ("content2", True), +# ("content3", False), +# ("content4", False), +# ("content5", True), +# ("content6", True), +# ("content7", True), +# ("content8", False), +# ("content9", True), +# ("content10", True), +# ("content11", True), +# ("content12", True), +# ("content13", False), +# ] +# session_ids = [uuid4()] * len(contents) +# threshold = sum([len(c) // 3.5 for c, i in contents if i]) - messages: list[Entry] = [ - Entry( - session_id=session_ids[0], - role=Role.system, - name="thought", - content=contents[0][0], - ), - Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), - Entry( - session_id=session_ids[2], - role=Role.system, - name="thought", - content=contents[2][0], - ), - Entry( - session_id=session_ids[3], - role=Role.system, - name="thought", - content=contents[3][0], - ), - Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), - Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), - Entry(session_id=session_ids[6], role=Role.user, content=contents[6][0]), - Entry(session_id=session_ids[7], role=Role.assistant, content=contents[7][0]), - Entry(session_id=session_ids[8], role=Role.user, content=contents[8][0]), - Entry(session_id=session_ids[9], role=Role.assistant, content=contents[9][0]), - Entry(session_id=session_ids[10], role=Role.user, content=contents[10][0]), - Entry(session_id=session_ids[11], role=Role.assistant, content=contents[11][0]), - Entry( - session_id=session_ids[12], - role=Role.system, - name="thought", - content=contents[12][0], - ), - ] +# messages: list[Entry] = [ +# Entry( +# session_id=session_ids[0], +# role=Role.system, +# name="thought", +# content=contents[0][0], +# ), +# Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), +# Entry( +# session_id=session_ids[2], +# role=Role.system, +# name="thought", +# content=contents[2][0], +# ), +# Entry( +# session_id=session_ids[3], +# role=Role.system, +# name="thought", +# content=contents[3][0], +# ), +# Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), +# Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), +# Entry(session_id=session_ids[6], role=Role.user, content=contents[6][0]), +# Entry(session_id=session_ids[7], role=Role.assistant, content=contents[7][0]), +# Entry(session_id=session_ids[8], role=Role.user, content=contents[8][0]), +# Entry(session_id=session_ids[9], role=Role.assistant, content=contents[9][0]), +# Entry(session_id=session_ids[10], role=Role.user, content=contents[10][0]), +# Entry(session_id=session_ids[11], role=Role.assistant, content=contents[11][0]), +# Entry( +# session_id=session_ids[12], +# role=Role.system, +# name="thought", +# content=contents[12][0], +# ), +# ] - result = session.truncate(messages, threshold) +# result = session.truncate(messages, threshold) - assert result == [ - messages[1], - messages[4], - messages[5], - messages[6], - messages[8], - messages[9], - messages[10], - messages[11], - ] +# assert result == [ +# messages[1], +# messages[4], +# messages[5], +# messages[6], +# messages[8], +# messages[9], +# messages[10], +# messages[11], +# ] -@test("unable to truncate", tags=["messages_truncate"]) -def _(session=base_session): - contents = [ - ("content1", False), - ("content2", True), - ("content3", False), - ("content4", False), - ("content5", False), - ("content6", False), - ("content7", True), - ("content8", False), - ("content9", True), - ("content10", False), - ] - session_ids = [uuid4()] * len(contents) - threshold = sum([len(c) // 3.5 for c, i in contents if i]) - all_tokens = sum([len(c) // 3.5 for c, _ in contents]) +# @test("unable to truncate", tags=["messages_truncate"]) +# def _(session=base_session): +# contents = [ +# ("content1", False), +# ("content2", True), +# ("content3", False), +# ("content4", False), +# ("content5", False), +# ("content6", False), +# ("content7", True), +# ("content8", False), +# ("content9", True), +# ("content10", False), +# ] +# session_ids = [uuid4()] * len(contents) +# threshold = sum([len(c) // 3.5 for c, i in contents if i]) +# all_tokens = sum([len(c) // 3.5 for c, _ in contents]) - messages: list[Entry] = [ - Entry( - session_id=session_ids[0], - role=Role.system, - name="thought", - content=contents[0][0], - ), - Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), - Entry( - session_id=session_ids[2], - role=Role.system, - name="thought", - content=contents[2][0], - ), - Entry( - session_id=session_ids[3], - role=Role.system, - name="thought", - content=contents[3][0], - ), - Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), - Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), - Entry(session_id=session_ids[6], role=Role.user, content=contents[6][0]), - Entry(session_id=session_ids[7], role=Role.assistant, content=contents[7][0]), - Entry(session_id=session_ids[8], role=Role.user, content=contents[8][0]), - Entry( - session_id=session_ids[9], - role=Role.system, - name="thought", - content=contents[9][0], - ), - ] - with raises(InputTooBigError) as ex: - session.truncate(messages, threshold) +# messages: list[Entry] = [ +# Entry( +# session_id=session_ids[0], +# role=Role.system, +# name="thought", +# content=contents[0][0], +# ), +# Entry(session_id=session_ids[1], role=Role.assistant, content=contents[1][0]), +# Entry( +# session_id=session_ids[2], +# role=Role.system, +# name="thought", +# content=contents[2][0], +# ), +# Entry( +# session_id=session_ids[3], +# role=Role.system, +# name="thought", +# content=contents[3][0], +# ), +# Entry(session_id=session_ids[4], role=Role.user, content=contents[4][0]), +# Entry(session_id=session_ids[5], role=Role.assistant, content=contents[5][0]), +# Entry(session_id=session_ids[6], role=Role.user, content=contents[6][0]), +# Entry(session_id=session_ids[7], role=Role.assistant, content=contents[7][0]), +# Entry(session_id=session_ids[8], role=Role.user, content=contents[8][0]), +# Entry( +# session_id=session_ids[9], +# role=Role.system, +# name="thought", +# content=contents[9][0], +# ), +# ] +# with raises(InputTooBigError) as ex: +# session.truncate(messages, threshold) - assert ( - str(ex.raised) - == f"input is too big, {threshold} tokens required, but you got {all_tokens} tokens" - ) +# assert ( +# str(ex.raised) +# == f"input is too big, {threshold} tokens required, but you got {all_tokens} tokens" +# ) diff --git a/agents-api/tests/test_session_queries.py b/agents-api/tests/test_session_queries.py new file mode 100644 index 000000000..7eae8485f --- /dev/null +++ b/agents-api/tests/test_session_queries.py @@ -0,0 +1,145 @@ +# Tests for session queries +from uuid import uuid4 + +from ward import test + +from agents_api.autogen.openapi_model import CreateOrUpdateSessionRequest, Session +from agents_api.autogen.Sessions import CreateSessionRequest +from agents_api.models.session.create_or_update_session import create_or_update_session +from agents_api.models.session.create_session import create_session +from agents_api.models.session.delete_session import delete_session +from agents_api.models.session.get_session import get_session +from agents_api.models.session.list_sessions import list_sessions +from tests.fixtures import ( + cozo_client, + test_agent, + test_developer_id, + test_session, + test_user, +) + +MODEL = "gpt-4o" + + +@test("model: create session") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user +): + create_session( + developer_id=developer_id, + data=CreateSessionRequest( + users=[user.id], + agents=[agent.id], + situation="test session about", + ), + client=client, + ) + + +@test("model: create session no user") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + create_session( + developer_id=developer_id, + data=CreateSessionRequest( + agents=[agent.id], + situation="test session about", + ), + client=client, + ) + + +@test("model: get session not exists") +def _(client=cozo_client, developer_id=test_developer_id): + session_id = uuid4() + + try: + get_session( + session_id=session_id, + developer_id=developer_id, + client=client, + ) + except Exception: + pass + else: + assert False, "Session should not exist" + + +@test("model: get session exists") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + result = get_session( + session_id=session.id, + developer_id=developer_id, + client=client, + ) + + assert result is not None + assert isinstance(result, Session) + + +@test("model: delete session") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + session = create_session( + developer_id=developer_id, + data=CreateSessionRequest( + agent=agent.id, + situation="test session about", + ), + client=client, + ) + + delete_session( + session_id=session.id, + developer_id=developer_id, + client=client, + ) + + try: + get_session( + session_id=session.id, + developer_id=developer_id, + client=client, + ) + except Exception: + pass + + else: + assert False, "Session should not exist" + + +@test("model: list sessions") +def _(client=cozo_client, developer_id=test_developer_id, session=test_session): + result = list_sessions( + developer_id=developer_id, + client=client, + ) + + assert isinstance(result, list) + assert len(result) > 0 + + +@test("model: create or update session") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, user=test_user +): + session_id = uuid4() + + create_or_update_session( + session_id=session_id, + developer_id=developer_id, + data=CreateOrUpdateSessionRequest( + users=[user.id], + agents=[agent.id], + situation="test session about", + ), + client=client, + ) + + result = get_session( + session_id=session_id, + developer_id=developer_id, + client=client, + ) + + assert result is not None + assert isinstance(result, Session) + assert result.id == session_id diff --git a/agents-api/tests/test_sessions.py b/agents-api/tests/test_sessions.py deleted file mode 100644 index 74ad0151c..000000000 --- a/agents-api/tests/test_sessions.py +++ /dev/null @@ -1,307 +0,0 @@ -import uuid - -from julep.api import ( - ChatMlMessage, - ChatResponse, - ChatSettingsResponseFormat, - ChatSettingsResponseFormatType, - InputChatMlMessage, - InputChatMlMessageRole, - ResourceCreatedResponse, - ResourceUpdatedResponse, - Session, - Suggestion, - Tool, - ToolChoiceOption, -) -from julep.api.core import ApiError -from ward import test - -from tests.fixtures import agent, async_client, client, session, user - - -@test("get existing session") -def _(existing_session=session, client=client): - response = client.sessions.get(id=existing_session.id) - - assert isinstance(response, Session) - assert response.id == existing_session.id - - -@test("async get existing sessions") -async def _(existing_session=session, client=async_client): - response = await client.sessions.get(id=existing_session.id) - - assert isinstance(response, Session) - assert response.id == existing_session.id - - -@test("get non-existing session") -def _(client=client): - try: - client.sessions.get(id=uuid.uuid4()) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async get non-existing sessions") -async def _(existing_session=session, client=async_client): - try: - await client.sessions.get(id=uuid.uuid4()) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("create sessions") -def _(user=user, agent=agent, client=client): - response = client.sessions.create( - user_id=user.id, - agent_id=agent.id, - situation="test situation", - ) - - assert isinstance(response, ResourceCreatedResponse) - assert response.created_at - bool(uuid.UUID(str(response.id), version=4)) - - -@test("async create sessions") -async def _(user=user, agent=agent, client=async_client): - response = await client.sessions.create( - user_id=user.id, - agent_id=agent.id, - situation="test situation", - ) - - assert isinstance(response, ResourceCreatedResponse) - assert response.created_at - bool(uuid.UUID(str(response.id), version=4)) - - -@test("list sessions") -def _(existing_session=session, client=client): - response = client.sessions.list() - - assert len(response) > 0 - assert isinstance(response[0], Session) - assert response[0].id == existing_session.id - - -@test("async list sessions") -async def _(existing_session=session, client=async_client): - response = await client.sessions.list() - - assert len(response) > 0 - assert isinstance(response[0], Session) - assert response[0].id == existing_session.id - - -@test("update existing session") -def _(existing_session=session, client=client): - response = client.sessions.update( - session_id=existing_session.id, - situation="test situation", - ) - - assert isinstance(response, ResourceUpdatedResponse) - assert response.updated_at - assert response.updated_at != existing_session.updated_at - assert response.id == existing_session.id - - -@test("async update existing session") -async def _(existing_session=session, client=async_client): - response = await client.sessions.update( - session_id=existing_session.id, - situation="test situation", - ) - - assert isinstance(response, ResourceUpdatedResponse) - assert response.updated_at - assert response.updated_at != existing_session.updated_at - assert response.id == existing_session.id - - -@test("update non-existing session") -def _(client=client): - try: - client.sessions.update( - session_id=uuid.uuid4(), - situation="test situation", - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async update non-existing session") -async def _(client=async_client): - try: - await client.sessions.update( - session_id=uuid.uuid4(), - situation="test situation", - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("delete existing sessions") -def _(existing_session=session, client=client): - response = client.sessions.delete( - session_id=existing_session.id, - ) - - assert response is None - - -@test("async delete existing sessions") -async def _(existing_session=session, client=client): - response = await client.sessions.delete( - session_id=existing_session.id, - ) - - assert response is None - - -# TODO: implement below tests properly -@test("sessions.chat") -def _(client=client): - response = client.sessions.chat( - session_id=str(uuid.uuid4()), - messages=[ - InputChatMlMessage( - role=InputChatMlMessageRole.USER, - content="test content", - name="tets name", - ) - ], - tools=[ - Tool( - **{ - "type": "function", - "function": { - "description": "test description", - "name": "test name", - "parameters": {"test_arg": "test val"}, - }, - "id": str(uuid.uuid4()), - }, - ) - ], - tool_choice=ToolChoiceOption("auto"), - frequency_penalty=0.5, - length_penalty=0.5, - logit_bias={"test": 1}, - max_tokens=120, - presence_penalty=0.5, - repetition_penalty=0.5, - response_format=ChatSettingsResponseFormat( - type=ChatSettingsResponseFormatType.TEXT, - ), - seed=1, - stop=["<"], - stream=False, - temperature=0.7, - top_p=0.9, - recall=False, - remember=False, - ) - - assert isinstance(response, ChatResponse) - - -@test("async sessions.chat") -async def _(client=async_client): - response = await client.sessions.chat( - session_id=str(uuid.uuid4()), - messages=[ - InputChatMlMessage( - role=InputChatMlMessageRole.USER, - content="test content", - name="tets name", - ) - ], - tools=[ - Tool( - **{ - "type": "function", - "function": { - "description": "test description", - "name": "test name", - "parameters": {"test_arg": "test val"}, - }, - "id": str(uuid.uuid4()), - }, - ) - ], - tool_choice=ToolChoiceOption("auto"), - frequency_penalty=0.5, - length_penalty=0.5, - logit_bias={"test": 1}, - max_tokens=120, - presence_penalty=0.5, - repetition_penalty=0.5, - response_format=ChatSettingsResponseFormat( - type=ChatSettingsResponseFormatType.TEXT, - ), - seed=1, - stop=["<"], - stream=False, - temperature=0.7, - top_p=0.9, - recall=False, - remember=False, - ) - - assert isinstance(response, ChatResponse) - - -@test("sessions.suggestions") -def _(client=client): - response = client.sessions.suggestions( - session_id=uuid.uuid4(), - ) - assert len(response) > 0 - assert isinstance(response[0], Suggestion) - - -@test("async sessions.suggestions") -async def _(client=async_client): - response = await client.sessions.suggestions( - session_id=uuid.uuid4(), - ) - assert len(response) > 0 - assert isinstance(response[0], Suggestion) - - -@test("sessions.history") -def _(client=client): - response = client.sessions.history( - session_id=uuid.uuid4(), - ) - assert len(response) > 0 - assert isinstance(response[0], ChatMlMessage) - - -@test("async sessions.list") -async def _(client=async_client): - response = await client.sessions.history( - session_id=uuid.uuid4(), - ) - assert len(response) > 0 - assert isinstance(response[0], ChatMlMessage) diff --git a/agents-api/tests/test_task_queries.py b/agents-api/tests/test_task_queries.py new file mode 100644 index 000000000..cd9b5b612 --- /dev/null +++ b/agents-api/tests/test_task_queries.py @@ -0,0 +1,161 @@ +# Tests for task queries +from uuid import uuid4 + +from ward import test + +from agents_api.autogen.openapi_model import ( + CreateTaskRequest, + ResourceUpdatedResponse, + Task, + UpdateTaskRequest, +) +from agents_api.models.task.create_or_update_task import create_or_update_task +from agents_api.models.task.create_task import create_task +from agents_api.models.task.delete_task import delete_task +from agents_api.models.task.get_task import get_task +from agents_api.models.task.list_tasks import list_tasks +from agents_api.models.task.update_task import update_task + +from .fixtures import cozo_client, test_agent, test_developer_id, test_task + + +@test("model: create task") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + task_id = uuid4() + + create_task( + developer_id=developer_id, + agent_id=agent.id, + task_id=task_id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [], + } + ), + client=client, + ) + + +@test("model: create or update task") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + task_id = uuid4() + + create_or_update_task( + developer_id=developer_id, + agent_id=agent.id, + task_id=task_id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [], + } + ), + client=client, + ) + + +@test("model: get task not exists") +def _(client=cozo_client, developer_id=test_developer_id): + task_id = uuid4() + + try: + get_task( + developer_id=developer_id, + task_id=task_id, + client=client, + ) + except Exception: + pass + else: + assert False, "Task should not exist" + + +@test("model: get task exists") +def _(client=cozo_client, developer_id=test_developer_id, task=test_task): + result = get_task( + developer_id=developer_id, + task_id=task.id, + client=client, + ) + + assert result is not None + assert isinstance(result, Task) + + +@test("model: delete task") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + task = create_task( + developer_id=developer_id, + agent_id=agent.id, + data=CreateTaskRequest( + **{ + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [], + } + ), + client=client, + ) + + delete_task( + developer_id=developer_id, + agent_id=agent.id, + task_id=task.id, + client=client, + ) + + try: + get_task( + developer_id=developer_id, + task_id=task.id, + client=client, + ) + except Exception: + pass + + else: + assert False, "Task should not exist" + + +@test("model: update task") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, task=test_task +): + result = update_task( + developer_id=developer_id, + task_id=task.id, + agent_id=agent.id, + data=UpdateTaskRequest( + **{ + "name": "updated task", + "description": "updated task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [], + } + ), + client=client, + ) + + assert result is not None + assert isinstance(result, ResourceUpdatedResponse) + + +@test("model: list tasks") +def _( + client=cozo_client, developer_id=test_developer_id, task=test_task, agent=test_agent +): + result = list_tasks( + developer_id=developer_id, + agent_id=agent.id, + client=client, + ) + + assert isinstance(result, list) + assert len(result) > 0 + assert all(isinstance(task, Task) for task in result) diff --git a/agents-api/tests/test_task_routes.py b/agents-api/tests/test_task_routes.py new file mode 100644 index 000000000..092a8a0f1 --- /dev/null +++ b/agents-api/tests/test_task_routes.py @@ -0,0 +1,195 @@ +# Tests for task routes + +from uuid import uuid4 + +from ward import test + +from .fixtures import ( + client, + make_request, + test_agent, + test_execution, + test_task, + test_transition, +) +from .utils import patch_testing_temporal + + +@test("route: unauthorized should fail") +def _(client=client, agent=test_agent): + data = dict( + name="test user", + main=[ + { + "kind_": "evaluate", + "evaluate": { + "additionalProp1": "value1", + }, + } + ], + ) + + response = client.request( + method="POST", + url=f"/agents/{str(agent.id)}/tasks", + data=data, + ) + + assert response.status_code == 403 + + +@test("route: create task") +def _(make_request=make_request, agent=test_agent): + data = dict( + name="test user", + main=[ + { + "kind_": "evaluate", + "evaluate": { + "additionalProp1": "value1", + }, + } + ], + ) + + response = make_request( + method="POST", + url=f"/agents/{str(agent.id)}/tasks", + json=data, + ) + + assert response.status_code == 201 + + +@test("route: create task execution") +async def _(make_request=make_request, task=test_task): + data = dict( + input={}, + metadata={}, + ) + + async with patch_testing_temporal(): + response = make_request( + method="POST", + url=f"/tasks/{str(task.id)}/executions", + json=data, + ) + + assert response.status_code == 201 + + +@test("route: get execution not exists") +def _(make_request=make_request): + execution_id = str(uuid4()) + + response = make_request( + method="GET", + url=f"/executions/{execution_id}", + ) + + assert response.status_code == 404 + + +@test("route: get execution exists") +def _(make_request=make_request, execution=test_execution): + response = make_request( + method="GET", + url=f"/executions/{str(execution.id)}", + ) + + assert response.status_code == 200 + + +@test("route: get task not exists") +def _(make_request=make_request): + task_id = str(uuid4()) + + response = make_request( + method="GET", + url=f"/tasks/{task_id}", + ) + + assert response.status_code == 400 + + +@test("route: get task exists") +def _(make_request=make_request, task=test_task): + response = make_request( + method="GET", + url=f"/tasks/{str(task.id)}", + ) + + assert response.status_code == 200 + + +# FIXME: This test is failing +@test("model: list execution transitions") +def _(make_request=make_request, execution=test_execution, transition=test_transition): + response = make_request( + method="GET", + url=f"/executions/{str(execution.id)}/transitions", + ) + + assert response.status_code == 200 + response = response.json() + transitions = response["items"] + + assert isinstance(transitions, list) + assert len(transitions) > 0 + + +@test("model: list task executions") +def _(make_request=make_request, execution=test_execution): + response = make_request( + method="GET", + url=f"/tasks/{str(execution.task_id)}/executions", + ) + + assert response.status_code == 200 + response = response.json() + executions = response["items"] + + assert isinstance(executions, list) + assert len(executions) > 0 + + +@test("model: list tasks") +def _(make_request=make_request, agent=test_agent): + response = make_request( + method="GET", + url=f"/agents/{str(agent.id)}/tasks", + ) + + assert response.status_code == 200 + response = response.json() + tasks = response["items"] + + assert isinstance(tasks, list) + assert len(tasks) > 0 + + +@test("model: patch execution") +def _(make_request=make_request, execution=test_execution): + data = dict( + status="running", + ) + + response = make_request( + method="PATCH", + url=f"/tasks/{str(execution.task_id)}/executions/{str(execution.id)}", + json=data, + ) + + assert response.status_code == 200 + + execution_id = response.json()["id"] + + response = make_request( + method="GET", + url=f"/executions/{execution_id}", + ) + + assert response.status_code == 200 + execution = response.json() + + assert execution["status"] == "running" diff --git a/agents-api/tests/test_tasks.py b/agents-api/tests/test_tasks.py deleted file mode 100644 index 24562282b..000000000 --- a/agents-api/tests/test_tasks.py +++ /dev/null @@ -1,159 +0,0 @@ -import uuid -from typing import List - -from julep.api.types import Execution, Task -from ward import test - -from tests.fixtures import agent, async_client, client, task - - -@test("create task") -def _(client=client, agent=agent): - task = client.tasks.create( - agent_id=agent.id, - name="task1", - description="task 1", - tools_available=["tool1"], - input_schema={}, - main=[], - ) - - assert isinstance(task, Task) - assert task.created_at - assert bool(uuid.UUID(str(task.id), version=4)) - - assert task.agent_id == agent.id - assert task.name == "task1" - assert task.description == "task 1" - assert task.tools_available == ["tool1"] - assert task.input_schema == {} - assert task.main == [] - - -@test("get task") -def _(client=client, agent=agent, task=task): - task = client.tasks.get( - agent_id=agent.id, - task_id=task.id, - ) - - assert isinstance(task, Task) - assert task.created_at - assert bool(uuid.UUID(str(task.id), version=4)) - - assert task.agent_id == agent.id - assert task.name == "task1" - assert task.description == "task 1" - assert task.tools_available == ["tool1"] - assert task.input_schema == {} - assert task.main == [] - - -@test("list task") -def _(client=client, agent=agent): - tasks = client.tasks.list( - agent_id=agent.id, - ) - - assert isinstance(tasks, List[Task]) - assert len(tasks) > 0 - - task = tasks[0] - - assert task.created_at - assert bool(uuid.UUID(str(task.id), version=4)) - - assert task.agent_id == agent.id - assert task.name == "task1" - assert task.description == "task 1" - assert task.tools_available == ["tool1"] - assert task.input_schema == {} - assert task.main == [] - - -@test("start task execution") -def _(client=client, agent=agent, task=task): - execution = client.tasks.start_task_execution( - agent_id=agent.id, - task_id=task.id, - arguments={}, - status="enqueued", - ) - - assert isinstance(execution, Execution) - - -@test("create task") -async def _(client=async_client, agent=agent): - task = await client.tasks.create( - agent_id=agent.id, - name="task1", - description="task 1", - tools_available=["tool1"], - input_schema={}, - main=[], - ) - - assert isinstance(task, Task) - assert task.created_at - assert bool(uuid.UUID(str(task.id), version=4)) - - assert task.agent_id == agent.id - assert task.name == "task1" - assert task.description == "task 1" - assert task.tools_available == ["tool1"] - assert task.input_schema == {} - assert task.main == [] - - -@test("get task") -async def _(client=async_client, agent=agent, task=task): - task = await client.tasks.get( - agent_id=agent.id, - task_id=task.id, - ) - - assert isinstance(task, Task) - assert task.created_at - assert bool(uuid.UUID(str(task.id), version=4)) - - assert task.agent_id == agent.id - assert task.name == "task1" - assert task.description == "task 1" - assert task.tools_available == ["tool1"] - assert task.input_schema == {} - assert task.main == [] - - -@test("list task") -async def _(client=async_client, agent=agent): - tasks = await client.tasks.list( - agent_id=agent.id, - ) - - assert isinstance(tasks, List[Task]) - assert len(tasks) > 0 - - task = tasks[0] - - assert task.created_at - assert bool(uuid.UUID(str(task.id), version=4)) - - assert task.agent_id == agent.id - assert task.name == "task1" - assert task.description == "task 1" - assert task.tools_available == ["tool1"] - assert task.input_schema == {} - assert task.main == [] - - -@test("start task execution") -async def _(client=async_client, agent=agent, task=task): - execution = await client.tasks.start_task_execution( - agent_id=agent.id, - task_id=task.id, - arguments={}, - status="enqueued", - ) - - assert isinstance(execution, Execution) diff --git a/agents-api/tests/test_tool_queries.py b/agents-api/tests/test_tool_queries.py new file mode 100644 index 000000000..c21b7fbfb --- /dev/null +++ b/agents-api/tests/test_tool_queries.py @@ -0,0 +1,169 @@ +# Tests for tool queries + +from ward import test + +from agents_api.autogen.openapi_model import ( + CreateToolRequest, + PatchToolRequest, + Tool, + UpdateToolRequest, +) +from agents_api.models.tools.create_tools import create_tools +from agents_api.models.tools.delete_tool import delete_tool +from agents_api.models.tools.get_tool import get_tool +from agents_api.models.tools.list_tools import list_tools +from agents_api.models.tools.patch_tool import patch_tool +from agents_api.models.tools.update_tool import update_tool +from tests.fixtures import cozo_client, test_agent, test_developer_id, test_tool + + +@test("model: create tool") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + function = { + "name": "hello_world", + "description": "A function that prints hello world", + "parameters": {"type": "object", "properties": {}}, + } + + tool = { + "function": function, + "name": "hello_world", + "type": "function", + } + + result = create_tools( + developer_id=developer_id, + agent_id=agent.id, + data=[CreateToolRequest(**tool)], + client=client, + ) + + assert result is not None + assert isinstance(result[0], Tool) + + +@test("model: delete tool") +def _(client=cozo_client, developer_id=test_developer_id, agent=test_agent): + function = { + "name": "temp_temp", + "description": "A function that prints hello world", + "parameters": {"type": "object", "properties": {}}, + } + + tool = { + "function": function, + "name": "temp_temp", + "type": "function", + } + + [tool, *_] = create_tools( + developer_id=developer_id, + agent_id=agent.id, + data=[CreateToolRequest(**tool)], + client=client, + ) + + result = delete_tool( + developer_id=developer_id, + agent_id=agent.id, + tool_id=tool.id, + client=client, + ) + + assert result is not None + + +@test("model: get tool") +def _( + client=cozo_client, developer_id=test_developer_id, tool=test_tool, agent=test_agent +): + result = get_tool( + developer_id=developer_id, + agent_id=agent.id, + tool_id=tool.id, + client=client, + ) + + assert result is not None + + +@test("model: list tools") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool +): + result = list_tools( + developer_id=developer_id, + agent_id=agent.id, + client=client, + ) + + assert result is not None + assert all(isinstance(tool, Tool) for tool in result) + + +@test("model: patch tool") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool +): + patch_data = PatchToolRequest( + **{ + "name": "patched_tool", + "function": { + "description": "A patched function that prints hello world", + }, + } + ) + + result = patch_tool( + developer_id=developer_id, + agent_id=agent.id, + tool_id=tool.id, + data=patch_data, + client=client, + ) + + assert result is not None + + tool = get_tool( + developer_id=developer_id, + agent_id=agent.id, + tool_id=tool.id, + client=client, + ) + + assert tool.name == "patched_tool" + assert tool.function.description == "A patched function that prints hello world" + assert tool.function.parameters + + +@test("model: update tool") +def _( + client=cozo_client, developer_id=test_developer_id, agent=test_agent, tool=test_tool +): + update_data = UpdateToolRequest( + name="updated_tool", + type="function", + function={ + "description": "An updated function that prints hello world", + }, + ) + + result = update_tool( + developer_id=developer_id, + agent_id=agent.id, + tool_id=tool.id, + data=update_data, + client=client, + ) + + assert result is not None + + tool = get_tool( + developer_id=developer_id, + agent_id=agent.id, + tool_id=tool.id, + client=client, + ) + + assert tool.name == "updated_tool" + assert not tool.function.parameters diff --git a/agents-api/tests/test_user_queries.py b/agents-api/tests/test_user_queries.py new file mode 100644 index 000000000..ab5c62ed0 --- /dev/null +++ b/agents-api/tests/test_user_queries.py @@ -0,0 +1,118 @@ +# This module contains tests for user-related queries against the 'cozodb' database. It includes tests for creating, updating, and retrieving user information. + +# Tests for user queries +from uuid import uuid4 + +from ward import test + +from agents_api.autogen.openapi_model import ( + CreateOrUpdateUserRequest, + CreateUserRequest, + ResourceUpdatedResponse, + UpdateUserRequest, + User, +) +from agents_api.models.user.create_or_update_user import create_or_update_user +from agents_api.models.user.create_user import create_user +from agents_api.models.user.get_user import get_user +from agents_api.models.user.list_users import list_users +from agents_api.models.user.update_user import update_user +from tests.fixtures import cozo_client, test_developer_id, test_user + + +@test("model: create user") +def _(client=cozo_client, developer_id=test_developer_id): + """Test that a user can be successfully created.""" + + create_user( + developer_id=developer_id, + data=CreateUserRequest( + name="test user", + about="test user about", + ), + client=client, + ) + + +@test("model: create or update user") +def _(client=cozo_client, developer_id=test_developer_id): + """Test that a user can be successfully created or updated.""" + + create_or_update_user( + developer_id=developer_id, + user_id=uuid4(), + data=CreateOrUpdateUserRequest( + name="test user", + about="test user about", + ), + client=client, + ) + + +@test("model: update user") +def _(client=cozo_client, developer_id=test_developer_id, user=test_user): + """Test that an existing user's information can be successfully updated.""" + + # Verify that the 'updated_at' timestamp is greater than the 'created_at' timestamp, indicating a successful update. + update_result = update_user( + user_id=user.id, + developer_id=developer_id, + data=UpdateUserRequest( + name="updated user", + about="updated user about", + ), + client=client, + ) + + assert update_result is not None + assert isinstance(update_result, ResourceUpdatedResponse) + assert update_result.updated_at > user.created_at + + +@test("model: get user not exists") +def _(client=cozo_client, developer_id=test_developer_id): + """Test that retrieving a non-existent user returns an empty result.""" + + user_id = uuid4() + + # Ensure that the query for an existing user returns exactly one result. + try: + get_user( + user_id=user_id, + developer_id=developer_id, + client=client, + ) + except Exception: + pass + else: + assert ( + False + ), "Expected an exception to be raised when retrieving a non-existent user." + + +@test("model: get user exists") +def _(client=cozo_client, developer_id=test_developer_id, user=test_user): + """Test that retrieving an existing user returns the correct user information.""" + + result = get_user( + user_id=user.id, + developer_id=developer_id, + client=client, + ) + + assert result is not None + assert isinstance(result, User) + + +@test("model: list users") +def _(client=cozo_client, developer_id=test_developer_id, user=test_user): + """Test that listing users returns a collection of user information.""" + + result = list_users( + developer_id=developer_id, + client=client, + ) + + assert isinstance(result, list) + assert len(result) >= 1 + assert all(isinstance(user, User) for user in result) diff --git a/agents-api/tests/test_user_routes.py b/agents-api/tests/test_user_routes.py new file mode 100644 index 000000000..88f14d85a --- /dev/null +++ b/agents-api/tests/test_user_routes.py @@ -0,0 +1,167 @@ +# Tests for user routes +from uuid import uuid4 + +from ward import test + +from tests.fixtures import client, make_request, test_user + + +@test("route: unauthorized should fail") +def _(client=client): + data = dict( + name="test user", + about="test user about", + ) + + response = client.request( + method="POST", + url="/users", + data=data, + ) + + assert response.status_code == 403 + + +@test("route: create user") +def _(make_request=make_request): + data = dict( + name="test user", + about="test user about", + ) + + response = make_request( + method="POST", + url="/users", + json=data, + ) + + assert response.status_code == 201 + + +@test("route: get user not exists") +def _(make_request=make_request): + user_id = str(uuid4()) + + response = make_request( + method="GET", + url=f"/users/{user_id}", + ) + + assert response.status_code == 404 + + +@test("route: get user exists") +def _(make_request=make_request, user=test_user): + user_id = str(user.id) + + response = make_request( + method="GET", + url=f"/users/{user_id}", + ) + + assert response.status_code != 404 + + +@test("route: delete user") +def _(make_request=make_request): + data = dict( + name="test user", + about="test user about", + ) + + response = make_request( + method="POST", + url="/users", + json=data, + ) + user_id = response.json()["id"] + + response = make_request( + method="DELETE", + url=f"/users/{user_id}", + ) + + assert response.status_code == 202 + + response = make_request( + method="GET", + url=f"/users/{user_id}", + ) + + assert response.status_code == 404 + + +@test("route: update user") +def _(make_request=make_request, user=test_user): + data = dict( + name="updated user", + about="updated user about", + ) + + user_id = str(user.id) + response = make_request( + method="PUT", + url=f"/users/{user_id}", + json=data, + ) + + assert response.status_code == 200 + + user_id = response.json()["id"] + + response = make_request( + method="GET", + url=f"/users/{user_id}", + ) + + assert response.status_code == 200 + user = response.json() + + assert user["name"] == "updated user" + assert user["about"] == "updated user about" + + +@test("model: patch user") +def _(make_request=make_request, user=test_user): + user_id = str(user.id) + + data = dict( + name="patched user", + about="patched user about", + ) + + response = make_request( + method="PATCH", + url=f"/users/{user_id}", + json=data, + ) + + assert response.status_code == 200 + + user_id = response.json()["id"] + + response = make_request( + method="GET", + url=f"/users/{user_id}", + ) + + assert response.status_code == 200 + user = response.json() + + assert user["name"] == "patched user" + assert user["about"] == "patched user about" + + +@test("model: list users") +def _(make_request=make_request): + response = make_request( + method="GET", + url="/users", + ) + + assert response.status_code == 200 + response = response.json() + users = response["items"] + + assert isinstance(users, list) + assert len(users) > 0 diff --git a/agents-api/tests/test_users.py b/agents-api/tests/test_users.py deleted file mode 100644 index 6dc2b0789..000000000 --- a/agents-api/tests/test_users.py +++ /dev/null @@ -1,191 +0,0 @@ -import uuid - -from julep.api import ResourceCreatedResponse, ResourceUpdatedResponse, User -from julep.api.core import ApiError -from ward import test - -from tests.fixtures import async_client, client, user - - -@test("create user") -def _(client=client): - response = client.users.create( - name="test user", - about="test user about", - ) - - assert isinstance(response, ResourceCreatedResponse) - assert response.created_at - assert bool(uuid.UUID(str(response.id), version=4)) - - -@test("async create user") -async def _(client=async_client): - response = await client.users.create( - name="test user", - about="test user about", - ) - - assert isinstance(response, ResourceCreatedResponse) - assert response.created_at - assert bool(uuid.UUID(str(response.id), version=4)) - - -@test("get existing user") -def _(existing_user=user, client=client): - response = client.users.get(existing_user.id) - assert isinstance(response, User) - assert existing_user.id == response.id - - -@test("async get existing user") -async def _(existing_user=user, client=async_client): - response = await client.users.get(existing_user.id) - assert isinstance(response, User) - assert existing_user.id == response.id - - -@test("get non-existing user") -def _(client=client): - try: - client.users.get(uuid.uuid4()) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async get non-existing user") -async def _(client=async_client): - try: - await client.users.get(uuid.uuid4()) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("update existing user") -def _(existing_user=user, client=client): - response = client.users.update( - user_id=existing_user.id, - name="test user", - about="test user about", - ) - - assert isinstance(response, ResourceUpdatedResponse) - assert response.updated_at - assert response.updated_at != existing_user.updated_at - assert response.id == existing_user.id - - -@test("async update existing user") -async def _(existing_user=user, async_client=client): - response = await client.users.update( - user_id=existing_user.id, - name="test user", - about="test user about", - ) - - assert isinstance(response, ResourceUpdatedResponse) - assert response.updated_at - assert response.updated_at != existing_user.updated_at - assert response.id == existing_user.id - - -@test("update non-existing user") -def _(client=client): - try: - client.users.update( - user_id=uuid.uuid4(), - name="test user", - about="test user about", - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async update non-existing user") -async def _(client=async_client): - try: - await client.users.update( - user_id=uuid.uuid4(), - name="test user", - about="test user about", - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("delete existing user") -def _(existing_user=user, client=client): - response = client.users.delete( - user_id=existing_user.id, - ) - - assert response is None - - -@test("async delete existing user") -async def _(existing_user=user, client=async_client): - response = await client.users.delete( - user_id=existing_user.id, - ) - - assert response is None - - -@test("delete non-existing user") -def _(client=client): - try: - client.users.delete( - user_id=uuid.uuid4(), - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("async delete non-existing user") -async def _(client=async_client): - try: - await client.users.delete( - user_id=uuid.uuid4(), - ) - except ApiError as e: - assert e.status_code == 404 - except Exception: - assert False - else: - assert False - - -@test("list users") -def _(existing_user=user, client=client): - response = client.users.list() - assert len(response) > 0 - assert isinstance(response[0], User) - assert response[0].id == existing_user.id - - -@test("async list users") -async def _(existing_user=user, client=async_client): - response = await client.users.list() - assert len(response) > 0 - assert isinstance(response[0], User) - assert response[0].id == existing_user.id diff --git a/agents-api/tests/test_workflow_routes.py b/agents-api/tests/test_workflow_routes.py new file mode 100644 index 000000000..34aa0101c --- /dev/null +++ b/agents-api/tests/test_workflow_routes.py @@ -0,0 +1,88 @@ +# Tests for task queries + +from uuid import uuid4 + +from ward import test + +from .fixtures import cozo_client, test_agent, test_developer_id +from .utils import patch_http_client_with_temporal + + +@test("workflow route: evaluate step single") +async def _( + cozo_client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + agent_id = str(agent.id) + task_id = str(uuid4()) + + async with patch_http_client_with_temporal( + cozo_client=cozo_client, developer_id=developer_id + ) as ( + make_request, + client, + ): + task_data = { + "name": "test task", + "description": "test task about", + "input_schema": {"type": "object", "additionalProperties": True}, + "main": [{"evaluate": {"hello": '"world"'}}], + } + + make_request( + method="POST", + url=f"/agents/{agent_id}/tasks/{task_id}", + json=task_data, + ).raise_for_status() + + execution_data = dict(input={"test": "input"}) + + make_request( + method="POST", + url=f"/tasks/{task_id}/executions", + json=execution_data, + ).raise_for_status() + + +@test("workflow route: evaluate step single with yaml") +async def _( + cozo_client=cozo_client, + developer_id=test_developer_id, + agent=test_agent, +): + agent_id = str(agent.id) + task_id = str(uuid4()) + + async with patch_http_client_with_temporal( + cozo_client=cozo_client, developer_id=developer_id + ) as ( + make_request, + client, + ): + task_data = """ +name: test task +description: test task about +input_schema: + type: object + additionalProperties: true + +main: + - evaluate: + hello: '"world"' +""" + + make_request( + method="POST", + url=f"/agents/{agent_id}/tasks/{task_id}", + content=task_data.encode("utf-8"), + headers={"Content-Type": "text/yaml"}, + ).raise_for_status() + + execution_data = dict(input={"test": "input"}) + + make_request( + method="POST", + url=f"/tasks/{task_id}/executions", + json=execution_data, + ).raise_for_status() diff --git a/agents-api/tests/utils.py b/agents-api/tests/utils.py new file mode 100644 index 000000000..0eb37819e --- /dev/null +++ b/agents-api/tests/utils.py @@ -0,0 +1,86 @@ +import asyncio +import logging +from contextlib import asynccontextmanager, contextmanager +from unittest.mock import patch + +from fastapi.testclient import TestClient +from litellm.types.utils import Choices, ModelResponse +from temporalio.testing import WorkflowEnvironment + +from agents_api.worker.codec import pydantic_data_converter +from agents_api.worker.worker import create_worker + +EMBEDDING_SIZE: int = 1024 + + +@asynccontextmanager +async def patch_testing_temporal(): + # Set log level to ERROR to avoid spamming the console + logger = logging.getLogger("temporalio") + previous_log_level = logger.getEffectiveLevel() + + logger.setLevel(logging.ERROR) + + # Start a local Temporal environment + async with await WorkflowEnvironment.start_time_skipping( + data_converter=pydantic_data_converter + ) as env: + # Create a worker with our workflows and start it + worker = create_worker(client=env.client) + asyncio.create_task(worker.run()) + + # Mock the Temporal client + mock_client = worker.client + + with patch("agents_api.clients.temporal.get_client") as mock_get_client: + mock_get_client.return_value = mock_client + + # Yield the worker and the mock client <--- + yield worker, mock_get_client + + # Shutdown the worker + await worker.shutdown() + + # Reset log level + logger.setLevel(previous_log_level) + + +@asynccontextmanager +async def patch_http_client_with_temporal(*, cozo_client, developer_id): + async with patch_testing_temporal() as (worker, mock_get_client): + from agents_api.env import api_key, api_key_header_name + from agents_api.web import app + + client = TestClient(app=app) + app.state.cozo_client = cozo_client + + def make_request(method, url, **kwargs): + headers = kwargs.pop("headers", {}) + headers = { + **headers, + "X-Developer-Id": str(developer_id), + api_key_header_name: api_key, + } + + return client.request(method, url, headers=headers, **kwargs) + + temporal_client = await mock_get_client() + yield make_request, temporal_client + + +@contextmanager +def patch_embed_acompletion(output={"role": "assistant", "content": "Hello, world!"}): + mock_model_response = ModelResponse( + id="fake_id", + choices=[Choices(message=output)], + created=0, + object="text_completion", + ) + + with patch("agents_api.clients.embed.embed") as embed, patch( + "agents_api.clients.litellm.acompletion" + ) as acompletion: + embed.return_value = [[1.0] * EMBEDDING_SIZE] + acompletion.return_value = mock_model_response + + yield embed, acompletion diff --git a/docker-compose.yml b/docker-compose.yml index f141b9252..6845bb838 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,9 +3,9 @@ version: "3" include: - ./memory-store/docker-compose.yml - - ./model-serving/docker-compose.yml - ./gateway/docker-compose.yml - ./agents-api/docker-compose.yml + - ./llm-proxy/docker-compose.yml # TODO: Enable after testing # - ./monitoring/docker-compose.yml diff --git a/docs/julep-concepts.md b/docs/julep-concepts.md new file mode 100644 index 000000000..0ceebbcad --- /dev/null +++ b/docs/julep-concepts.md @@ -0,0 +1,382 @@ +# Julep Concepts + +{{TOC}} + ++++ + +## Agent + +An Agent in Julep is the main orchestrator (or protagonist) of your application. These are backed by foundation models like GPT4 or Claude which use the agent's interaction history to figure out what to do/say next. Using agents in Julep, you can: + +- Interact with an agent in long-lived [sessions][Session]. +- Add system, integration or user-defined [tools][Tool] that the agent can use. +- Add agent-level [documents][Doc] that are auto-retrieved using semantic search inside [sessions][Session]. +- Define multi-step work flows that combine complex integrations using [tasks][Task]. Tasks are [executed][Execution] in the background, can recover from failures and manage many sub-tasks in parallel. + +> **(Upcoming Feature)** Access the [memories][Memory] that the agent makes about [users][User] in the background as the user interacts with it inside sessions. These memories are going to be scoped per user in order to maintain clear distinctions. + +At a high level, this is what defines an `Agent` (some properties omitted): + +| **Field** | **Description** | +| :------------- | :-------------------------------------------------------------- | +| `name` | The "name" of the Agent. | +| `about` | About the Agent: What it does, any guardrails, personality etc. | +| `model` | Which model to use for this Agent. | +| `instructions` | Instructions that this agent must follow across all sessions. | + +Important to keep in mind: These fields are optional. They are available inside sessions and task prompts as `jinja` templates. `Session`s, `Task`s etc. come with minimal default templates. You can override them with your own prompt templates throughout julep! + + + +## User + +You can associate sessions with `User`s. julep uses them to scope `memories` formed by agents. They are optional but, in addition to memories, can be useful to attach meta data that can be referenced by other sessions or task executions. + +A `User` consists of: + +| **Field** | **Description** | +| :-------- | :--------------------------- | +| `name` | The name of the user. | +| `about` | Information about this user. | + + + +## Session + +`Session` is the main workhorse for julep apps: +- You interact with agents inside sessions. You can create multiple sessions per agent. +- Each session maintains its own context for sending to the agent's model. +- A session can have *one or more* agents and *zero or more* users associated with it. +- You can control what happens when the history exceeds the context window limit using `context_overflow` setting. + +A `Session` consists of: + +| **Field** | **Description** | +| :----------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `agent(s)` | Agents associated with this session. At least one is required. | +| `user(s)` | The users associated with this session. Optional. | +| `situation` | The system prompt used for the session. Default prompt is shown below. | +| `token_budget` | The number of tokens to keep the context window under. Defaults to null which is equivalent to the model's context window limit. | +| `context_overflow` | Controls behavior for when context size exceeds the `token_budget`. Can be one of `null`, `"truncate"`, or `"adaptive"`. Defaults to `null` which raises an exception. | + + + +### `metadata` precedence order + +In julep, the following objects can have `metadata` added to them: +- `Agent` +- `User` +- `Session` +- `Doc` +- `Task` +- `Execution` + +Whenever multiple objects with the same `metadata` field are present in a scope, the value takes the following precedence (from highest to lowest): +- In a session: `session > user > agent` +- During a task execution: `execution > task > agent` + +### Context overflow + +Whenever the context size grows beyond the `token_budget` or the model's input limit, the backend figures out what to do next based on the `context_overflow` setting: +- `null`: Raise an exception. The client is responsible for creating a new session or clearing the history for the current one. +- `"truncate"`: Truncate the context from the top except the for system prompt until the size falls below the budget. Raises an error if system prompt and last message combined exceed the budget. +- `"adaptive"`: Whenever the context size reaches `75%` of the `token_budget`, a background task is created to compress the information by summarizing, merging and clipping messages in the context. This is done on a best effort basis. Requests might fail if the context wasn't compressed enough or on time. + +### Default system template + +```jinja +{%- if agent.name -%} +You are {{agent.name}}.{{" "}} +{%- endif -%} + +{%- if agent.about -%} +About you: {{agent.name}}.{{" "}} +{%- endif -%} + +{%- if user -%} +You are talking to a user + {%- if user.name -%}{{" "}} and their name is {{user.name}} + {%- if user.about -%}. About the user: {{user.about}}.{%- else -%}.{%- endif -%} + {%- endif -%} +{%- endif -%} + +{{"\n\n"}} + +{%- if agent.instructions -%} +Instructions:{{"\n"}} + {%- if agent.instructions is string -%} + {{agent.instructions}}{{"\n"}} + {%- else -%} + {%- for instruction in agent.instructions -%} + - {{instruction}}{{"\n"}} + {%- endfor -%} + {%- endif -%} + {{"\n"}} +{%- endif -%} + +{%- if tools -%} +Tools:{{"\n"}} + {%- for tool in tools -%} + {%- if tool.type == "function" -%} + - {{tool.function.name}} + {%- if tool.function.description -%}: {{tool.function.description}}{%- endif -%}{{"\n"}} + {%- else -%} + - {{ 0/0 }} {# Error: Other tool types aren't supported yet. #} + {%- endif -%} + {%- endfor -%} +{{"\n\n"}} +{%- endif -%} + +{%- if docs -%} +Relevant documents:{{"\n"}} + {%- for doc in docs -%} + {{doc.title}}{{"\n"}} + {%- if doc.content is string -%} + {{doc.content}}{{"\n"}} + {%- else -%} + {%- for snippet in doc.content -%} + {{snippet}}{{"\n"}} + {%- endfor -%} + {%- endif -%} + {{"---"}} + {%- endfor -%} +{%- endif -%} +``` + +### Multiple users and agents in a session + +A session can have more than one agents or users. The session's behavior changes depending on this. + +**No user**: No user data is retrieved. _(Upcoming)_ Memories are not mined from the session. + +**One or more users**: Docs, metadata, memories etc. are retrieved for all the users in the session. You can add messages for each user by referencing them by their name in the `ChatML` messages. _(Upcoming)_ Memories mined in the background are added to the corresponding user's scope. + +**One agent**: Works as expected. + +**Multiple agents**: When a message is received by the session, each agent is called one after another in the order they were defined in the session. You can also specify which `agent` to use in a request, in which case, just that agent will be used. + +### Chat endpoint + + + + +## Tool + +Agents can be given access to a number of "tools" -- any programmatic interface that a foundation model can "call" with a set of inputs to achieve a goal. For example, it might use a `web_search(query)` tool to search the Internet for some information. + +Unlike agent frameworks, julep is a _backend_ that manages agent execution. Clients can interact with agents using our SDKs. julep takes care of executing tasks and running integrations. + +Tools in julep can be one of: +1. User-defined `function`s + These are function signatures that you can give the model to choose from, similar to how [openai]'s function-calling works. An example: + ```yaml + name: send_text_message + description: Send a text message to a recipient. + parameters: + type: object + properties: + to: + type: string + description: Phone number of recipient. + text: + type: string + description: Content of the message. + ``` + +2. `system` tools (upcoming) + Built-in tools that can be used to call the julep APIs themselves, like triggering a task execution, appending to a metadata field, etc. + + `system` tools are built into the backend. They get executed automatically when needed. They do _not_ require any action from the client-side. + +3. Built-in `integration`s (upcoming) + julep backend ships with integrated third party tools from the following providers: + - [composio](https://composio.dev) \*\* + - [anon](https://anon.com) \*\* + - [langchain toolkits](https://python.langchain.com/v0.2/docs/integrations/toolkits/). Support for _Github, Gitlab, Gmail, Jira, MultiOn, Slack_ toolkits is planned. + + \*\* Since _composio_ and _anon_ are third-party providers, their tools require setting up account linking. + + `integration` tools are directly executed on the julep backend. Any additional parameters needed by them at runtime can be set in the agent/session/user's `metadata` fields. + +4. Webhooks & `api_call`s (upcoming) + julep can build natural-language tools from openapi specs. Under the hood, we use [langchain's NLA toolkit](https://python.langchain.com/v0.2/docs/integrations/toolkits/openapi_nla/) for this. Same as `integration`s, additional runtime parameters are loaded from `metadata` fields. + +### Partial application of arguments to tools + +Often, it's necessary to _partial_ some arguments of a particular tool. You can do that by setting the `x-tool-parameters` field on the `metadata` of the required scope. For instance, say you have the following user-defined function tool: +```yaml +name: check_account_status +description: Get the account status for a customer +parameters: + type: object + properties: + customer_id: + type: string + required: true +``` + +When chatting with a particular user, the `customer_id` field is expected to be fixed. In this case, you can set it on the `User` using: +```json +{ + "metadata": { + ... + "x-tool-parameters": { + "function:check_account_status": { + "customer_id": 42 + } + } + } +} +``` + +The convention for naming the fields for that object is `":"`. The values are partial-applied to the tool _before_ being sent to the model. + +### Resolving parameters with the same name + +This follows the precedence order of `metadata` fields. For example, say you are interacting with the following session: +```yaml +user: + id: 11 + metadata: + x-tool-parameters: + favorite: Emma Roberts +agent: + id: 22 + metadata: + x-tool-parameters: + favorite: Emma Watson + tools: + - type: function + name: send_fan_mail + parameters: + # ... favorite: ... +session: + id: 123 + metadata: + x-tool-parameters: + favorite: Emma Stone +``` + +Then, the `send_fan_mail` will be called with the value of `favorite` set to the session's `metadata` (as dictated by the precedence order) to `"Emma Stone"`. + + + +## Doc + +`Doc`s are collection of text snippets (image support planned) that are indexed into a built-in vector database: +- They can be scoped to an agent or a user. +- Snippets are recalled inside sessions on the fly. +- The retrieval pipeline is optimized for general-purpose use cases. +- We use vector embedding models that strike a balance between accuracy and performance. +- Any snippets retrieved during a session are returned as part of the response for attribution. +- The embeddings are kept up to date as new models and techniques emerge. +- For advanced use cases, it might be necessary to roll your own. The pros of using julep are speed and automatic updates. + +You can use the `Doc`s by: +- Searching using a query or embedding directly, or +- When they are recalled within `Session`s based on the context. + +We use the latest state-of-the-art open-source embedding model for producing the vector embeddings. As new models and techniques emerge, we migrate the existing `Doc`s in the system to use them. + +_julep cloud users:_ It is not possible to change the embedding model being used. + + + +## Task + +`Task`s, in julep, are _Github Actions_ style workflows that define long-running, multi-step actions. You can use them to conduct complex actions by defining them step-by-step. They have access to all julep integrations. + +A `Task`s is a workflow owned by an `Agent`. It consists of: + +| **Field** | **Description** | +| :-------------- | :--------------------------------------------------------------- | +| `inherit_tools` | Inherit the parent `Agent`s tools? Defaults to `true`. | +| `tools` | Additional tools for this task. | +| `input_schema` | JSON schema to validate input when executing the task. Optional. | +| `main` +others | List of steps that this task has to complete. | + +### Example task definition + +There can be multiple named workflows in a task. `main` is the entry point workflow for the task execution. Let's see an example of a task: + +```yaml +# An example Task definition +name: Daily Motivation +input_schema: + about_user: + type: string + topics: + type: array + items: + type: string + +tools: +- function: + name: send_email + # ... + +main: +# Pick a random topic. +# `evaluate` step takes a key-value object where the values are valid python *expressions*. +- evaluate: + chosen_topic: _["topics"][randint(len(_["topics"]))] + +# Think about what support the user might need. +# Note: `inputs` and `outputs` are globals. +- prompt: You are a motivational coach and you are coaching someone who is {{inputs[0]["about_user"]}}. Think of the challenges they might be facing on the {{_["chosen_topic"]}} topic and what to do about them. Write down your answer as a bulleted list. + +# Write a poem about it. +# Note: `_` stands for `outputs[-1]` i.e. the last step's output +- prompt: Write a short motivational poem about {{_["choices"][0].content}} + +# Manually call the send_email function. +# `arguments` is an object where values are python expressions. +- tool: + name: send_email + arguments: + subject: '"Daily Motivation"' + content: _["choices"][0].content + +# Sleep for a day +- sleep: 24*3600 + +# Start all over again +- workflow: main + arguments: inputs[0] +``` + +### Types of workflow steps + +| **Step Type** | **Description** | +| :------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------- | +| Prompt step | Runs a prompt using a model. You can override settings and interpolate variables using [jinja](https://jinja.palletsprojects.com/) templates. | +| Yield step | Used to switch to another named workflow. Can add custom inputs (Default: output of previous steps) | +| Evaluate | Accepts an object with values that are valid python expressions. The step runs the expressions and the result becomes the output of this step. | +| If-else | Conditional step where the `if` field expression is evaluated. If the output is truthy then the `then` branch is executed, otherwise `else` branch. | +| Error | Throws an error with the message provided and exits. | +| Sleep | Sleeps for the number of seconds evaluated. | +| Tool Call | Call the specified tool with some arguments. | +| Foreach | Run a step for every value from a list in serial order. | +| Map-reduce | Run a step for every value of the input list in parallel. Requires a reduce expression to collect the results. | +| Doc search | Search the doc store of the agent and user against a query. | +| Wait for input | Suspend the execution and wait for the caller to resume execution with an input. | + +## Execution + +An `Execution` is an instance of a `Task` that has been started with some `input`. + +At any given moment, it can be in one of these states: + +| **Status** | **Description** | +| :--------------- | :------------------------------------------------------- | +| "queued" | The execution is queued and waiting to start. | +| "starting" | The execution is starting. | +| "running" | The execution is running. | +| "awaiting_input" | The execution is suspended and awaiting input to resume. | +| "succeeded" | The execution has succeeded. | +| "failed" | The execution failed for some reason. | +| "cancelled" | The execution has been cancelled by the user. | + +Every time an execution enters a new state, a `Transition` object is created on the backend with details of the state change. You can retrieve the transition history of the execution. + +## Memory (Upcoming) \ No newline at end of file diff --git a/fern/fern.config.json b/fern/fern.config.json index 0a2289fc0..7a5d561b7 100644 --- a/fern/fern.config.json +++ b/fern/fern.config.json @@ -1,4 +1,4 @@ { "organization": "julep", - "version": "0.33.2" + "version": "0.37.16" } \ No newline at end of file diff --git a/fern/generators.yml b/fern/generators.yml index d072b43ca..123e0e957 100644 --- a/fern/generators.yml +++ b/fern/generators.yml @@ -11,8 +11,3 @@ groups: output: location: local-file-system path: ../sdks/python/julep/api - - name: fernapi/fern-postman - version: 0.1.1 - output: - location: local-file-system - path: ../sdks/postman diff --git a/gateway/docker-compose.yml b/gateway/docker-compose.yml index 9f03ac098..8a6fa85d6 100644 --- a/gateway/docker-compose.yml +++ b/gateway/docker-compose.yml @@ -2,7 +2,6 @@ name: julep-gateway version: "3" include: - - ../model-serving/docker-compose.yml - ../agents-api/docker-compose.yml services: @@ -23,8 +22,6 @@ services: container_name: gateway depends_on: - model-serving: - condition: service_started agents-api: condition: service_started build: diff --git a/llm-proxy/.dockerignore b/llm-proxy/.dockerignore new file mode 100644 index 000000000..e7a1d3a41 --- /dev/null +++ b/llm-proxy/.dockerignore @@ -0,0 +1 @@ +!.keys diff --git a/llm-proxy/.gitignore b/llm-proxy/.gitignore new file mode 100644 index 000000000..aaab9d948 --- /dev/null +++ b/llm-proxy/.gitignore @@ -0,0 +1 @@ +.keys diff --git a/llm-proxy/docker-compose.yml b/llm-proxy/docker-compose.yml new file mode 100644 index 000000000..f4cab6ebe --- /dev/null +++ b/llm-proxy/docker-compose.yml @@ -0,0 +1,54 @@ +services: + litellm: + image: ghcr.io/berriai/litellm:main-stable + volumes: + - ./litellm-config.yaml:/app/config.yaml + - .keys:/app/.keys + ports: + - "4000:4000" + env_file: + - ../.env + command: + [ + "--config", + "/app/config.yaml", + "--port", + "4000", + "--num_workers", + "8", + "--telemetry", + "False" + ] + + depends_on: + - litellm-db + - litellm-redis + + litellm-db: + image: postgres + restart: always + volumes: + - litellm-db-data:/var/lib/postgresql/data + ports: + - "5432:5432" + env_file: + - ../.env + healthcheck: + test: [ "CMD-SHELL", "pg_isready -d litellm -U llmproxy" ] + interval: 1s + timeout: 5s + retries: 10 + + litellm-redis: + image: redis/redis-stack-server + restart: always + volumes: + - litellm-redis-data:/data + ports: + - "6379:6379" + env_file: + - ../.env + +volumes: + litellm-db-data: + litellm-redis-data: diff --git a/llm-proxy/litellm-config.yaml b/llm-proxy/litellm-config.yaml new file mode 100644 index 000000000..fc276ac38 --- /dev/null +++ b/llm-proxy/litellm-config.yaml @@ -0,0 +1,127 @@ +environment_variables: + NO_DOCS: "true" + +model_list: +# -*= Paid models =*- +# ------------------- + +# Gemini models +- model_name: gemini-1.5-pro + litellm_params: + model: vertex_ai_beta/gemini-1.5-pro + tags: ["paid"] + vertex_credentials: os.environ/GOOGLE_APPLICATION_CREDENTIALS + +- model_name: claude-3.5-sonnet + litellm_params: + model: vertex_ai/claude-3-5-sonnet@20240620 + tags: ["paid"] + vertex_credentials: os.environ/GOOGLE_APPLICATION_CREDENTIALS + +# OpenAI models +- model_name: "gpt-4-turbo" + litellm_params: + model: "openai/gpt-4-turbo" + tags: ["paid"] + api_key: os.environ/OPENAI_API_KEY + +- model_name: "gpt-4o" + litellm_params: + model: "openai/gpt-4o" + tags: ["paid"] + api_key: os.environ/OPENAI_API_KEY + +# Anthropic models +- model_name: "claude-3.5-sonnet" + litellm_params: + model: "claude-3-5-sonnet-20240620" + tags: ["paid"] + api_key: os.environ/ANTHROPIC_API_KEY + +# Groq models +- model_name: "llama-3.1-70b" + litellm_params: + model: "groq/llama-3.1-70b-versatile" + tags: ["paid"] + api_key: os.environ/GROQ_API_KEY + +- model_name: "llama-3.1-8b" + litellm_params: + model: "groq/llama-3.1-8b-instant" + tags: ["paid"] + api_key: os.environ/GROQ_API_KEY + + +# -*= Embedding models =*- +# ------------------------ + +- model_name: text-embedding-3-large + litellm_params: + model: "openai/text-embedding-3-large" + api_key: os.environ/OPENAI_API_KEY + tags: ["paid"] + +- model_name: voyage-multilingual-2 + litellm_params: + model: "voyage/voyage-multilingual-2" + api_key: os.environ/VOYAGE_API_KEY + tags: ["paid"] + +- model_name: voyage-large-2 + litellm_params: + model: "voyage/voyage-large-2" + api_key: os.environ/VOYAGE_API_KEY + tags: ["paid"] + +- model_name: gte-large-en-v1.5 + litellm_params: + model: openai/Alibaba-NLP/gte-large-en-v1.5 + api_base: os.environ/EMBEDDING_SERVICE_BASE + tags: ["free"] + +- model_name: bge-m3 + litellm_params: + model: openai/BAAI/bge-m3 + api_base: os.environ/EMBEDDING_SERVICE_BASE + tags: ["free"] + + +# -*= Free models =*- +# ------------------- + +- model_name: gpt-4o-mini + litellm_params: + model: openai/gpt-4o-mini + api_key: os.environ/OPENAI_API_KEY + tags: ["free"] + + +# https://github.com/BerriAI/litellm/blob/main/litellm/__init__.py +litellm_settings: + num_retries: 3 + request_timeout: 180 + allowed_fails: 3 + cooldown_time: 30 + drop_params: true + modify_params: true + telemetry: false + retry: true + add_function_to_prompt: true + + set_verbose: false + cache: true + cache_params: # set cache params for redis + type: redis + namespace: "litellm_caching" + host: os.environ/LITELLM_REDIS_HOST + port: os.environ/LITELLM_REDIS_PORT + password: os.environ/LITELLM_REDIS_PASSWORD + +router_settings: + routing_strategy: simple-shuffle + num_retries: 3 + +general_settings: + master_key: os.environ/LITELLM_MASTER_KEY + database_url: os.environ/LITELLM_DATABASE_URL + enforce_user_param: true \ No newline at end of file diff --git a/model-serving/.gitignore b/model-serving/.gitignore deleted file mode 100644 index 07c94e456..000000000 --- a/model-serving/.gitignore +++ /dev/null @@ -1,163 +0,0 @@ -# pickle files -notebooks/*.pickle - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ diff --git a/model-serving/.tool-versions b/model-serving/.tool-versions deleted file mode 100644 index 47cd22e3c..000000000 --- a/model-serving/.tool-versions +++ /dev/null @@ -1 +0,0 @@ -python 3.10.13 diff --git a/model-serving/Dockerfile b/model-serving/Dockerfile deleted file mode 100644 index b25c81eba..000000000 --- a/model-serving/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM vllm/vllm-openai:v0.5.0 as base - - -# Define the entrypoint -ENV MODEL_NAME julep-ai/Hermes-2-Theta-Llama-3-8B -ENV TP_SIZE 1 -ENV MAX_MODEL_LEN 8192 -ENV MAX_NUM_SEQS 1 -ENV GPU_MEMORY_UTILIZATION 0.95 -ENV DTYPE bfloat16 -ENV MODEL_API_KEY myauthkey -ENTRYPOINT python3 -m vllm.entrypoints.openai.api_server --model $MODEL_NAME --tensor-parallel-size $TP_SIZE --enforce-eager --gpu-memory-utilization $GPU_MEMORY_UTILIZATION --max-model-len $MAX_MODEL_LEN --max-num-seqs $MAX_NUM_SEQS --dtype $DTYPE --trust-remote-code --api_key=$MODEL_API_KEY diff --git a/model-serving/README.md b/model-serving/README.md deleted file mode 100644 index 7b76aaf6d..000000000 --- a/model-serving/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# Samantha API -Samantha API server - -## Deploying to modal - -1. Login to modal using CLI. -2. Set up default env: `modal config set-environment staging` -3. `modal deploy --env staging modal_staging.py` - -## Install and run vllm service -```bash -$ cd services/vllm -$ poetry install -$ poetry shell -$ python samantha_api/web.py --model ehartford/samantha-33b --tensor-parallel-size 2 --host 127.0.0.1 --port 8000 --backlog 4096 - -$ python -m model_api --model julep-ai/samantha-1-turbo -``` - -## Set up skypilot to run service on A100 spot instances - -you can use this as a starting point: -https://github.com/julep-ai/samantha-monorepo/blob/main/infra/sky/vllm.yaml - -### Docs: -- quickstart: https://skypilot.readthedocs.io/en/latest/getting-started/quickstart.html -- spot jobs: https://skypilot.readthedocs.io/en/latest/examples/spot-jobs.html -- services: https://skypilot.readthedocs.io/en/latest/serving/sky-serve.html - -### Setup: -1. Authenticate gcloud cli. `gcloud auth login` and then `gcloud auth application-default login` -1. `pip install --upgrade skypilot nightly` -1. Run `sky check` to check that it detected the gcp credentials - -### Create service: -1. Edit the vllm.yaml file with setup instructions of our custom code -1. `sky serve up -n vllm-service vllm.yaml` to start service (no support for in-place update unfortunately) -1. `sky serve logs vllm-service 1` (1 is the ID of first replica, repeat for every replica) -1. `watch -n10 sky serve status` for live status of services - -### Notes: -1. Right now `sky serve up` does not support using environment variables for some reason so set them manually in the file itself (and remember to unset before committing to git) -1. Right now `sky serve` does not support updating a service -- which means if you change anything, you have to `sky serve down vllm-service` and then `sky serve up ...` again... diff --git a/model-serving/artifacts/function_classifier.bin b/model-serving/artifacts/function_classifier.bin deleted file mode 100644 index 268dd2879..000000000 Binary files a/model-serving/artifacts/function_classifier.bin and /dev/null differ diff --git a/model-serving/artifacts/nous-llama-fix.ipynb b/model-serving/artifacts/nous-llama-fix.ipynb deleted file mode 100644 index 0cfad097e..000000000 --- a/model-serving/artifacts/nous-llama-fix.ipynb +++ /dev/null @@ -1,278 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 86, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/sidbin/miniconda3/envs/julep/lib/python3.10/site-packages/huggingface_hub/file_download.py:1132: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.\n", - " warnings.warn(\n" - ] - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "b9c2b4fa7a02414581da3b7ec438472a", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "tokenizer_config.json: 0%| | 0.00/50.7k [00:00', '')" - ] - }, - "execution_count": 89, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "old_token, new_token" - ] - }, - { - "cell_type": "code", - "execution_count": 79, - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "8fbb2345fcca46c5b4f8cdbf7fac4901", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Fetching 12 files: 0%| | 0/12 [00:00') | list --%} - -{%- set input_messages_roles = - messages | map(attribute='role') | list --%} - -{%- set idx_first_assistant = - input_messages_roles.index('assistant') - if 'assistant' in input_messages_roles - else -1 --%} - -{%- if not messages -%} - {{ raise_exception('No messages') if 'raise_exception' is filter else 1 / 0 }} -{%- endif -%} - -{%- set ns = namespace(messages=messages) -%} - -{%- set first_message = ns.messages[0] -%} -{%- set has_situation = - first_message.role == 'system' - and first_message.name == 'situation' - or not first_message.name --%} - -{%- set default_situation = { - 'role': 'system', - 'name': 'situation', - 'content': 'You are a helpful AI Assistant.' -} -%} - -{%- if not has_situation -%} - {%- set ns.messages = [default_situation] + ns.messages -%} -{%- endif -%} - -{#- Validation -#} -{#- ---------- -#} - -{%- for message in ns.messages -%} - {%- set role = message.role | lower -%} - {%- set name = message.name | default('situation' if role == 'system' else '') -%} - {%- set is_situation = role == 'system' and name == 'situation' -%} - - {#- Validate role -#} - {%- if role not in allowed_roles -%} - {{ raise_exception('Invalid role: ' + role) if 'raise_exception' is filter else 1 / 0 }} - {%- endif -%} - - {#- Validate system name -#} - {%- if role == 'system' and name not in allowed_system_names -%} - {{ raise_exception('Invalid name for role=system: ' + name) if 'raise_exception' is filter else 1 / 0 }} - {%- endif -%} - - {#- Validate situation tag can only be in first message -#} - {%- if not loop.first and is_situation -%} - {{ raise_exception('Situation can only appear as the very first message') if 'raise_exception' is filter else 1 / 0 }} - {%- endif -%} - - {#- Validate continue=True only in last message -#} - {%- if not loop.last and message.continue -%} - {{ raise_exception('continue=True can only appear in the last message') if 'raise_exception' is filter else 1 / 0 }} - {%- endif -%} - -{%- endfor -%} - -{#- Render -#} -{#- ------ -#} - -{#- - Add bos token in the beginning; - as tokenizer.apply_chat_template does not do that automatically. - https://github.com/huggingface/transformers/blob/main/src/transformers/tokenization_utils_base.py#L1753 --#} -{{ bos_token }} - -{%- for message in ns.messages -%} - {%- set role = message.role | lower -%} - {%- set name = message.name | default('situation' if role == 'system' else '') -%} - - {#- Process content -#} - {%- set content_ns = namespace(value=message.content) -%} - - {#- Convert functions to json if needed -#} - {%- if role == 'system' and name == 'functions' and content is not string -%} - {%- set content_ns.value = - 'Available functions:\n\n' - + (content_ns.value | map('tojson', indent=4) | join('\n')) - -%} - {%- endif -%} - - {#- Escape content -#} - {%- for escape_token in escape_tokens -%} - {#- Replace '<|im_start|>' with '< |im_start|>' and so on -#} - {%- set content_ns.value = content_ns.value | replace( - escape_token, - escape_token.replace(escape_token[0], escape_token[0]+' ', 1) - ) -%} - {%- endfor -%} - - {#- Strip trailing single space -#} - {%- if (content_ns.value | length) > 1 - and content_ns.value[-1] == ' ' - and content_ns.value[-2] != ' ' - -%} - {%- set content_ns.value = content_ns.value[:-1] -%} - {%- endif -%} - -{{ newline_unless_first() }}<|im_start|> - {%- if role == 'system' -%}{{ name }} - {%- elif role == 'user' -%}person{{ ' (' + name + ')' if name else '' }} - {%- elif role == 'assistant' -%}me - {#- Only add name for assistant in the very first example -#} - {%- if name and loop.index0 == idx_first_assistant -%}{{ ' (' + name + ')' }}{%- endif -%} - {%- elif role == 'function_call' -%}function_call - {%- endif -%} - -{{ '\n' + content_ns.value }} - - {%- if not loop.last -%} -<|im_end|> - {%- elif not message.continue -%} -<|im_end|> - {%- if add_generation_prompt -%} -{{ '\n<|im_start|>' }} - {%- endif -%} - {%- endif -%} - -{%- endfor -%} diff --git a/model-serving/model_api/conversion/__init__.py b/model-serving/model_api/conversion/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/model-serving/model_api/conversion/conversions.py b/model-serving/model_api/conversion/conversions.py deleted file mode 100644 index 27fc8e1f8..000000000 --- a/model-serving/model_api/conversion/conversions.py +++ /dev/null @@ -1,233 +0,0 @@ -from io import StringIO -import re - -from .datatypes import ChatML, ChatMLMessage -from .exceptions import ( - InvalidPromptException, - InvalidFunctionName, - InvalidMessageFormat, -) -from ..protocol import RequestFunctionCall, FunctionCall, FunctionDef - - -me_regex = re.compile(r"(?Pme)(\s+\((?P.+)\)|$)") -person_regex = re.compile(r"(?Pperson)(\s+\((?P.+)\)|$)") - - -def parse_message(message: str) -> ChatMLMessage: - parts = message.split("\n", 1) - - tag = "" - content = message - if len(parts) > 1: - tag = parts[0].strip() - content = parts[1].lstrip() - - if tag in ("situation", "information", "thought"): - return ChatMLMessage(role="system", name=tag, content=content) - - if tag == "function_call": - return ChatMLMessage(role="assistant", content=None, function_call=content) - - assistant = me_regex.match(tag) - if assistant: - return ChatMLMessage(role="assistant", name=assistant["name"], content=content) - - person = person_regex.match(tag) - if person: - return ChatMLMessage(role="user", name=person["name"], content=content) - - return ChatMLMessage(role="assistant", content=message) - - -def message_role_to_prefix(message: ChatMLMessage) -> str | None: - match (message.model_dump()): - case {"role": "user", **rest}: - name = rest.get("name") - return f"person ({name})" if name else "person" - - case {"role": "assistant", **rest}: - name = rest.get("name") - return f"me ({name})" if name else "me" - - case {"role": "function_call", **rest}: - return "function_call" - - case {"role": "system", "name": "functions", **rest}: - return "functions" - - case {"role": "system", "name": "thought", **rest}: - return "thought" - - case {"role": "system", "name": "information", **rest}: - return "information" - - case {"role": "system", "name": "situation", **rest}: - return "situation" - - # If empty tag, then assume role="situation" - case {"role": "system", **rest}: - name = rest.get("name") - return name.lower() if name else "situation" - - case msg: - raise InvalidMessageFormat(msg) - - -def _check_last_message(message: ChatMLMessage): - match (message.model_dump()): - case ( - {"role": "system", "name": "thought", **_rest} - | {"role": "assistant", **_rest} - | {"role": "system", "name": "functions", **_rest} - | {"role": "function_call", **_rest} - ): - return True - - return False - - -def _validate_message(message: ChatMLMessage, continue_: bool, is_last: bool): - msg_role = message.role - if not msg_role: - raise InvalidPromptException("'role' can not be null") - - if not message.content and not (is_last and continue_): - raise InvalidPromptException("'content' can not be null") - - #### "functions" is only valid as a system name - allowed_roles = {"system", "user", "assistant", "function_call"} - if msg_role not in allowed_roles: - raise InvalidPromptException(f"role must be one of {allowed_roles}") - - allowed_system_names = { - "situation", - "thought", - "information", - "functions", - "instruction", - None, - } - - if msg_role == "system" and message.name not in allowed_system_names: - raise InvalidPromptException( - f"name for role 'system' must be one of {allowed_system_names}" - ) - - if is_last and continue_ and not _check_last_message(message): - raise InvalidPromptException( - "last message with continue=True can not have this format" - ) - - if not is_last and continue_: - raise InvalidPromptException( - "only last message can have 'continue' equal to True" - ) - - -def _validate_functions( - functions: list[FunctionDef], function_call: FunctionCall -) -> list[FunctionDef]: - for f in functions: - if f.name.strip() == function_call.name.strip(): - return [f] - - raise InvalidFunctionName(function_call.name) - - -def to_prompt( - messages: ChatML, - bos: str = "<|im_start|>", - eos: str = "<|im_end|>", - functions: list[FunctionDef] | None = None, - function_call: RequestFunctionCall | None = None, -) -> str: - # Input format: - # [ - # {"role": "system", "name": "situation", "content": "I am talking to Diwank"}, - # {"role": "assistant", "name": "Samantha", "content": "Hey Diwank"}, - # {"role": "user", "name": "Diwank", "content": "Hey!"}, - # ] - - # Output format: - # - # <|section|>situation - # I am talking to Diwank<|endsection|> - # <|section|>me (Samantha) - # Hey Diwank<|endsection|> - # <|section|>person (Diwank) - # Hey<|endsection|> - # <|section|>me (Samantha)\n - - if functions: - if function_call not in ("auto", "none", None): - formatted_functions: str = "\n".join( - [ - f.model_dump_json(indent=4) - for f in _validate_functions(functions, function_call) - ] - ) - - functions_msg = ChatMLMessage( - role="system", - name="functions", - content=f"Available functions:\n\n{formatted_functions}", - ) - messages.insert(1, functions_msg) - if messages[-1].continue_: - raise InvalidPromptException( - "Conflicting instructions, " - "please remove the last instruction with 'continue' " - "flag set to 'true' or set the flag to 'false'. " - "You can either remove `functions` and/or `function_call` parameters." - ) - - # Get function name (could be a string or an object) - if isinstance(function_call, FunctionCall): - function_name = function_call.name - else: - function_name = function_call - - messages.append( - ChatMLMessage( - role="function_call", - continue_=True, - content=f'{{"name": "{function_name}",', - ) - ) - - elif function_call in ("auto", None): - formatted_functions: str = "\n".join( - [f.model_dump_json(indent=4) for f in functions] - ) - - messages.insert( - 1, - ChatMLMessage( - role="system", - name="functions", - content=f"Available functions:\n\n{formatted_functions}", - ), - ) - - prompt = StringIO() - add_extra_message = False - - for idx, message in enumerate(messages): - continue_ = message.continue_ - is_last = idx == len(messages) - 1 - - _validate_message(message, continue_, is_last) - if is_last and not continue_: - add_extra_message = True - - end_tag = "" if is_last and continue_ else f"{eos}\n" - content = f"{bos}{message_role_to_prefix(message)}\n{(message.content or '').strip()}{end_tag}" - prompt.write(content) - - if add_extra_message: - content = bos if functions and function_call in ("auto", None) else f"{bos}me\n" - - prompt.write(content) - - return prompt.getvalue() diff --git a/model-serving/model_api/conversion/datatypes.py b/model-serving/model_api/conversion/datatypes.py deleted file mode 100644 index a3d2a40e1..000000000 --- a/model-serving/model_api/conversion/datatypes.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Literal - -from pydantic import BaseModel, Field, ConfigDict - - -ValidRole = Literal["assistant", "system", "user", "function_call"] - - -class ChatMLMessage(BaseModel): - model_config = ConfigDict(populate_by_name=True) - - name: str | None = None - role: ValidRole | None = None - content: str | None = None - continue_: bool | None = Field(default=None, alias="continue") - function_call: str | None = None - - -ChatML = list[ChatMLMessage] diff --git a/model-serving/model_api/conversion/exceptions.py b/model-serving/model_api/conversion/exceptions.py deleted file mode 100644 index 983c39a19..000000000 --- a/model-serving/model_api/conversion/exceptions.py +++ /dev/null @@ -1,17 +0,0 @@ -class BaseException(Exception): - pass - - -class InvalidPromptException(BaseException): - def __init__(self, msg: str): - super().__init__(f"Invalid prompt format: {msg}") - - -class InvalidFunctionName(BaseException): - def __init__(self, msg: str): - super().__init__(f"Invalid function name: {msg}") - - -class InvalidMessageFormat(BaseException): - def __init__(self, msg: str): - super().__init__(f"Invalid message format: {msg}") diff --git a/model-serving/model_api/conversion/test_conversions.py b/model-serving/model_api/conversion/test_conversions.py deleted file mode 100644 index 71437faa1..000000000 --- a/model-serving/model_api/conversion/test_conversions.py +++ /dev/null @@ -1,813 +0,0 @@ -import pytest -from .conversions import to_prompt -from .exceptions import InvalidFunctionName, InvalidPromptException -from .datatypes import ChatMLMessage -from ..protocol import FunctionDef, FunctionCall - - -def test_function_call_none_last_not_continue(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="none", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me -""" - ) - - -def test_function_call_auto_functions_not_passed(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="auto", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me -""" - ) - - -def test_function_call_none_functions_not_passed(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call=None, - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me -""" - ) - - -def test_function_call_auto_functions_passed(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ) - ] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="auto", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -def test_function_call_none_functions_passed(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ) - ] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call=None, - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -def test_function_call_none_last_continue(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ChatMLMessage(**{"role": "assistant", "name": "Samantha", "continue": True}), - ] - functions = [] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="none", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me (Samantha) -""" - ) - - -def test_function_call_none_last_continue_function_call(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ChatMLMessage(**{"role": "function_call", "content": "{}", "continue": True}), - ] - functions = [] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="none", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>function_call -{}""" - ) - - -def test_function_call_auto_last_not_continue(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ), - FunctionDef( - **{ - "name": "other_func", - "description": "Logic", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ), - ] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="auto", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -} -{ - "name": "other_func", - "description": "Logic", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -def test_function_call_auto_last_continue(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ChatMLMessage(**{"role": "assistant", "name": "Samantha", "continue": True}), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ) - ] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="auto", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me (Samantha) -""" - ) - - -def test_function_call_auto_last_continue_function_call(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ChatMLMessage(**{"role": "function_call", "continue": True}), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ) - ] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call="auto", - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>function_call -""" - ) - - -def test_function_call_func_name_last_not_continue(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [ - FunctionDef( - **{ - "name": "other_func", - "description": "Logic", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ), - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ), - ] - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call=FunctionCall(**{"name": "generate_anagram"}), - ) - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of" - } - }, - "required": [ - "word" - ] - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>function_call -{"name": "generate_anagram",""" - ) - - -def test_function_call_func_name_last_not_continue_invalid_function_name(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ] - functions = [ - FunctionDef( - **{ - "name": "other_func", - "description": "Logic", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ), - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ), - ] - with pytest.raises(InvalidFunctionName) as e_info: - to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call=FunctionCall(**{"name": "unknown"}), - ) - assert e_info.value.args[0] == "Invalid function name: unknown" - - -def test_function_call_func_name_last_continue(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ChatMLMessage(**{"role": "assistant", "name": "Samantha", "continue": True}), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ) - ] - with pytest.raises(InvalidPromptException) as e_info: - to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call=FunctionCall(**{"name": "generate_anagram"}), - ) - assert e_info.value.args[0] == ( - "Invalid prompt format: Conflicting instructions, " - "please remove the last instruction with 'continue' " - "flag set to 'true' or set the flag to 'false'. " - "You can either remove `functions` and/or `function_call` parameters." - ) - - -def test_function_call_func_name_last_continue_function_call(): - messages = [ - ChatMLMessage( - **{"role": "system", "name": "situation", "content": "I am talking to John"} - ), - ChatMLMessage( - **{"role": "assistant", "name": "Samantha", "content": "Hey John"} - ), - ChatMLMessage(**{"role": "user", "name": "John", "content": "Hey!"}), - ChatMLMessage( - **{ - "role": "function_call", - "content": '{"name": "generate_anagram", ', - "continue": True, - } - ), - ] - functions = [ - FunctionDef( - **{ - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ) - ] - with pytest.raises(InvalidPromptException) as e_info: - to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - functions=functions, - function_call=FunctionCall(**{"name": "generate_anagram"}), - ) - assert e_info.value.args[0] == ( - "Invalid prompt format: Conflicting instructions, " - "please remove the last instruction with 'continue' " - "flag set to 'true' or set the flag to 'false'. " - "You can either remove `functions` and/or `function_call` parameters." - ) - - -def test_information_message(): - messages = [ - ChatMLMessage( - **{ - "role": "system", - "name": "information", - "content": "I am talking to John", - } - ) - ] - - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - ) - - assert ( - prompt - == """<|im_start|>information -I am talking to John<|im_end|> -<|im_start|>me -""" - ) - - -def test_situation_name_is_none(): - messages = [ - ChatMLMessage( - **{ - "role": "system", - "content": "I am talking to John", - } - ) - ] - - prompt = to_prompt( - messages, - bos="<|im_start|>", - eos="<|im_end|>", - ) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me -""" - ) diff --git a/model-serving/model_api/dependencies/__init__.py b/model-serving/model_api/dependencies/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/model-serving/model_api/dependencies/auth.py b/model-serving/model_api/dependencies/auth.py deleted file mode 100644 index 5df7787ad..000000000 --- a/model-serving/model_api/dependencies/auth.py +++ /dev/null @@ -1,19 +0,0 @@ -from fastapi.security.api_key import APIKeyHeader -from fastapi import Security, HTTPException -from starlette.status import HTTP_403_FORBIDDEN - -from ..env import api_key, api_key_header_name - - -api_key_header = APIKeyHeader(name=api_key_header_name, auto_error=False) - - -async def get_api_key(user_api_key: str = Security(api_key_header)): - user_api_key = (user_api_key or "").replace("Bearer ", "").strip() - - if user_api_key != api_key: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Could not validate API KEY" - ) - else: - return user_api_key diff --git a/model-serving/model_api/dependencies/developer.py b/model-serving/model_api/dependencies/developer.py deleted file mode 100644 index 5edee7c35..000000000 --- a/model-serving/model_api/dependencies/developer.py +++ /dev/null @@ -1,41 +0,0 @@ -import uuid -from typing import Annotated -from fastapi import Header -from pydantic import validate_email -from pydantic_core import PydanticCustomError - -from ..env import skip_check_developer_headers -from .exceptions import InvalidHeaderFormat - - -async def get_developer_id(x_developer_id: Annotated[str | None, Header()] = None): - if skip_check_developer_headers: - return x_developer_id or uuid.UUID("00000000-0000-0000-0000-000000000000") - - if not x_developer_id: - raise InvalidHeaderFormat("X-Developer-Id header invalid") - - if isinstance(x_developer_id, str): - try: - x_developer_id = uuid.UUID(x_developer_id, version=4) - except ValueError: - raise InvalidHeaderFormat("X-Developer-Id must be a valid UUID") - - return x_developer_id - - -async def get_developer_email( - x_developer_email: Annotated[str | None, Header()] = None -): - if skip_check_developer_headers: - return x_developer_email or "unknown_user@mail.com" - - if not x_developer_email: - raise InvalidHeaderFormat("X-Developer-Email header invalid") - - try: - validate_email(x_developer_email) - except PydanticCustomError: - raise InvalidHeaderFormat("X-Developer-Email header invalid") - - return x_developer_email diff --git a/model-serving/model_api/dependencies/exceptions.py b/model-serving/model_api/dependencies/exceptions.py deleted file mode 100644 index 491868fa6..000000000 --- a/model-serving/model_api/dependencies/exceptions.py +++ /dev/null @@ -1,2 +0,0 @@ -class InvalidHeaderFormat(Exception): - pass diff --git a/model-serving/model_api/env.py b/model-serving/model_api/env.py deleted file mode 100644 index 7cbe420cf..000000000 --- a/model-serving/model_api/env.py +++ /dev/null @@ -1,36 +0,0 @@ -from pprint import pprint - -from environs import Env - - -env = Env() -env.read_env() - - -sentry_dsn: str = env.str("SENTRY_DSN", default="") -api_key: str = env.str("MODEL_API_KEY") -api_key_header_name: str = env.str("MODEL_API_KEY_HEADER_NAME", default="X-Auth-Key") -host: str = env.str("MODEL_API_HOST", default="0.0.0.0") -port: int = env.int("MODEL_API_PORT", default=8000) -backlog: int = env.int("MODEL_API_BACKLOG", default=2048) -skip_check_developer_headers: bool = env.bool( - "SKIP_CHECK_DEVELOPER_HEADERS", default=False -) -temperature_scaling_factor: float = env.float("TEMPERATURE_SCALING_FACTOR", default=1.0) -temperature_scaling_power: float = env.float("TEMPERATURE_SCALING_POWER", default=1.0) - -environment = dict( - sentry_dsn=sentry_dsn, - api_key=api_key, - api_key_header_name=api_key_header_name, - host=host, - port=port, - backlog=backlog, - skip_check_developer_headers=skip_check_developer_headers, - temperature_scaling_factor=temperature_scaling_factor, - temperature_scaling_power=temperature_scaling_power, -) - -print("Environment variables:") -pprint(environment) -print() diff --git a/model-serving/model_api/function_classifier.py b/model-serving/model_api/function_classifier.py deleted file mode 100644 index 66372ca33..000000000 --- a/model-serving/model_api/function_classifier.py +++ /dev/null @@ -1,50 +0,0 @@ -from functools import cache -import os -import pickle - -import torch - -from .tokens import tag_start_id_map - - -@cache -def load_function_classifier( - path: str = os.path.join( - os.path.dirname(__file__), "../artifacts/function_classifier.bin" - ) -): - """ - Load the classifier model from disk. - """ - - with open(path, "rb") as f: - return pickle.load(f) - - -def classify_function_call(logit_tensor: torch.Tensor) -> bool: - """ - Classify the input logit tensor as a function call or not. - """ - - # Load the classifier - classifier = load_function_classifier() - - # Get input - valid_tag_start_ids = list(tag_start_id_map.values()) - - # Only get the logits for the valid tag start ids - input = logit_tensor[valid_tag_start_ids] - - # Convert to numpy (bfloat16 is not supported by numpy) - input = input.cpu().to(dtype=torch.float16).numpy() - - # Reshape since the classifier expects a 2D array - # (1, -1) means 1 row and as many columns as needed - input = input.reshape(1, -1) - - # Get prediction - output = classifier.predict(input) - prediction = output[0] - prediction = bool(prediction) - - return prediction diff --git a/model-serving/model_api/logger.py b/model-serving/model_api/logger.py deleted file mode 100644 index 95260a3ef..000000000 --- a/model-serving/model_api/logger.py +++ /dev/null @@ -1,9 +0,0 @@ -import logging - - -logger = logging.getLogger(__name__) -h = logging.StreamHandler() -f = logging.Formatter("[%(asctime)s/%(levelname)s] - %(message)s") -h.setFormatter(f) -logger.addHandler(h) -logger.setLevel(logging.DEBUG) diff --git a/model-serving/model_api/logits_processors.py b/model-serving/model_api/logits_processors.py deleted file mode 100644 index 75391e7e2..000000000 --- a/model-serving/model_api/logits_processors.py +++ /dev/null @@ -1,55 +0,0 @@ -import torch - -from .function_classifier import classify_function_call -from .tokens import tag_start_id_map - -allowed_tag_start_token_id = list(tag_start_id_map.values()) - - -def drop_disallowed_start_tags( - previously_generated_tokens: list[int], - next_token_logits: torch.Tensor, -) -> torch.Tensor: - """ - Logits processor that sets the next token logits to -inf for all tokens that - do NOT correspond to allowed tag start tokens. - """ - - if len(previously_generated_tokens) > 0: - return next_token_logits - - next_token_logits_copy = next_token_logits.clone() - - # Creating a mask that is True for all elements except those at token indices of allowed - mask = torch.ones_like(next_token_logits_copy, dtype=torch.bool) - mask[allowed_tag_start_token_id] = False - - # Setting all except allowed to -inf - next_token_logits_copy[mask] = float("-inf") - - return next_token_logits_copy - - -def fix_function_call_prediction( - previously_generated_tokens: list[int], - next_token_logits: torch.Tensor, -) -> torch.Tensor: - """ - Logits processor that either allows or disallows the generation of function calls. - """ - - if len(previously_generated_tokens) > 0: - return next_token_logits - - next_token_logits_copy = next_token_logits.clone() - is_function_call = classify_function_call(next_token_logits_copy) - correct_tag_id = tag_start_id_map["function_call" if is_function_call else "me"] - - # Creating a mask that is True for all elements except the corrected tag - mask = torch.ones_like(next_token_logits_copy, dtype=torch.bool) - mask[correct_tag_id] = False - - # Setting all except allowed to negative inf - next_token_logits_copy[mask] = float("-inf") - - return next_token_logits_copy diff --git a/model-serving/model_api/metrics.py b/model-serving/model_api/metrics.py deleted file mode 100644 index 93b2d4eba..000000000 --- a/model-serving/model_api/metrics.py +++ /dev/null @@ -1,121 +0,0 @@ -import psutil -from aioprometheus import Gauge, Counter, MetricsMiddleware -from pynvml import ( - nvmlInit, - nvmlDeviceGetCount, - nvmlDeviceGetHandleByIndex, - nvmlDeviceGetMemoryInfo, - nvmlDeviceGetUtilizationRates, -) - -nvmlInit() - - -def _gpu_usage(): - res = [] - for index in range(nvmlDeviceGetCount()): - handle = nvmlDeviceGetHandleByIndex(index) - info = nvmlDeviceGetMemoryInfo(handle) - try: - util = nvmlDeviceGetUtilizationRates(handle) - util_gpu = util.gpu - util_memory = util.memory - except Exception: - util_gpu = 0 - util_memory = 0 - - res.append( - { - "mem_total": info.total, - "mem_free": info.free, - "mem_used": info.used, - "gpu_util_percents": util_gpu, - "gpu_memory_percents": util_memory, - } - ) - - return res - - -cpu_percent_metric = Gauge( - "cpu_percent_usage_info", - "CPU percent usage info", -) -mem_total_metric = Gauge( - "mem_total_info", - "Total memory info", -) -mem_free_metric = Gauge( - "mem_free_info", - "Free memory info", -) -mem_used_metric = Gauge( - "mem_used_info", - "Used memory info", -) -mem_percent_metric = Gauge( - "mem_percent_info", - "Memory used percentage info", -) - - -gpu_mem_total_metric = Gauge( - "gpu_mem_total", - "GPU memory total", -) -gpu_mem_free_metric = Gauge( - "gpu_mem_free", - "GPU memory free", -) -gpu_mem_used_metric = Gauge( - "gpu_mem_used", - "GPU memory used", -) -gpu_util_percents_metric = Gauge( - "gpu_util_percents", - "GPU utilization percents", -) -gpu_memory_percents_metric = Gauge( - "gpu_memory_percents", - "GPU utilization percents", -) - - -tokens_per_user_metric = Counter( - "total_tokens_per_user", - "Total tokens per user", -) - - -generation_time_metric = Gauge( - "model_response_generation_time", - "Model response generation time", -) - - -generated_tokens_per_second_metric = Gauge( - "generated_token_per_second", - "Generated tokens per second", -) - - -class MetricsMiddleware(MetricsMiddleware): - async def __call__(self, *args, **kwargs): - mem = psutil.virtual_memory() - - cpu_percent_metric.set({}, psutil.cpu_percent()) - mem_total_metric.set({}, mem.total) - mem_free_metric.set({}, mem.free) - mem_used_metric.set({}, mem.used) - mem_percent_metric.set({}, mem.percent) - - usage = _gpu_usage() - for idx, u in enumerate(usage): - idx = str(idx) - gpu_mem_total_metric.set({"gpu_index": idx}, u["mem_total"]) - gpu_mem_free_metric.set({"gpu_index": idx}, u["mem_free"]) - gpu_mem_used_metric.set({"gpu_index": idx}, u["mem_used"]) - gpu_util_percents_metric.set({"gpu_index": idx}, u["gpu_util_percents"]) - gpu_memory_percents_metric.set({"gpu_index": idx}, u["gpu_memory_percents"]) - - return await super().__call__(*args, **kwargs) diff --git a/model-serving/model_api/protocol.py b/model-serving/model_api/protocol.py deleted file mode 100644 index ba566abdd..000000000 --- a/model-serving/model_api/protocol.py +++ /dev/null @@ -1,479 +0,0 @@ -from enum import Enum -from typing import Literal, TypeAlias, Union, Optional -from pydantic import BaseModel, Field, ConfigDict, validator -from vllm.entrypoints.openai.protocol import ( - CompletionRequest, - ChatCompletionRequest, - ChatCompletionResponseChoice, - ChatCompletionResponseStreamChoice, - ChatCompletionStreamResponse, - ChatCompletionResponse, - ChatMessage, - DeltaMessage, -) -from vllm.sampling_params import LogitsProcessor, SamplingParams -from .conversion.datatypes import ChatML - - -DEFAULT_MAX_TOKENS = 4000 - - -class FunctionCall(BaseModel): - name: str - - -RequestFunctionCall: TypeAlias = Union[Literal["none", "auto"], FunctionCall] - - -class ToolCall(BaseModel): - id: str - type: Literal["function"] - function: str - - -class ChatMessage(ChatMessage): - name: str | None = None - function_call: str | None = None - tool_calls: list[ToolCall] | None = None - content: str | None = None - - -class DeltaMessage(DeltaMessage): - name: str | None = None - function_call: str | None = None - tool_calls: list[ToolCall] | None = None - - -class ChatCompletionResponseChoice(ChatCompletionResponseChoice): - message: ChatMessage - finish_reason: Literal["stop", "length", "function_call", "tool_calls"] | None = ( - None - ) - - -class ChatCompletionResponseStreamChoice(ChatCompletionResponseStreamChoice): - delta: DeltaMessage - - -class ChatCompletionStreamResponse(ChatCompletionStreamResponse): - choices: list[ChatCompletionResponseStreamChoice] - - -class ResponseFormat(BaseModel): - type_: str = Field(..., alias="type") - - -class FunctionParameters(BaseModel): - model_config = ConfigDict(extra="allow") - - -class FunctionDef(BaseModel): - name: str - description: str | None = None - parameters: FunctionParameters - - -class ToolType(Enum): - function = "function" - - -class NamedToolChoice(BaseModel): - type: ToolType - function: FunctionCall - - -ToolChoice: TypeAlias = Union[Literal["none", "auto"], NamedToolChoice] - - -class Type(Enum): - function = "function" - webhook = "webhook" - - -class Tool(BaseModel): - type: Type - function: FunctionDef - id: Optional[str] = None - - -class SamplingParams(SamplingParams): - _properties = [ - "n", - "best_of", - "presence_penalty", - "frequency_penalty", - "repetition_penalty", - "temperature", - "top_p", - "top_k", - "min_p", - "seed", - "use_beam_search", - "length_penalty", - "early_stopping", - "stop", - "stop_token_ids", - "include_stop_str_in_output", - "ignore_eos", - "max_tokens", - "logprobs", - "prompt_logprobs", - "skip_special_tokens", - "spaces_between_special_tokens", - "logits_processors", - ] - - def __init__( - self, - n: int = 1, - best_of: int | None = None, - presence_penalty: float = 0.0, - frequency_penalty: float = 0.01, # Custom - repetition_penalty: float = 1.0, - temperature: float = 0.0, # Custom - top_p: float = 0.99, # Custom - top_k: int = -1, - min_p: float = 0.01, # Custom - seed: int | None = None, - use_beam_search: bool = False, - length_penalty: float = 1.0, - early_stopping: bool | str = False, - stop: str | list[str] | None = None, - stop_token_ids: list[int] | None = None, - include_stop_str_in_output: bool = False, - ignore_eos: bool = False, - max_tokens: int | None = DEFAULT_MAX_TOKENS, # Custom - logprobs: int | None = None, - prompt_logprobs: int | None = None, - skip_special_tokens: bool = True, - spaces_between_special_tokens: bool = False, # Custom - logits_processors: list[LogitsProcessor] | None = None, - ) -> None: - super().__init__( - n=n, - best_of=best_of, - presence_penalty=presence_penalty, - frequency_penalty=frequency_penalty, - repetition_penalty=repetition_penalty, - temperature=temperature, - top_p=top_p, - top_k=top_k, - min_p=min_p, - seed=seed, - use_beam_search=use_beam_search, - length_penalty=length_penalty, - early_stopping=early_stopping, - stop=stop, - stop_token_ids=stop_token_ids, - include_stop_str_in_output=include_stop_str_in_output, - ignore_eos=ignore_eos, - max_tokens=max_tokens, - logprobs=logprobs, - prompt_logprobs=prompt_logprobs, - skip_special_tokens=skip_special_tokens, - spaces_between_special_tokens=spaces_between_special_tokens, - logits_processors=logits_processors, - ) - - def __eq__(self, other): - for p in self._properties: - if getattr(self, p) != getattr(other, p): - return False - - return True - - -class Preset(str, Enum): - problem_solving = "problem_solving" - conversational = "conversational" - fun = "fun" - prose = "prose" - creative = "creative" - business = "business" - deterministic = "deterministic" - code = "code" - multilingual = "multilingual" - - def get_settings(self): - return getattr(self, f"_get_settings_{self.name}", "_get_settings_default")() - - def _get_settings_problem_solving(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.0, - temperature=0, - top_p=1.0, - min_p=0.0, - best_of=10, - top_k=-1, - use_beam_search=True, - length_penalty=1.0, - seed=None, - ) - - def _get_settings_conversational(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.02, - temperature=0.7, - top_p=0.99, - min_p=0.01, - best_of=1, - top_k=-1, - use_beam_search=False, - length_penalty=1.0, - seed=None, - ) - - def _get_settings_fun(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.05, - temperature=1.2, - top_p=1.0, - min_p=0.015, - best_of=2, - top_k=-1, - use_beam_search=False, - length_penalty=1.0, - seed=None, - ) - - def _get_settings_prose(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.025, - temperature=0.9, - top_p=1.0, - min_p=0.02, - best_of=2, - top_k=50, - use_beam_search=False, - length_penalty=1.0, - seed=None, - ) - - def _get_settings_creative(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.1, - temperature=1.2, - top_p=1.0, - min_p=0.02, - best_of=3, - top_k=10, - use_beam_search=False, - length_penalty=1.0, - seed=None, - ) - - def _get_settings_business(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.1, - repetition_penalty=1.1, - temperature=0.5, - top_p=0.98, - min_p=0.05, - best_of=2, - top_k=5, - use_beam_search=False, - length_penalty=1.0, - seed=1, - ) - - def _get_settings_deterministic(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.0, - temperature=0.0, - top_p=1.0, - min_p=0.0, - best_of=1, - top_k=-1, - use_beam_search=False, - length_penalty=1.0, - seed=1, - ) - - def _get_settings_code(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.0, - temperature=0, - top_p=1.0, - min_p=0.0, - best_of=3, - top_k=-1, - use_beam_search=True, - length_penalty=1.0, - seed=1, - ) - - def _get_settings_multilingual(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.0, - temperature=None, - top_p=None, - min_p=None, - best_of=1, - top_k=-1, - use_beam_search=False, - length_penalty=1.0, - seed=None, - ) - - def _get_settings_default(self): - return dict( - n=1, - presence_penalty=0.0, - frequency_penalty=0.0, - repetition_penalty=1.0, - temperature=0.0, - top_p=0.99, - min_p=0.01, - best_of=1, - top_k=-1, - use_beam_search=False, - length_penalty=1.0, - seed=None, - ) - - -class ChatCompletionRequest(ChatCompletionRequest): - model_config = ConfigDict(extra="forbid", validate_assignment=True) - - functions: list[FunctionDef] | None = None - function_call: RequestFunctionCall | None = None - tools: list[Tool] | None = None - tool_choice: ToolChoice | None = None - response_format: ResponseFormat | None = None - messages: ChatML - - spaces_between_special_tokens: bool | None = False # Custom - max_tokens: int | None = DEFAULT_MAX_TOKENS # Custom - temperature: float | None = 0.0 # Custom - frequency_penalty: float | None = 0.01 # Custom - top_p: float | None = 0.99 # Custom - min_p: float | None = 0.01 # Custom - preset: Preset | None = None - - def to_sampling_params(self) -> SamplingParams: - settings = dict( - n=self.n or 1, - presence_penalty=self.presence_penalty or 0.0, - frequency_penalty=self.frequency_penalty or 0.0, - repetition_penalty=self.repetition_penalty or 1.0, - temperature=self.temperature or 0.0, - top_p=self.top_p or 0.99, - min_p=self.min_p or 0.01, - best_of=self.best_of, - top_k=self.top_k or -1, - use_beam_search=self.use_beam_search or False, - length_penalty=self.length_penalty or 1.0, - seed=self.seed, - ) - if self.preset is not None: - settings = self.preset.get_settings() - - echo_without_generation = self.echo and self.max_tokens == 0 - - if self.logit_bias is not None: - raise ValueError("logit_bias is not supported currently.") - - return SamplingParams( - stop=self.stop, - stop_token_ids=self.stop_token_ids, - max_tokens=( - (self.max_tokens or DEFAULT_MAX_TOKENS) - if not echo_without_generation - else 1 - ), - ignore_eos=self.ignore_eos or False, - skip_special_tokens=self.skip_special_tokens or True, - spaces_between_special_tokens=self.spaces_between_special_tokens or False, - include_stop_str_in_output=self.include_stop_str_in_output or False, - **settings, - ) - - @validator("max_tokens") - def set_max_tokens(cls, max_tokens): - return max_tokens if max_tokens is not None else DEFAULT_MAX_TOKENS - - @validator("stream") - def set_stream(cls, stream): - return stream or False - - -class CompletionRequest(CompletionRequest): - model_config = ConfigDict(extra="forbid") - - spaces_between_special_tokens: bool | None = False # Custom - max_tokens: int | None = DEFAULT_MAX_TOKENS # Custom - temperature: float | None = 0.0 # Custom - frequency_penalty: float | None = 0.01 # Custom - top_p: float | None = 0.99 # Custom - min_p: float | None = 0.01 # Custom - preset: Preset | None = None - - def to_sampling_params(self) -> SamplingParams: - echo_without_generation = self.echo and self.max_tokens == 0 - - if self.logit_bias is not None: - raise ValueError("logit_bias is not supported currently.") - - settings = dict( - n=self.n or 1, - presence_penalty=self.presence_penalty or 0.0, - frequency_penalty=self.frequency_penalty or 0.0, - repetition_penalty=self.repetition_penalty or 1.0, - temperature=self.temperature or 0.0, - top_p=self.top_p or 0.99, - min_p=self.min_p or 0.01, - best_of=self.best_of, - top_k=self.top_k or -1, - use_beam_search=self.use_beam_search or False, - length_penalty=self.length_penalty or 1.0, - seed=self.seed, - ) - if self.preset is not None: - settings = self.preset.get_settings() - - return SamplingParams( - stop=self.stop, - stop_token_ids=self.stop_token_ids, - ignore_eos=self.ignore_eos or False, - max_tokens=( - (self.max_tokens or DEFAULT_MAX_TOKENS) - if not echo_without_generation - else 1 - ), - logprobs=self.logprobs, - prompt_logprobs=self.logprobs if self.echo else None, - skip_special_tokens=self.skip_special_tokens or True, - spaces_between_special_tokens=self.spaces_between_special_tokens or False, - include_stop_str_in_output=self.include_stop_str_in_output or False, - **settings, - ) - - -class ChatCompletionResponse(ChatCompletionResponse): - choices: list[ChatCompletionResponseChoice] diff --git a/model-serving/model_api/tokens.py b/model-serving/model_api/tokens.py deleted file mode 100644 index 7dfcb95ab..000000000 --- a/model-serving/model_api/tokens.py +++ /dev/null @@ -1,17 +0,0 @@ -bos_token = "" -eos_token = "<|im_end|>" - -bos_token_id: int = 1 # -eos_token_id: int = 32000 # <|im_end|> - -tag_ids_map = { - "me": [528], - "function_call": [908, 28730, 2845], - "thought": [1654], - "situation": [4620], - "person": [1338], - "functions": [5572], - "information": [1871], -} - -tag_start_id_map = {tag: ids[0] for tag, ids in tag_ids_map.items()} diff --git a/model-serving/model_api/utils.py b/model-serving/model_api/utils.py deleted file mode 100644 index 1b495e538..000000000 --- a/model-serving/model_api/utils.py +++ /dev/null @@ -1,101 +0,0 @@ -import re -import string -import random -from typing import AsyncIterator, Any - -from interegular.patterns import _ParsePattern -from lmformatenforcer import CharacterLevelParser -from lmformatenforcer.integrations.vllm import ( - build_vllm_logits_processor, -) -from lmformatenforcer.integrations.transformers import ( - build_token_enforcer_tokenizer_data, -) -from lmformatenforcer import TokenEnforcerTokenizerData -from pydantic import BaseModel -from vllm import LLM -from vllm.outputs import RequestOutput - -from .protocol import SamplingParams -from .conversion.datatypes import ChatML - - -ListOrStrList = str | list[str] - -remove_last_space_re = re.compile(r"[^ ]+ {1}$") - - -def build_vllm_token_enforcer_tokenizer_data(tokenizer) -> TokenEnforcerTokenizerData: - # In some vLLM versions the tokenizer is wrapped in a TokenizerGroup - if tokenizer.__class__.__name__ == "TokenizerGroup": - tokenizer = tokenizer.tokenizer # noqa - return build_token_enforcer_tokenizer_data(tokenizer) - - -def vllm_with_character_level_parser( - engine: LLM, - tokenizer, - prompt: ListOrStrList, - sampling_params: SamplingParams, - request_id: str, - parser: CharacterLevelParser | None = None, -) -> AsyncIterator[RequestOutput]: - tokenizer_data = build_vllm_token_enforcer_tokenizer_data(tokenizer) - - if parser: - logits_processor = build_vllm_logits_processor(tokenizer_data, parser) - sampling_params.logits_processors = [logits_processor] - - return engine.generate(prompt, sampling_params, request_id) - - -class FunctionCallResult(BaseModel): - name: str - arguments: dict[str, Any] - - -def rescale_temperature( - temperature: float, - scaling_factor: float, - power: float = 1.0, -) -> float: - return (temperature**power) * scaling_factor - - -def validate_interegular_regex(pattern: str) -> bool: - try: - _ParsePattern(pattern).parse() - return True - except Exception: - return False - - -def random_tool_id(n: int = 8) -> str: - return "tool-" + "".join(random.choices(string.digits, k=n)) - - -def remove_last_space(prompt: str): - if remove_last_space_re.search(prompt): - return prompt[:-1] - - return prompt - - -def flatten(lst): - result = [] - for i in lst: - if isinstance(i, list): - result.extend(flatten(i)) - else: - result.append(i) - - return result - - -def escape_special_tokens(messages: ChatML, tokens: list[str]): - for m in messages: - if m.content is None: - continue - - for t in tokens: - m.content = m.content.replace(t, f"{t[0]} {t[1:]}") diff --git a/model-serving/model_api/web.py b/model-serving/model_api/web.py deleted file mode 100644 index 701eae3c9..000000000 --- a/model-serving/model_api/web.py +++ /dev/null @@ -1,1022 +0,0 @@ -import argparse -import asyncio -from contextlib import suppress -from http import HTTPStatus -import json -import logging -import time -from typing import AsyncGenerator, Annotated - -from aioprometheus.asgi.starlette import metrics -from fastapi.middleware.cors import CORSMiddleware -from fastapi import FastAPI, BackgroundTasks, Request, Depends -from fastapi.responses import Response, JSONResponse, StreamingResponse -from fastapi.exceptions import RequestValidationError -from jsonschema.exceptions import ValidationError -from pydantic import ValidationError as PydanticValidationError -from lmformatenforcer import JsonSchemaParser -from pydantic import UUID4 -import sentry_sdk - -# from vllm.engine.metrics import add_global_metrics_labels -from vllm.engine.arg_utils import AsyncEngineArgs -from vllm.engine.async_llm_engine import AsyncLLMEngine -from vllm.utils import random_uuid -from vllm.transformers_utils.tokenizer import get_tokenizer -from vllm.entrypoints.openai.protocol import ( - CompletionResponse, - CompletionResponseChoice, - CompletionResponseStreamChoice, - CompletionStreamResponse, - ErrorResponse, - LogProbs, - ModelCard, - ModelList, - ModelPermission, - UsageInfo, -) -from vllm.outputs import RequestOutput - -from .conversion.conversions import to_prompt, parse_message -from .conversion.datatypes import ChatMLMessage - -from .conversion.exceptions import ( - InvalidPromptException, - InvalidFunctionName, -) -from .logger import logger -from .env import ( - sentry_dsn, - temperature_scaling_factor, - temperature_scaling_power, -) - -from .metrics import ( - tokens_per_user_metric, - generation_time_metric, - generated_tokens_per_second_metric, - MetricsMiddleware, -) -from .dependencies.auth import get_api_key -from .dependencies.developer import get_developer_id, get_developer_email -from .dependencies.exceptions import InvalidHeaderFormat -from .utils import ( - vllm_with_character_level_parser, - FunctionCallResult, - rescale_temperature, - random_tool_id, - remove_last_space, - escape_special_tokens, - flatten, -) -from .protocol import ( - CompletionRequest, - ChatCompletionRequest, - ChatCompletionStreamResponse, - ChatCompletionResponseChoice, - ChatCompletionResponseStreamChoice, - ChatMessage, - DeltaMessage, - Type, - ToolCall, - NamedToolChoice, - FunctionCall, - ChatCompletionResponse, -) -from .logits_processors import drop_disallowed_start_tags, fix_function_call_prediction - - -DEFAULT_BOS = "<|im_start|>" -DEFAULT_EOS = "<|im_end|>" - - -engine = None -engine_model_config = None -tokenizer = None -served_model = None - - -model_settings = { - "julep-ai/samantha-1-turbo": { - "section_start_tag": "<|im_start|>", - "section_end_tag": "<|im_end|>", - } -} - - -if not sentry_dsn: - print("Sentry DSN not found. Sentry will not be enabled.") -else: - sentry_sdk.init( - dsn=sentry_dsn, - enable_tracing=True, - ) - - -class EndpointFilter(logging.Filter): - def __init__(self, endpoints: list[str], *args, **kwargs): - super().__init__(*args, **kwargs) - self._endpoints = endpoints - - def filter(self, record: logging.LogRecord) -> bool: - return all([record.getMessage().find(e) == -1 for e in self._endpoints]) - - -logging.getLogger("uvicorn.access").addFilter( - EndpointFilter(["/docs", "/status", "/metrics"]), -) - - -app = FastAPI(dependencies=[Depends(get_api_key)]) - - -TIMEOUT_KEEP_ALIVE = 30 # seconds. -AGENT_NAME = "Samantha" - - -# QUESTION: Can we have a detailed explanation of the logprobs creation process? -def create_logprobs( - token_ids: list[int], - id_logprobs: list[dict[int, float]], - initial_text_offset: int = 0, -) -> LogProbs: - """Create OpenAI-style logprobs.""" - logprobs = LogProbs() - last_token_len = 0 - for token_id, id_logprob in zip(token_ids, id_logprobs): - token = tokenizer.convert_ids_to_tokens(token_id) - logprobs.tokens.append(token) - logprobs.token_logprobs.append(id_logprob[token_id]) - if len(logprobs.text_offset) == 0: - logprobs.text_offset.append(initial_text_offset) - else: - logprobs.text_offset.append(logprobs.text_offset[-1] + last_token_len) - last_token_len = len(token) - - logprobs.top_logprobs.append( - {tokenizer.convert_ids_to_tokens(i): p for i, p in id_logprob.items()} - ) - return logprobs - - -# QUESTION: Please clarify how the maximum context length is determined for different model configurations. -async def check_length(request, prompt, model_config): - if hasattr(model_config.hf_config, "max_sequence_length"): - context_len = model_config.hf_config.max_sequence_length - elif hasattr(model_config.hf_config, "seq_length"): - context_len = model_config.hf_config.seq_length - elif hasattr(model_config.hf_config, "max_position_embeddings"): - context_len = model_config.hf_config.max_position_embeddings - elif hasattr(model_config.hf_config, "seq_length"): - context_len = model_config.hf_config.seq_length - else: - context_len = 2048 - - input_ids = tokenizer(prompt).input_ids - token_num = len(input_ids) - - if token_num + request.max_tokens > context_len: - return create_error_response( - HTTPStatus.BAD_REQUEST, - f"This model's maximum context length is {context_len} tokens. " - f"However, you requested {request.max_tokens + token_num} tokens " - f"({token_num} in the messages, " - f"{request.max_tokens} in the completion). " - f"Please reduce the length of the messages or completion.", - ) - else: - return None - - -@app.exception_handler(InvalidPromptException) -async def invalid_prompt_exception_handler( - request: Request, exc: InvalidPromptException -): - return JSONResponse( - status_code=400, - content={"error": {"message": str(exc), "code": "invalid prompt"}}, - ) - - -@app.exception_handler(json.decoder.JSONDecodeError) -async def json_decode_error_handler( - request: Request, exc: json.decoder.JSONDecodeError -): - return JSONResponse( - status_code=400, - content={"error": {"message": str(exc), "code": "invalid json input"}}, - ) - - -@app.exception_handler(InvalidFunctionName) -async def invalid_function_name_handler(request: Request, exc: InvalidFunctionName): - return JSONResponse( - status_code=400, - content={"error": {"message": str(exc), "code": "invalid function call"}}, - ) - - -@app.exception_handler(ValidationError) -async def validation_error_handler(request: Request, exc: ValidationError): - return JSONResponse( - status_code=400, - content={"error": {"message": str(exc), "code": "invalid functions parameter"}}, - ) - - -@app.exception_handler(PydanticValidationError) -async def pydantic_validation_error_handler( - request: Request, exc: PydanticValidationError -): - return JSONResponse( - status_code=400, - content={ - "error": {"message": str(exc), "code": "invalid request parameter(s)"} - }, - ) - - -@app.exception_handler(InvalidHeaderFormat) -async def invalid_dev_header_error_handler(request: Request, exc: InvalidHeaderFormat): - return JSONResponse( - status_code=400, - content={ - "error": { - "message": "The API key used has invalid metadata. Please contact support for fixing this issue", - "code": "invalid API key", - } - }, - ) - - -def create_error_response( - status_code: HTTPStatus, - message: str, -) -> JSONResponse: - return JSONResponse( - ErrorResponse( - message=message, - type="invalid_request_error", - code=status_code.value, - ).dict(), - status_code=status_code.value, - ) - - -@app.exception_handler(RequestValidationError) -async def validation_exception_handler(request, exc): # pylint: disable=unused-argument - return create_error_response(HTTPStatus.BAD_REQUEST, str(exc)) - - -async def check_model(request) -> JSONResponse | None: - if request.model == served_model: - return - ret = create_error_response( - HTTPStatus.NOT_FOUND, - f"The model `{request.model}` does not exist.", - ) - return ret - - -@app.get("/v1/models") -async def show_available_models(): - """Show available models. Right now we only have one model.""" - model_cards = [ - ModelCard( - id=served_model, - root=served_model, - permission=[ModelPermission()], - ) - ] - return ModelList(data=model_cards) - - -def _write_metrics( - total_gen_time: float, - total_tokens: float, - developer: UUID4 | None = None, - email: UUID4 | None = None, -): - developer = str(developer) - email = str(email) - generation_time_metric.set({"developer": developer, "email": email}, total_gen_time) - tokens_per_user_metric.add({"developer": developer, "email": email}, total_tokens) - generated_tokens_per_second_metric.set( - {"developer": developer, "email": email}, total_tokens / total_gen_time - ) - - -# QUESTION: Could the logic for handling unsupported features (echo, suffix, logit_bias) be simplified or modularized? -@app.post("/v1/completions") -async def completions( - raw_request: Request, - background_tasks: BackgroundTasks, - x_developer_id: Annotated[UUID4 | None, Depends(get_developer_id)] = None, - x_developer_email: Annotated[UUID4 | None, Depends(get_developer_email)] = None, -) -> Response: - """Completion API similar to OpenAI's API. - - See https://platform.openai.com/docs/api-reference/completions/create - for the API specification. This API mimics the OpenAI Completion API. - - NOTE: Currently we do not support the following features: - - echo (since the vLLM engine does not currently support - getting the logprobs of prompt tokens) - - suffix (the language models we currently support do not support - suffix) - - logit_bias (to be supported by vLLM engine) - """ - request = CompletionRequest(**await raw_request.json()) - logger.info(f"Received completion request: {request}") - - error_check_ret = await check_model(request) - if error_check_ret is not None: - return error_check_ret - - if request.echo: - # We do not support echo since the vLLM engine does not - # currently support getting the logprobs of prompt tokens. - return create_error_response( - HTTPStatus.BAD_REQUEST, "echo is not currently supported" - ) - - if request.suffix is not None: - # The language models we currently support do not support suffix. - return create_error_response( - HTTPStatus.BAD_REQUEST, "suffix is not currently supported" - ) - - if request.logit_bias is not None: - # TODO: support logit_bias in vLLM engine. - return create_error_response( - HTTPStatus.BAD_REQUEST, "logit_bias is not currently supported" - ) - - model_name = request.model - request_id = f"cmpl-{random_uuid()}" - if isinstance(request.prompt, list): - if len(request.prompt) == 0: - return create_error_response( - HTTPStatus.BAD_REQUEST, "please provide at least one prompt" - ) - if len(request.prompt) > 1: - return create_error_response( - HTTPStatus.BAD_REQUEST, - "multiple prompts in a batch is not currently supported", - ) - prompt = request.prompt[0] - else: - prompt = request.prompt - created_time = int(time.time()) - - try: - sampling_params = request.to_sampling_params() - except ValueError as e: - return create_error_response(HTTPStatus.BAD_REQUEST, str(e)) - - # Rescale the temperature - sampling_params.temperature = rescale_temperature( - sampling_params.temperature, - temperature_scaling_factor, - power=temperature_scaling_power, # Set it to lower than 1.0 to punish high temperatures more - ) - - prompt = remove_last_space(prompt) - - bos = model_settings.get(request.model, {}).get("section_start_tag", DEFAULT_BOS) - if prompt.endswith(bos): - if sampling_params.logits_processors is None: - sampling_params.logits_processors = [] - - sampling_params.logits_processors.append(drop_disallowed_start_tags) - - result_generator = engine.generate( - prompt, - sampling_params, - request_id, - ) - - # Similar to the OpenAI API, when n != best_of, we do not stream the - # results. In addition, we do not stream the results when use beam search. - stream = ( - request.stream - and (request.best_of is None or request.n == request.best_of) - and not request.use_beam_search - ) - - async def abort_request() -> None: - await engine.abort(request_id) - - def create_stream_response_json( - index: int, - text: str, - logprobs: LogProbs | None = None, - finish_reason: str | None = None, - ) -> str: - choice_data = CompletionResponseStreamChoice( - index=index, - text=text, - logprobs=logprobs, - finish_reason=finish_reason, - ) - response = CompletionStreamResponse( - id=request_id, - created=created_time, - model=model_name, - choices=[choice_data], - ) - response_json = response.json() - - return response_json - - async def completion_stream_generator() -> AsyncGenerator[str, None]: - previous_texts = [""] * request.n - previous_num_tokens = [0] * request.n - start = time.time() - async for res in result_generator: - res: RequestOutput - for output in res.outputs: - i = output.index - delta_text = output.text[len(previous_texts[i]) :] - if request.logprobs is not None: - logprobs = create_logprobs( - output.token_ids[previous_num_tokens[i] :], - output.logprobs[previous_num_tokens[i] :], - len(previous_texts[i]), - ) - else: - logprobs = None - previous_texts[i] = output.text - previous_num_tokens[i] = len(output.token_ids) - response_json = create_stream_response_json( - index=i, - text=delta_text, - logprobs=logprobs, - ) - yield f"data: {response_json}\n\n" - if output.finish_reason is not None: - logprobs = LogProbs() if request.logprobs is not None else None - response_json = create_stream_response_json( - index=i, - text="", - logprobs=logprobs, - finish_reason=output.finish_reason, - ) - yield f"data: {response_json}\n\n" - - total_gen_time = time.time() - start - total_tokens = sum(previous_num_tokens) - background_tasks.add_task( - _write_metrics, - total_gen_time, - total_tokens, - x_developer_id, - x_developer_email, - ) - - yield "data: [DONE]\n\n" - - # Streaming response - if stream: - background_tasks = BackgroundTasks() - # Abort the request if the client disconnects. - background_tasks.add_task(abort_request) - return StreamingResponse( - completion_stream_generator(), - media_type="text/event-stream", - background=background_tasks, - ) - - # Non-streaming response - final_res: RequestOutput = None - start = time.time() - async for res in result_generator: - if await raw_request.is_disconnected(): - # Abort the request if the client disconnects. - await abort_request() - return create_error_response(HTTPStatus.BAD_REQUEST, "Client disconnected") - final_res = res - - tokens_gen_time = time.time() - start - - assert final_res is not None - choices = [] - for output in final_res.outputs: - if request.logprobs is not None: - logprobs = create_logprobs(output.token_ids, output.logprobs) - else: - logprobs = None - choice_data = CompletionResponseChoice( - index=output.index, - text=output.text, - logprobs=logprobs, - finish_reason=output.finish_reason, - ) - choices.append(choice_data) - - num_prompt_tokens = len(final_res.prompt_token_ids) - num_generated_tokens = sum(len(output.token_ids) for output in final_res.outputs) - total_tokens = num_prompt_tokens + num_generated_tokens - - background_tasks.add_task( - _write_metrics, - tokens_gen_time, - total_tokens, - x_developer_id, - x_developer_email, - ) - - usage = UsageInfo( - prompt_tokens=num_prompt_tokens, - completion_tokens=num_generated_tokens, - total_tokens=total_tokens, - ) - - response = CompletionResponse( - id=request_id, - created=created_time, - model=model_name, - choices=choices, - usage=usage, - ) - - if request.stream: - # When user requests streaming but we don't stream, we still need to - # return a streaming response with a single event. - response_json = response.json() - - async def fake_stream_generator() -> AsyncGenerator[str, None]: - yield f"data: {response_json}\n\n" - yield "data: [DONE]\n\n" - - return StreamingResponse( - fake_stream_generator(), media_type="text/event-stream" - ) - - return response - - -# QUESTION: How does the chat completion process differ from the standard completion process, and why are certain features unsupported here? -@app.post("/v1/chat/completions") -async def chat_completions( - raw_request: Request, - background_tasks: BackgroundTasks, - x_developer_id: Annotated[UUID4 | None, Depends(get_developer_id)] = None, - x_developer_email: Annotated[UUID4 | None, Depends(get_developer_email)] = None, -) -> Response: - """Completion API similar to OpenAI's API. - - See https://platform.openai.com/docs/api-reference/chat/create - for the API specification. This API mimics the OpenAI ChatCompletion API. - - NOTE: Currently we do not support the following features: - - function_call (Users should implement this by themselves) - - logit_bias (to be supported by vLLM engine) - """ - request = ChatCompletionRequest(**await raw_request.json()) - logger.info(f"Received chat completion request: {request}") - - error_check_ret = await check_model(request) - if error_check_ret is not None: - return error_check_ret - - if request.logit_bias is not None: - # TODO: support logit_bias in vLLM engine. - return create_error_response( - HTTPStatus.BAD_REQUEST, - "logit_bias is not currently supported", - ) - - append_fcall_prefix = False - - if request.functions and request.tools: - raise InvalidPromptException("can not accept both 'functions' and 'tools'") - - if request.tools: - request.functions = [ - t.function for t in request.tools if t.type == Type.function - ] - - request.function_call = ( - request.tool_choice.function - if isinstance(request.tool_choice, NamedToolChoice) - else request.tool_choice - ) - - bos = model_settings.get(request.model, {}).get("section_start_tag", DEFAULT_BOS) - eos = model_settings.get(request.model, {}).get("section_end_tag", DEFAULT_EOS) - - if ( - request.messages - and request.messages[0].role != "system" - and request.messages[0].name not in (None, "situation") - ): - request.messages.insert( - 0, - ChatMLMessage( - name="situation", - role="system", - content="You are a helpful AI Assistant", - ), - ) - - escape_special_tokens( - request.messages, - flatten(engine.engine.tokenizer.tokenizer.special_tokens_map.values()), - ) - - prompt = remove_last_space( - to_prompt( - request.messages, - bos=bos, - eos=eos, - functions=request.functions, - function_call=request.function_call, - ) - ) - - if ( - request.functions - and request.function_call - and request.function_call not in ("none", "auto", None) - ): - with suppress(IndexError): - if prompt.split("\n")[-1].startswith('{"name":'): - append_fcall_prefix = True - - # prompt = await get_gen_prompt(request) - error_check_ret = await check_length(request, prompt, engine_model_config) - if error_check_ret is not None: - return error_check_ret - - model_name = request.model - request_id = f"cmpl-{random_uuid()}" - created_time = int(time.time()) - - try: - sampling_params = request.to_sampling_params() - except ValueError as e: - return create_error_response(HTTPStatus.BAD_REQUEST, str(e)) - - # Rescale the temperature - sampling_params.temperature = rescale_temperature( - sampling_params.temperature, - temperature_scaling_factor, - power=temperature_scaling_power, # Set it to lower than 1.0 to punish high temperatures more - ) - - if prompt.endswith(bos): - func_call_possible = ( - request.functions and request.function_call != "none" - ) or (request.tools and request.tool_choice != "none") - if sampling_params.logits_processors is None: - sampling_params.logits_processors = [] - - sampling_params.logits_processors.append( - fix_function_call_prediction - if func_call_possible - else drop_disallowed_start_tags - ) - - if ( - request.response_format is not None - and request.response_format.type_ == "json_object" - ): - result_generator = vllm_with_character_level_parser( - engine, - tokenizer, - prompt, - sampling_params, - request_id, - parser=JsonSchemaParser( - ( - FunctionCallResult.model_json_schema() - if request.function_call is not None - and request.function_call not in ("none", "auto") - else {} - ), - ), - ) - - else: - result_generator = engine.generate( - prompt, - sampling_params, - request_id, - ) - - async def abort_request() -> None: - await engine.abort(request_id) - - def create_stream_response_json( - index: int, - text: str, - role: str = "assistant", - name: str | None = None, - finish_reason: str | None = None, - is_function_call: bool | None = None, - is_tool_call: bool | None = None, - ) -> str: - choice_data = ChatCompletionResponseStreamChoice( - index=index, - delta=DeltaMessage( - role=role, - content=text if not (is_function_call or is_tool_call) else None, - name=name, - function_call=text if is_function_call else None, - tool_calls=( - [ - ToolCall( - id=random_tool_id(), - type="function", - function=text, - ) - ] - if is_tool_call - else None - ), - ), - finish_reason=finish_reason, - ) - response = ChatCompletionStreamResponse( - id=request_id, - created=created_time, - model=model_name, - choices=[choice_data], - ) - response_json = response.json() - - return response_json - - async def completion_stream_generator() -> AsyncGenerator[str, None]: - previous_texts = [""] * request.n - previous_num_tokens = [0] * request.n - start = time.time() - role = "assistant" - name = None - is_function_call = False - is_tool_call = False - async for res in result_generator: - res: RequestOutput - for idx, output in enumerate(res.outputs): - i = output.index - delta_text = output.text[len(previous_texts[i]) :] - if not idx: - if append_fcall_prefix: - delta_text = f"""function_call\n{delta_text}""" - - msg = parse_message(delta_text) - role = msg.role or "assistant" - name = msg.name - is_function_call = bool( - request.functions and msg.function_call and not request.tools - ) - is_tool_call = bool(request.tools and msg.function_call) - - for i in range(request.n): - choice_data = ChatCompletionResponseStreamChoice( - index=i, - delta=DeltaMessage(role=role), - finish_reason=None, - ) - chunk = ChatCompletionStreamResponse( - id=request_id, choices=[choice_data], model=model_name - ) - data = chunk.json(exclude_unset=True) - yield f"data: {data}\n\n" - - previous_texts[i] = output.text - previous_num_tokens[i] = len(output.token_ids) - response_json = create_stream_response_json( - index=i, - text=delta_text, - role=role, - name=name, - is_function_call=is_function_call, - is_tool_call=is_tool_call, - ) - yield f"data: {response_json}\n\n" - if output.finish_reason is not None: - finish_reason = output.finish_reason - if is_function_call: - finish_reason = "function_call" - if is_tool_call: - finish_reason = "tool_calls" - response_json = create_stream_response_json( - index=i, - text="", - role=role, - name=name, - finish_reason=finish_reason, - is_function_call=is_function_call, - is_tool_call=is_tool_call, - ) - yield f"data: {response_json}\n\n" - - total_gen_time = time.time() - start - total_tokens = sum(previous_num_tokens) - - background_tasks.add_task( - _write_metrics, - total_gen_time, - total_tokens, - x_developer_id, - x_developer_email, - ) - - yield "data: [DONE]\n\n" - - # Streaming response - if request.stream: - background_tasks = BackgroundTasks() - # Abort the request if the client disconnects. - background_tasks.add_task(abort_request) - return StreamingResponse( - completion_stream_generator(), - media_type="text/event-stream", - background=background_tasks, - ) - - # Non-streaming response - final_res: RequestOutput = None - start = time.time() - async for res in result_generator: - if await raw_request.is_disconnected(): - # Abort the request if the client disconnects. - await abort_request() - return create_error_response(HTTPStatus.BAD_REQUEST, "Client disconnected") - final_res = res - - tokens_gen_time = time.time() - start - - assert final_res is not None - choices = [] - for output in final_res.outputs: - msg = parse_message( - output.text - if not append_fcall_prefix - else f"""function_call\n{output.text}""" - ) - finish_reason = output.finish_reason - is_function_call = bool( - request.functions and msg.function_call and not request.tools - ) - is_tool_call = bool(request.tools and msg.function_call) - if is_function_call: - finish_reason = "function_call" - if is_tool_call: - finish_reason = "tool_calls" - - func_name = ( - request.function_call.name - if isinstance(request.function_call, FunctionCall) - else request.function_call or "" - ) - tool_func_name = ( - request.tool_choice.function - if isinstance(request.tool_choice, NamedToolChoice) - else request.tool_choice or "" - ) - choice_data = ChatCompletionResponseChoice( - index=output.index, - message=ChatMessage( - role=msg.role or "assistant", - name=msg.name, - content=( - None if is_function_call or is_tool_call else msg.content or "" - ), - function_call=( - f'{{"name": "{func_name}",{msg.function_call or ""}' - if is_function_call - else None - ), - tool_calls=( - [ - ToolCall( - id=random_tool_id(), - type="function", - function=f'{{"name": "{tool_func_name}",{msg.function_call or ""}', - ) - ] - if is_tool_call - else None - ), - ), - finish_reason=finish_reason, - ) - choices.append(choice_data) - - num_prompt_tokens = len(final_res.prompt_token_ids) - num_generated_tokens = sum(len(output.token_ids) for output in final_res.outputs) - total_tokens = num_prompt_tokens + num_generated_tokens - usage = UsageInfo( - prompt_tokens=num_prompt_tokens, - completion_tokens=num_generated_tokens, - total_tokens=total_tokens, - ) - - background_tasks.add_task( - _write_metrics, - tokens_gen_time, - total_tokens, - x_developer_id, - x_developer_email, - ) - - response = ChatCompletionResponse( - id=request_id, - created=created_time, - model=model_name, - choices=choices, - usage=usage, - ) - - if request.stream: - # When user requests streaming but we don't stream, we still need to - # return a streaming response with a single event. - response_json = response.json() - - async def fake_stream_generator() -> AsyncGenerator[str, None]: - yield f"data: {response_json}\n\n" - yield "data: [DONE]\n\n" - - return StreamingResponse( - fake_stream_generator(), - media_type="text/event-stream", - ) - - return response - - -@app.get("/status") -async def status(): - return {"status": "ok"} - - -@app.post("/me") -async def me(): - return {"status": "ok"} - - -# QUESTION: How does the MetricsMiddleware work, and what metrics are being excluded from tracking? -app.add_middleware( - MetricsMiddleware, - exclude_paths=["/metrics", "/docs", "/status"], -) - -app.add_route("/metrics", metrics) - - -# QUESTION: Please explain the CORS policy applied here and its implications for cross-origin requests. -app.add_middleware( - CORSMiddleware, - allow_credentials=True, - allow_origins="*", - allow_methods="*", - allow_headers="*", -) - - -# QUESTION: Can we have an explanation on how the app configuration is dynamically set based on command-line arguments? -def create_app(args=None): - global engine, engine_model_config, tokenizer, served_model - - parser = argparse.ArgumentParser( - description="vLLM OpenAI-Compatible RESTful API server." - ) - parser.add_argument("--host", type=str, default=None, help="host name") - parser.add_argument("--port", type=int, default=8000, help="port number") - parser.add_argument( - "--log-stats", type=bool, default=True, help="log stats metrics" - ) - parser.add_argument( - "--served-model-name", - type=str, - default=None, - help="The model name used in the API. If not " - "specified, the model name will be the same as " - "the huggingface name.", - ) - - parser = AsyncEngineArgs.add_cli_args(parser) - args = parser.parse_args(args=args) - - logger.info(f"args: {args}") - - if args.served_model_name is not None: - served_model = args.served_model_name - else: - served_model = args.model - - engine_args = AsyncEngineArgs.from_cli_args(args) - engine = AsyncLLMEngine.from_engine_args(engine_args) - engine_model_config = asyncio.run(engine.get_model_config()) - - # A separate tokenizer to map token IDs to strings. - tokenizer = get_tokenizer( - engine_args.tokenizer, - tokenizer_mode=engine_args.tokenizer_mode, - trust_remote_code=engine_args.trust_remote_code, - ) - - return app diff --git a/model-serving/poetry.lock b/model-serving/poetry.lock deleted file mode 100644 index da70c4133..000000000 --- a/model-serving/poetry.lock +++ /dev/null @@ -1,6270 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. - -[[package]] -name = "aiohttp" -version = "3.9.4" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"}, - {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"}, - {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"}, - {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"}, - {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"}, - {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"}, - {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"}, - {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"}, - {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"}, - {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"}, - {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"}, - {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"}, - {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"}, - {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"}, - {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"}, - {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"}, - {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"}, - {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"}, - {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"}, - {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"}, - {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"}, - {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"}, - {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"}, - {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"}, - {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"}, - {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"}, - {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} -attrs = ">=17.3.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] - -[[package]] -name = "aioprometheus" -version = "23.12.0" -description = "A Prometheus Python client library for asyncio-based applications" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "aioprometheus-23.12.0-py3-none-any.whl", hash = "sha256:b1a77259131153ef820b494e76439b278434eaf2a5e25dc0b8bf3d835f455960"}, -] - -[package.dependencies] -orjson = "*" -quantile-python = ">=1.1" -starlette = {version = ">=0.14.2", optional = true, markers = "extra == \"starlette\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.3.2)"] -quart = ["quart (>=0.15.1)"] -starlette = ["starlette (>=0.14.2)"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "aiostream" -version = "0.5.2" -description = "Generator-based operators for asynchronous iteration" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiostream-0.5.2-py3-none-any.whl", hash = "sha256:054660370be9d37f6fe3ece3851009240416bd082e469fd90cc8673d3818cf71"}, - {file = "aiostream-0.5.2.tar.gz", hash = "sha256:b71b519a2d66c38f0872403ab86417955b77352f08d9ad02ad46fc3926b389f4"}, -] - -[package.dependencies] -typing-extensions = "*" - -[[package]] -name = "annotated-types" -version = "0.6.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, -] - -[[package]] -name = "anyio" -version = "4.3.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "argon2-cffi" -version = "23.1.0" -description = "Argon2 for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, -] - -[package.dependencies] -argon2-cffi-bindings = "*" - -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -description = "Low-level CFFI bindings for Argon2" -optional = false -python-versions = ">=3.6" -files = [ - {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, - {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, - {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, - {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, - {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, -] - -[package.dependencies] -cffi = ">=1.0.1" - -[package.extras] -dev = ["cogapp", "pre-commit", "pytest", "wheel"] -tests = ["pytest"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - -[[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" -optional = false -python-versions = "*" -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[package.dependencies] -six = ">=1.12.0" - -[package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-lru" -version = "2.0.4" -description = "Simple LRU cache for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "attrs" -version = "23.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] - -[[package]] -name = "babel" -version = "2.14.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "beartype" -version = "0.18.2" -description = "Unbearably fast runtime type checking in pure Python." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "beartype-0.18.2-py3-none-any.whl", hash = "sha256:561aa7858e92289b952a6fc5faf15ea32f9519c07cdc0f4df7a01b59fc4bbeaf"}, - {file = "beartype-0.18.2.tar.gz", hash = "sha256:a6fbc0be9269889312388bfec6a9ddf41bf8fe31b68bcf9c8239db35cd38f411"}, -] - -[package.extras] -all = ["typing-extensions (>=3.10.0.0)"] -dev = ["autoapi (>=0.9.0)", "coverage (>=5.5)", "equinox", "mypy (>=0.800)", "numpy", "pandera", "pydata-sphinx-theme (<=0.7.2)", "pytest (>=4.0.0)", "sphinx", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)", "tox (>=3.20.1)", "typing-extensions (>=3.10.0.0)"] -doc-rtd = ["autoapi (>=0.9.0)", "pydata-sphinx-theme (<=0.7.2)", "sphinx (>=4.2.0,<6.0.0)", "sphinxext-opengraph (>=0.7.5)"] -test-tox = ["equinox", "mypy (>=0.800)", "numpy", "pandera", "pytest (>=4.0.0)", "sphinx", "typing-extensions (>=3.10.0.0)"] -test-tox-coverage = ["coverage (>=5.5)"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "black" -version = "24.4.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436"}, - {file = "black-24.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf"}, - {file = "black-24.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad"}, - {file = "black-24.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb"}, - {file = "black-24.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8"}, - {file = "black-24.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745"}, - {file = "black-24.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070"}, - {file = "black-24.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397"}, - {file = "black-24.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2"}, - {file = "black-24.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33"}, - {file = "black-24.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965"}, - {file = "black-24.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd"}, - {file = "black-24.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1"}, - {file = "black-24.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8"}, - {file = "black-24.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d"}, - {file = "black-24.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3"}, - {file = "black-24.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665"}, - {file = "black-24.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6"}, - {file = "black-24.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e"}, - {file = "black-24.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702"}, - {file = "black-24.4.0-py3-none-any.whl", hash = "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e"}, - {file = "black-24.4.0.tar.gz", hash = "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "cffi" -version = "1.16.0" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "cloudpickle" -version = "3.0.0" -description = "Pickler class to extend the standard pickle.Pickler functionality" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, - {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -optional = false -python-versions = ">=3.8" -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[package.dependencies] -traitlets = ">=4" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "contourpy" -version = "1.2.1" -description = "Python library for calculating contours of 2D quadrilateral grids" -optional = false -python-versions = ">=3.9" -files = [ - {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, - {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, - {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, - {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, - {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, - {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, - {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, - {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, - {file = "contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, - {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, - {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, -] - -[package.dependencies] -numpy = ">=1.20" - -[package.extras] -bokeh = ["bokeh", "selenium"] -docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] -test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] - -[[package]] -name = "cupy-cuda12x" -version = "12.1.0" -description = "CuPy: NumPy & SciPy for GPU" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cupy_cuda12x-12.1.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a3dbc76ce0f697a943061ddd2c47bc2138bc23ab56a020f1f5ff9141861b5245"}, - {file = "cupy_cuda12x-12.1.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:c432a0020f6d6e3399149e73128a9a581c29e4f996a4b63614811c47a82cf44e"}, - {file = "cupy_cuda12x-12.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a559f42db28ed10aea9642b9084dcb31860b46243714a464089daffe6c0a7e8f"}, - {file = "cupy_cuda12x-12.1.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:60a71296f8530a65e7fb693635f6d5963557789a34a42a7d8ca9f31b40c35920"}, - {file = "cupy_cuda12x-12.1.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:752a49c40654311d53a8953d8c16f7e216e10e8514599a476ea80c6f6b2b0163"}, - {file = "cupy_cuda12x-12.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:89a2f089cb04783dcfbca1c4e82338953fb933f1e6d838ec50713b9b8bd0a9c8"}, - {file = "cupy_cuda12x-12.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d81dfdc7f6f47c392f24aa504e3b64732eb76a90b1e7ca31ad7265371be0ac42"}, - {file = "cupy_cuda12x-12.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:dc479d0397bb196a62c05322c0ff81a57af4dbbd020a7fbbb4b0843c35f61c27"}, - {file = "cupy_cuda12x-12.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ea86c085fca8e41579aced5a5fef45cc2dd90c270e030c32213cea2c471bb40"}, - {file = "cupy_cuda12x-12.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b52144ebbb3e1de1ca3da8c18b7c61066ac1f6d82e6252b7ea37ad11c66b5c3a"}, - {file = "cupy_cuda12x-12.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1ecd027d279553a9e170c3724f9d1eb091dbf81b1eabbd2165add0da5d68a5bc"}, - {file = "cupy_cuda12x-12.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:259fccac3eeca4b9a04e1d2d32a1f79e74436d2b299a6b6bea7b84435c1dad0e"}, -] - -[package.dependencies] -fastrlock = ">=0.5" -numpy = ">=1.20,<1.27" - -[package.extras] -all = ["Cython (>=0.29.22,<3)", "optuna (>=2.0)", "scipy (>=1.6,<1.13)"] -stylecheck = ["autopep8 (==1.5.5)", "flake8 (==3.8.4)", "mypy (==0.950)", "pbr (==5.5.1)", "pycodestyle (==2.6.0)", "types-setuptools (==57.4.14)"] -test = ["hypothesis (>=6.37.2,<6.55.0)", "pytest (>=7.2)"] - -[[package]] -name = "cycler" -version = "0.12.1" -description = "Composable style cycles" -optional = false -python-versions = ">=3.8" -files = [ - {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, - {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, -] - -[package.extras] -docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] -tests = ["pytest", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "datasets" -version = "2.18.0" -description = "HuggingFace community-driven open-source library of datasets" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "datasets-2.18.0-py3-none-any.whl", hash = "sha256:f1bbf0e2896917a914de01cbd37075b14deea3837af87ad0d9f697388ccaeb50"}, - {file = "datasets-2.18.0.tar.gz", hash = "sha256:cdf8b8c6abf7316377ba4f49f9589a4c74556d6b481afd0abd2284f3d69185cb"}, -] - -[package.dependencies] -aiohttp = "*" -dill = ">=0.3.0,<0.3.9" -filelock = "*" -fsspec = {version = ">=2023.1.0,<=2024.2.0", extras = ["http"]} -huggingface-hub = ">=0.19.4" -multiprocess = "*" -numpy = ">=1.17" -packaging = "*" -pandas = "*" -pyarrow = ">=12.0.0" -pyarrow-hotfix = "*" -pyyaml = ">=5.1" -requests = ">=2.19.0" -tqdm = ">=4.62.1" -xxhash = "*" - -[package.extras] -apache-beam = ["apache-beam (>=2.26.0)"] -audio = ["librosa", "soundfile (>=0.12.1)"] -benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] -dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] -docs = ["s3fs", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "transformers"] -jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"] -metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] -quality = ["ruff (>=0.3.0)"] -s3 = ["s3fs"] -tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] -tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"] -torch = ["torch"] -vision = ["Pillow (>=6.2.1)"] - -[[package]] -name = "debugpy" -version = "1.8.1" -description = "An implementation of the Debug Adapter Protocol for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, - {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, - {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, - {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, - {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, - {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, - {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, - {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, - {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, - {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, - {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, - {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, - {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, - {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, - {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, - {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, - {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, - {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, - {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, - {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, - {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, - {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "dill" -version = "0.3.8" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "diskcache" -version = "5.6.3" -description = "Disk Cache -- Disk and file backed persistent cache." -optional = false -python-versions = ">=3" -files = [ - {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, - {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, -] - -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "email-validator" -version = "2.1.1" -description = "A robust email address syntax and deliverability validation library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, - {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - -[[package]] -name = "environs" -version = "10.3.0" -description = "simplified environment variable parsing" -optional = false -python-versions = ">=3.8" -files = [ - {file = "environs-10.3.0-py3-none-any.whl", hash = "sha256:feeaf28f17fd0499f9cd7c0fcf408c6d82c308e69e335eb92d09322fc9ed8138"}, - {file = "environs-10.3.0.tar.gz", hash = "sha256:cc421ddb143fa30183568164755aa113a160e555cd19e97e664c478662032c24"}, -] - -[package.dependencies] -marshmallow = ">=3.0.0" -python-dotenv = "*" - -[package.extras] -dev = ["environs[lint,tests]", "tox"] -django = ["dj-database-url", "dj-email-url", "django-cache-url"] -lint = ["flake8 (==7.0.0)", "flake8-bugbear (==23.11.28)", "mypy (==1.8.0)", "pre-commit (>=3.6,<4.0)"] -tests = ["environs[django]", "pytest"] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" -optional = false -python-versions = ">=3.5" -files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, -] - -[package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] - -[[package]] -name = "fastapi" -version = "0.110.1" -description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"}, - {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"}, -] - -[package.dependencies] -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" -starlette = ">=0.37.2,<0.38.0" -typing-extensions = ">=4.8.0" - -[package.extras] -all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] - -[[package]] -name = "fastjsonschema" -version = "2.19.1" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "fastrlock" -version = "0.8.2" -description = "Fast, re-entrant optimistic lock implemented in Cython" -optional = false -python-versions = "*" -files = [ - {file = "fastrlock-0.8.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:94e348c72a1fd1f8191f25ea056448e4f5a87b8fbf005b39d290dcb0581a48cd"}, - {file = "fastrlock-0.8.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5595903444c854b99c42122b87edfe8a37cd698a4eae32f4fd1d2a7b6c115d"}, - {file = "fastrlock-0.8.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e4bbde174a0aff5f6eeba75cf8c4c5d2a316316bc21f03a0bddca0fc3659a6f3"}, - {file = "fastrlock-0.8.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7a2ccaf88ac0db153e84305d1ef0aa138cea82c6a88309066f6eaa3bc98636cd"}, - {file = "fastrlock-0.8.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:31a27a2edf482df72b91fe6c6438314d2c65290aa7becc55589d156c9b91f0da"}, - {file = "fastrlock-0.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:e9904b5b37c3e5bb4a245c56bc4b7e497da57ffb8528f4fc39af9dcb168ee2e1"}, - {file = "fastrlock-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:43a241655e83e4603a152192cf022d5ca348c2f4e56dfb02e5c9c4c1a32f9cdb"}, - {file = "fastrlock-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9121a894d74e65557e47e777060a495ab85f4b903e80dd73a3c940ba042920d7"}, - {file = "fastrlock-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:11bbbbc526363955aeddb9eec4cee2a0012322b7b2f15b54f44454fcf4fd398a"}, - {file = "fastrlock-0.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:27786c62a400e282756ae1b090bcd7cfa35f28270cff65a9e7b27a5327a32561"}, - {file = "fastrlock-0.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:08315bde19d0c2e6b06593d5a418be3dc8f9b1ee721afa96867b9853fceb45cf"}, - {file = "fastrlock-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8b49b5743ede51e0bcf6805741f39f5e0e0fd6a172ba460cb39e3097ba803bb"}, - {file = "fastrlock-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b443e73a4dfc7b6e0800ea4c13567b9694358e86f53bb2612a51c9e727cac67b"}, - {file = "fastrlock-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:b3853ed4ce522598dc886160a7bab432a093051af85891fa2f5577c1dcac8ed6"}, - {file = "fastrlock-0.8.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:790fc19bccbd39426060047e53629f171a44745613bf360a045e9f9c8c4a2cea"}, - {file = "fastrlock-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:dbdce852e6bb66e1b8c36679d482971d69d93acf1785657522e51b7de30c3356"}, - {file = "fastrlock-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d47713ffe6d4a627fbf078be9836a95ac106b4a0543e3841572c91e292a5d885"}, - {file = "fastrlock-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:ea96503b918fceaf40443182742b8964d47b65c5ebdea532893cb9479620000c"}, - {file = "fastrlock-0.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c6bffa978793bea5e1b00e677062e53a62255439339591b70e209fa1552d5ee0"}, - {file = "fastrlock-0.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75c07726c8b1a52147fd7987d6baaa318c5dced1416c3f25593e40f56e10755b"}, - {file = "fastrlock-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88f079335e9da631efa64486c8207564a7bcd0c00526bb9e842e9d5b7e50a6cc"}, - {file = "fastrlock-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4fb2e77ff04bc4beb71d63c8e064f052ce5a6ea1e001d528d4d7f4b37d736f2e"}, - {file = "fastrlock-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:b4c9083ea89ab236b06e9ef2263971db3b4b507195fc7d5eecab95828dcae325"}, - {file = "fastrlock-0.8.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:98195866d3a9949915935d40a88e4f1c166e82e378f622c88025f2938624a90a"}, - {file = "fastrlock-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b22ea9bf5f9fad2b0077e944a7813f91593a4f61adf8faf734a70aed3f2b3a40"}, - {file = "fastrlock-0.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc1bf0ac8a194313cf6e645e300a8a379674ceed8e0b1e910a2de3e3c28989e"}, - {file = "fastrlock-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a3dcc876050b8f5cbc0ee84ef1e7f0c1dfe7c148f10098828bc4403683c33f10"}, - {file = "fastrlock-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:685e656048b59d8dfde8c601f188ad53a4d719eb97080cafc8696cda6d75865e"}, - {file = "fastrlock-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:fb5363cf0fddd9b50525ddbf64a1e1b28ec4c6dfb28670a940cb1cf988a6786b"}, - {file = "fastrlock-0.8.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:a74f5a92fa6e51c4f3c69b29c4662088b97be12f40652a21109605a175c81824"}, - {file = "fastrlock-0.8.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccf39ad5702e33e4d335b48ef9d56e21619b529b7f7471b5211419f380329b62"}, - {file = "fastrlock-0.8.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:66f2662c640bb71a1016a031eea6eef9d25c2bcdf7ffd1d1ddc5a58f9a1ced04"}, - {file = "fastrlock-0.8.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:17734e2e5af4c07ddb0fb10bd484e062c22de3be6b67940b9cc6ec2f18fa61ba"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:ab91b0c36e95d42e1041a4907e3eefd06c482d53af3c7a77be7e214cc7cd4a63"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b32fdf874868326351a75b1e4c02f97e802147119ae44c52d3d9da193ec34f5b"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2074548a335fcf7d19ebb18d9208da9e33b06f745754466a7e001d2b1c58dd19"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fb04442b6d1e2b36c774919c6bcbe3339c61b337261d4bd57e27932589095af"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1fed2f4797ad68e9982038423018cf08bec5f4ce9fed63a94a790773ed6a795c"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e380ec4e6d8b26e389713995a43cb7fe56baea2d25fe073d4998c4821a026211"}, - {file = "fastrlock-0.8.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:25945f962c7bd808415cfde3da624d4399d4ea71ed8918538375f16bceb79e1c"}, - {file = "fastrlock-0.8.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c1719ddc8218b01e82fb2e82e8451bd65076cb96d7bef4477194bbb4305a968"}, - {file = "fastrlock-0.8.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5460c5ee6ced6d61ec8cd2324ebbe793a4960c4ffa2131ffff480e3b61c99ec5"}, - {file = "fastrlock-0.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:33145acbad8317584cd64588131c7e1e286beef6280c0009b4544c91fce171d2"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:59344c1d46b7dec97d3f22f1cc930fafe8980b3c5bc9c9765c56738a5f1559e4"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2a1c354f13f22b737621d914f3b4a8434ae69d3027a775e94b3e671756112f9"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:cf81e0278b645004388873e0a1f9e3bc4c9ab8c18e377b14ed1a544be4b18c9a"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1b15430b93d7eb3d56f6ff690d2ebecb79ed0e58248427717eba150a508d1cd7"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:067edb0a0805bf61e17a251d5046af59f6e9d2b8ad01222e0ef7a0b7937d5548"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb31fe390f03f7ae886dcc374f1099ec88526631a4cb891d399b68181f154ff0"}, - {file = "fastrlock-0.8.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:643e1e65b4f5b284427e61a894d876d10459820e93aa1e724dfb415117be24e0"}, - {file = "fastrlock-0.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5dfb78dd600a12f23fc0c3ec58f81336229fdc74501ecf378d1ce5b3f2f313ea"}, - {file = "fastrlock-0.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8ca0fe21458457077e4cb2d81e1ebdb146a00b3e9e2db6180a773f7ea905032"}, - {file = "fastrlock-0.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d918dfe473291e8bfd8e13223ea5cb9b317bd9f50c280923776c377f7c64b428"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:c393af77c659a38bffbca215c0bcc8629ba4299568308dd7e4ff65d62cabed39"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:73426f5eb2ecc10626c67cf86bd0af9e00d53e80e5c67d5ce8e18376d6abfa09"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:320fd55bafee3eb069cfb5d6491f811a912758387ef2193840e2663e80e16f48"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8c1c91a68926421f5ccbc82c85f83bd3ba593b121a46a1b9a554b3f0dd67a4bf"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ad1bc61c7f6b0e58106aaab034916b6cb041757f708b07fbcdd9d6e1ac629225"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:87f4e01b042c84e6090dbc4fbe3415ddd69f6bc0130382323f9d3f1b8dd71b46"}, - {file = "fastrlock-0.8.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d34546ad2e4a480b94b6797bcc5a322b3c705c4c74c3e4e545c4a3841c1b2d59"}, - {file = "fastrlock-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ebb32d776b61acd49f859a1d16b9e3d84e7b46d0d92aebd58acd54dc38e96664"}, - {file = "fastrlock-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30bdbe4662992348132d03996700e1cf910d141d629179b967b146a22942264e"}, - {file = "fastrlock-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:07ed3c7b3867c05a3d6be4ced200c7767000f3431b9be6da66972822dd86e8be"}, - {file = "fastrlock-0.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:ddf5d247f686aec853ddcc9a1234bfcc6f57b0a0670d2ad82fc25d8ae7e6a15f"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7269bb3fc15587b0c191eecd95831d771a7d80f0c48929e560806b038ff3066c"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adcb9e77aa132cc6c9de2ffe7cf880a20aa8cdba21d367d1da1a412f57bddd5d"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:a3b8b5d2935403f1b4b25ae324560e94b59593a38c0d2e7b6c9872126a9622ed"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2587cedbb36c7988e707d83f0f1175c1f882f362b5ebbee25d70218ea33d220d"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9af691a9861027181d4de07ed74f0aee12a9650ac60d0a07f4320bff84b5d95f"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99dd6652bd6f730beadf74ef769d38c6bbd8ee6d1c15c8d138ea680b0594387f"}, - {file = "fastrlock-0.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4d63b6596368dab9e0cc66bf047e7182a56f33b34db141816a4f21f5bf958228"}, - {file = "fastrlock-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ff75c90663d6e8996610d435e71487daa853871ad1770dd83dc0f2fc4997241e"}, - {file = "fastrlock-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e27c3cd27fbd25e5223c5c992b300cd4ee8f0a75c6f222ce65838138d853712c"}, - {file = "fastrlock-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:dd961a32a7182c3891cdebca417fda67496d5d5de6ae636962254d22723bdf52"}, - {file = "fastrlock-0.8.2.tar.gz", hash = "sha256:644ec9215cf9c4df8028d8511379a15d9c1af3e16d80e47f1b6fdc6ba118356a"}, -] - -[[package]] -name = "filelock" -version = "3.13.4" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] - -[[package]] -name = "fire" -version = "0.5.0" -description = "A library for automatically generating command line interfaces." -optional = false -python-versions = "*" -files = [ - {file = "fire-0.5.0.tar.gz", hash = "sha256:a6b0d49e98c8963910021f92bba66f65ab440da2982b78eb1bbf95a0a34aacc6"}, -] - -[package.dependencies] -six = "*" -termcolor = "*" - -[[package]] -name = "fonttools" -version = "4.51.0" -description = "Tools to manipulate font files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, - {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, - {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, - {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, - {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, - {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, - {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, - {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, - {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, - {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, - {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, - {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, - {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, - {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, - {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, -] - -[package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] -graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "pycairo", "scipy"] -lxml = ["lxml (>=4.0)"] -pathops = ["skia-pathops (>=0.5.0)"] -plot = ["matplotlib"] -repacker = ["uharfbuzz (>=0.23.0)"] -symfont = ["sympy"] -type1 = ["xattr"] -ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0)"] -woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] - -[[package]] -name = "fqdn" -version = "1.5.1" -description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -optional = false -python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" -files = [ - {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, - {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.1" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, -] - -[[package]] -name = "fsspec" -version = "2024.2.0" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, - {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, -] - -[package.dependencies] -aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -devel = ["pytest", "pytest-cov"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -tqdm = ["tqdm"] - -[[package]] -name = "grpclib" -version = "0.4.7" -description = "Pure-Python gRPC implementation for asyncio" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpclib-0.4.7.tar.gz", hash = "sha256:2988ef57c02b22b7a2e8e961792c41ccf97efc2ace91ae7a5b0de03c363823c3"}, -] - -[package.dependencies] -h2 = ">=3.1.0,<5" -multidict = "*" - -[package.extras] -protobuf = ["protobuf (>=3.20.0)"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "h2" -version = "4.1.0" -description = "HTTP/2 State-Machine based protocol implementation" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, - {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, -] - -[package.dependencies] -hpack = ">=4.0,<5" -hyperframe = ">=6.0,<7" - -[[package]] -name = "hpack" -version = "4.0.0" -description = "Pure-Python HPACK header compression" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, - {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - -[[package]] -name = "httpx" -version = "0.27.0" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] - -[[package]] -name = "huggingface-hub" -version = "0.22.2" -description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "huggingface_hub-0.22.2-py3-none-any.whl", hash = "sha256:3429e25f38ccb834d310804a3b711e7e4953db5a9e420cc147a5e194ca90fd17"}, - {file = "huggingface_hub-0.22.2.tar.gz", hash = "sha256:32e9a9a6843c92f253ff9ca16b9985def4d80a93fb357af5353f770ef74a81be"}, -] - -[package.dependencies] -filelock = "*" -fsspec = ">=2023.5.0" -packaging = ">=20.9" -pyyaml = ">=5.1" -requests = "*" -tqdm = ">=4.42.1" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] -hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] -tensorflow = ["graphviz", "pydot", "tensorflow"] -tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] -torch = ["safetensors", "torch"] -typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] - -[[package]] -name = "hyperframe" -version = "6.0.1" -description = "HTTP/2 framing layer for Python" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, - {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, -] - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "imbalanced-learn" -version = "0.12.2" -description = "Toolbox for imbalanced dataset in machine learning." -optional = false -python-versions = "*" -files = [ - {file = "imbalanced-learn-0.12.2.tar.gz", hash = "sha256:a80c56cedcb07124f266be62d3a5d2ab5b5779909a7343fdf1b993479662f6c1"}, - {file = "imbalanced_learn-0.12.2-py3-none-any.whl", hash = "sha256:8523b3ee6c10c1d25f6bebe3faa73a0bca28d1fed55e0435b49af22802cdc259"}, -] - -[package.dependencies] -joblib = ">=1.1.1" -numpy = ">=1.17.3" -scikit-learn = ">=1.0.2" -scipy = ">=1.5.0" -threadpoolctl = ">=2.0.0" - -[package.extras] -docs = ["keras (>=2.4.3)", "matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.5.0)", "pandas (>=1.0.5)", "pydata-sphinx-theme (>=0.13.3)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.13.0)", "sphinxcontrib-bibtex (>=2.4.1)", "tensorflow (>=2.4.3)"] -examples = ["keras (>=2.4.3)", "matplotlib (>=3.1.2)", "pandas (>=1.0.5)", "seaborn (>=0.9.0)", "tensorflow (>=2.4.3)"] -optional = ["keras (>=2.4.3)", "pandas (>=1.0.5)", "tensorflow (>=2.4.3)"] -tests = ["black (>=23.3.0)", "flake8 (>=3.8.2)", "keras (>=2.4.3)", "mypy (>=1.3.0)", "pandas (>=1.0.5)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "tensorflow (>=2.4.3)"] - -[[package]] -name = "immutabledict" -version = "4.2.0" -description = "Immutable wrapper around dictionaries (a fork of frozendict)" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "immutabledict-4.2.0-py3-none-any.whl", hash = "sha256:d728b2c2410d698d95e6200237feb50a695584d20289ad3379a439aa3d90baba"}, - {file = "immutabledict-4.2.0.tar.gz", hash = "sha256:e003fd81aad2377a5a758bf7e1086cf3b70b63e9a5cc2f46bce8d0a2b4727c5f"}, -] - -[[package]] -name = "importlab" -version = "0.8.1" -description = "A library to calculate python dependency graphs." -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "importlab-0.8.1-py2.py3-none-any.whl", hash = "sha256:124cfa00e8a34fefe8aac1a5e94f56c781b178c9eb61a1d3f60f7e03b77338d3"}, - {file = "importlab-0.8.1.tar.gz", hash = "sha256:b3893853b1f6eb027da509c3b40e6787e95dd66b4b66f1b3613aad77556e1465"}, -] - -[package.dependencies] -networkx = ">=2" - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "interegular" -version = "0.3.3" -description = "a regex intersection checker" -optional = false -python-versions = ">=3.7" -files = [ - {file = "interegular-0.3.3-py37-none-any.whl", hash = "sha256:b0c07007d48c89d6d19f7204972d369b2a77222722e126b6aa63aa721dc3b19c"}, - {file = "interegular-0.3.3.tar.gz", hash = "sha256:d9b697b21b34884711399ba0f0376914b81899ce670032486d0d048344a76600"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.4" -description = "IPython Kernel for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ipykernel-6.29.4-py3-none-any.whl", hash = "sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da"}, - {file = "ipykernel-6.29.4.tar.gz", hash = "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c"}, -] - -[package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} -comm = ">=0.1.1" -debugpy = ">=1.6.5" -ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" -traitlets = ">=5.4.0" - -[package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] -pyqt5 = ["pyqt5"] -pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "ipympl" -version = "0.9.4" -description = "Matplotlib Jupyter Extension" -optional = false -python-versions = ">=3.9" -files = [ - {file = "ipympl-0.9.4-py3-none-any.whl", hash = "sha256:5b0c08c6f4f6ea655ba58239363457c10fb921557f5038c1a46db4457d6d6b0e"}, - {file = "ipympl-0.9.4.tar.gz", hash = "sha256:cfb53c5b4fcbcee6d18f095eecfc6c6c474303d5b744e72cc66e7a2804708907"}, -] - -[package.dependencies] -ipython = "<9" -ipython-genutils = "*" -ipywidgets = ">=7.6.0,<9" -matplotlib = ">=3.4.0,<4" -numpy = "*" -pillow = "*" -traitlets = "<6" - -[package.extras] -docs = ["myst-nb", "sphinx (>=1.5)", "sphinx-book-theme", "sphinx-copybutton", "sphinx-thebe", "sphinx-togglebutton"] - -[[package]] -name = "ipython" -version = "8.23.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.23.0-py3-none-any.whl", hash = "sha256:07232af52a5ba146dc3372c7bf52a0f890a23edf38d77caef8d53f9cdc2584c1"}, - {file = "ipython-8.23.0.tar.gz", hash = "sha256:7468edaf4f6de3e1b912e57f66c241e6fd3c7099f2ec2136e239e142e800274d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" -typing-extensions = {version = "*", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -optional = false -python-versions = "*" -files = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] - -[[package]] -name = "ipywidgets" -version = "8.1.2" -description = "Jupyter interactive widgets" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, - {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, -] - -[package.dependencies] -comm = ">=0.1.3" -ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.10,<3.1.0" -traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.10,<4.1.0" - -[package.extras] -test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] - -[[package]] -name = "isoduration" -version = "20.11.0" -description = "Operations with ISO 8601 durations" -optional = false -python-versions = ">=3.7" -files = [ - {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, - {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, -] - -[package.dependencies] -arrow = ">=0.15.0" - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." -optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "joblib" -version = "1.4.0" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, - {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, -] - -[[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." -optional = false -python-versions = ">=3.8" -files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - -[[package]] -name = "jsonpointer" -version = "2.4" -description = "Identify specific nodes in a JSON document (RFC 6901)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" -files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, -] - -[[package]] -name = "jsonschema" -version = "4.21.1" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, - {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py = ">=0.7.1" -uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "julep" -version = "0.2.11" -description = "Julep is a platform for creating agents with long-term memory" -optional = false -python-versions = "<3.14,>=3.8" -files = [ - {file = "julep-0.2.11-py3-none-any.whl", hash = "sha256:31c742a8d7bb4e4ae8ded9642dc15e784e4fbd43f1f74386c2a99940014c3810"}, - {file = "julep-0.2.11.tar.gz", hash = "sha256:977eaeb1b2cd3d3192662ee76d544e8270bc8c82638efea066b203f8ec80374f"}, -] - -[package.dependencies] -beartype = ">=0.14.0,<1.0.0" -environs = ">=9.0.0,<11.0.0" -httpx = ">=0.20.0,<1.0.0" -openai = ">=1.0.1,<2.0.0" -pydantic = ">=2.0.1,<3.0.0" -typing-extensions = ">=4.0.0,<5.0.0" - -[[package]] -name = "jupyter-client" -version = "8.6.1" -description = "Jupyter protocol implementation and client libraries" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, - {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, -] - -[package.dependencies] -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -python-dateutil = ">=2.8.2" -pyzmq = ">=23.0" -tornado = ">=6.2" -traitlets = ">=5.3" - -[package.extras] -docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -description = "Jupyter core package. A base package on which Jupyter projects rely." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[package.dependencies] -platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} -traitlets = ">=5.3" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] - -[[package]] -name = "jupyter-events" -version = "0.10.0" -description = "Jupyter Event System library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, - {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, -] - -[package.dependencies] -jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} -python-json-logger = ">=2.0.4" -pyyaml = ">=5.3" -referencing = "*" -rfc3339-validator = "*" -rfc3986-validator = ">=0.1.1" -traitlets = ">=5.3" - -[package.extras] -cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] - -[[package]] -name = "jupyter-lsp" -version = "2.2.5" -description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, - {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, -] - -[package.dependencies] -jupyter-server = ">=1.1.2" - -[[package]] -name = "jupyter-server" -version = "2.14.0" -description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server-2.14.0-py3-none-any.whl", hash = "sha256:fb6be52c713e80e004fac34b35a0990d6d36ba06fd0a2b2ed82b899143a64210"}, - {file = "jupyter_server-2.14.0.tar.gz", hash = "sha256:659154cea512083434fd7c93b7fe0897af7a2fd0b9dd4749282b42eaac4ae677"}, -] - -[package.dependencies] -anyio = ">=3.1.0" -argon2-cffi = ">=21.1" -jinja2 = ">=3.0.3" -jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = ">=0.4.4" -nbconvert = ">=6.4.4" -nbformat = ">=5.3.0" -overrides = ">=5.0" -packaging = ">=22.0" -prometheus-client = ">=0.9" -pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} -pyzmq = ">=24" -send2trash = ">=1.8.2" -terminado = ">=0.8.3" -tornado = ">=6.2.0" -traitlets = ">=5.6.0" -websocket-client = ">=1.7" - -[package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.3" -description = "A Jupyter Server Extension Providing Terminals." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, - {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, -] - -[package.dependencies] -pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} -terminado = ">=0.8.3" - -[package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] - -[[package]] -name = "jupyterlab" -version = "4.1.6" -description = "JupyterLab computational environment" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab-4.1.6-py3-none-any.whl", hash = "sha256:cf3e862bc10dbf4331e4eb37438634f813c238cfc62c71c640b3b3b2caa089a8"}, - {file = "jupyterlab-4.1.6.tar.gz", hash = "sha256:7935f36ba26eb615183a4f5c2bbca5791b5108ce2a00b5505f8cfd100d53648e"}, -] - -[package.dependencies] -async-lru = ">=1.0.0" -httpx = ">=0.25.0" -ipykernel = ">=6.5.0" -jinja2 = ">=3.0.3" -jupyter-core = "*" -jupyter-lsp = ">=2.0.0" -jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.19.0,<3" -notebook-shim = ">=0.2" -packaging = "*" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} -tornado = ">=6.2.0" -traitlets = "*" - -[package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.2.0)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.2.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.1)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post6)", "matplotlib (==3.8.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.0)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] -test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] -upgrade-extension = ["copier (>=8.0,<9.0)", "jinja2-time (<0.3)", "pydantic (<2.0)", "pyyaml-include (<2.0)", "tomli-w (<2.0)"] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, -] - -[[package]] -name = "jupyterlab-server" -version = "2.26.0" -description = "A set of server components for JupyterLab and JupyterLab like applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "jupyterlab_server-2.26.0-py3-none-any.whl", hash = "sha256:54622cbd330526a385ee0c1fdccdff3a1e7219bf3e864a335284a1270a1973df"}, - {file = "jupyterlab_server-2.26.0.tar.gz", hash = "sha256:9b3ba91cf2837f7f124fca36d63f3ca80ace2bed4898a63dd47e6598c1ab006f"}, -] - -[package.dependencies] -babel = ">=2.10" -jinja2 = ">=3.0.3" -json5 = ">=0.9.0" -jsonschema = ">=4.18.0" -jupyter-server = ">=1.21,<3" -packaging = ">=21.3" -requests = ">=2.31" - -[package.extras] -docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] -openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.10" -description = "Jupyter interactive widgets for JupyterLab" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, - {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, -] - -[[package]] -name = "kiwisolver" -version = "1.4.5" -description = "A fast implementation of the Cassowary constraint solver" -optional = false -python-versions = ">=3.7" -files = [ - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, - {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, -] - -[[package]] -name = "lark" -version = "1.1.9" -description = "a modern parsing library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "lark-1.1.9-py3-none-any.whl", hash = "sha256:a0dd3a87289f8ccbb325901e4222e723e7d745dbfc1803eaf5f3d2ace19cf2db"}, - {file = "lark-1.1.9.tar.gz", hash = "sha256:15fa5236490824c2c4aba0e22d2d6d823575dcaf4cdd1848e34b6ad836240fba"}, -] - -[package.extras] -atomic-cache = ["atomicwrites"] -interegular = ["interegular (>=0.3.1,<0.4.0)"] -nearley = ["js2py"] -regex = ["regex"] - -[[package]] -name = "libcst" -version = "1.3.1" -description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.12 programs." -optional = false -python-versions = ">=3.9" -files = [ - {file = "libcst-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:de93193cba6d54f2a4419e94ba2de642b111f52e4fa01bb6e2c655914585f65b"}, - {file = "libcst-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2d64d86dcd6c80a5dac2e243c5ed7a7a193242209ac33bad4b0639b24f6d131"}, - {file = "libcst-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db084f7bbf825c7bd5ed256290066d0336df6a7dc3a029c9870a64cd2298b87f"}, - {file = "libcst-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16880711be03a1f5da7028fe791ba5b482a50d830225a70272dc332dfd927652"}, - {file = "libcst-1.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:189bb28c19c5dd3c64583f969b72f7732dbdb1dee9eca3acc85099e4cef9148b"}, - {file = "libcst-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:181372386c986e3de07d7a93f269214cd825adc714f1f9da8252b44f05e181c4"}, - {file = "libcst-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c2020f7449270e3ff0bdc481ae244d812f2d9a8b7dbff0ea66b830f4b350f54"}, - {file = "libcst-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:be3bf9aaafebda6a21e241e819f0ab67e186e898c3562704e41241cf8738353a"}, - {file = "libcst-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a0d250fb6a2c1d158f30d25ba5e33e3ed3672d2700d480dd47beffd1431a008"}, - {file = "libcst-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad5741b251d901f3da1819ac539192230cc6f8f81aaf04eb4ec0009c1c97285"}, - {file = "libcst-1.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b740dc0c3d1adbd91442fb878343d5a11e23a0e3ac5484be301fd8d148bcb085"}, - {file = "libcst-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9e6bc95fa7dde79cde63a34a0412cd4a3d9fcc27be781a590f8c45c840c83658"}, - {file = "libcst-1.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4186076ce12141609ce950d61867b2a73ea199a7a9870dbafa76ad600e075b3c"}, - {file = "libcst-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ed52a1a2fe4d8603de51649db5e438317b8116ebb9fc09ec68703535fe6c1c8"}, - {file = "libcst-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0886a9963597367b227345f19b24931b3ed6a4703fff237760745f90f0e6a20"}, - {file = "libcst-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:904c4cc5c801a5747e64b43e0accc87c67a4c804842d977ee215872c4cf8cf88"}, - {file = "libcst-1.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cdb7e8a118b60e064a02f6cbfa4d328212a3a115d125244495190f405709d5f"}, - {file = "libcst-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:431badf0e544b79c0ac9682dbd291ff63ddbc3c3aca0d13d3cc7a10c3a9db8a2"}, - {file = "libcst-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:701f5335e4fd566871497b9af1e871c98e1ef10c30b3b244f39343d709213401"}, - {file = "libcst-1.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c6e709623b68ca9148e8ecbdc145f7b83befb26032e4bf6a8122500ba558b17"}, - {file = "libcst-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ede0f026a82b03b33a559ec566860085ece2e76d8f9bc21cb053eedf9cde8c79"}, - {file = "libcst-1.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c12b7b01d8745f82dd86a82acd2a9f8e8e7d6c94ddcfda996896e83d1a8d5c42"}, - {file = "libcst-1.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2995ca687118a9d3d41876f7270bc29305a2d402f4b8c81a3cff0aeee6d4c81"}, - {file = "libcst-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:2dbac1ac0a9d59ea7bbc3f87cdcca5bfe98835e31c668e95cb6f3d907ffc53fc"}, - {file = "libcst-1.3.1.tar.gz", hash = "sha256:03b1df1ae02456f1d465fcd5ead1d0d454bb483caefd8c8e6bde515ffdb53d1b"}, -] - -[package.dependencies] -pyyaml = ">=5.2" - -[package.extras] -dev = ["Sphinx (>=5.1.1)", "black (==23.12.1)", "build (>=0.10.0)", "coverage (>=4.5.4)", "fixit (==2.1.0)", "flake8 (==7.0.0)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<1.5)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.18)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.5.1)", "usort (==1.0.8.post1)"] - -[[package]] -name = "llvmlite" -version = "0.42.0" -description = "lightweight wrapper around basic LLVM functionality" -optional = false -python-versions = ">=3.9" -files = [ - {file = "llvmlite-0.42.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3366938e1bf63d26c34fbfb4c8e8d2ded57d11e0567d5bb243d89aab1eb56098"}, - {file = "llvmlite-0.42.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c35da49666a21185d21b551fc3caf46a935d54d66969d32d72af109b5e7d2b6f"}, - {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70f44ccc3c6220bd23e0ba698a63ec2a7d3205da0d848804807f37fc243e3f77"}, - {file = "llvmlite-0.42.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f8d8717a9073b9e0246998de89929071d15b47f254c10eef2310b9aac033d"}, - {file = "llvmlite-0.42.0-cp310-cp310-win_amd64.whl", hash = "sha256:8d90edf400b4ceb3a0e776b6c6e4656d05c7187c439587e06f86afceb66d2be5"}, - {file = "llvmlite-0.42.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ae511caed28beaf1252dbaf5f40e663f533b79ceb408c874c01754cafabb9cbf"}, - {file = "llvmlite-0.42.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81e674c2fe85576e6c4474e8c7e7aba7901ac0196e864fe7985492b737dbab65"}, - {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb3975787f13eb97629052edb5017f6c170eebc1c14a0433e8089e5db43bcce6"}, - {file = "llvmlite-0.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5bece0cdf77f22379f19b1959ccd7aee518afa4afbd3656c6365865f84903f9"}, - {file = "llvmlite-0.42.0-cp311-cp311-win_amd64.whl", hash = "sha256:7e0c4c11c8c2aa9b0701f91b799cb9134a6a6de51444eff5a9087fc7c1384275"}, - {file = "llvmlite-0.42.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:08fa9ab02b0d0179c688a4216b8939138266519aaa0aa94f1195a8542faedb56"}, - {file = "llvmlite-0.42.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b2fce7d355068494d1e42202c7aff25d50c462584233013eb4470c33b995e3ee"}, - {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebe66a86dc44634b59a3bc860c7b20d26d9aaffcd30364ebe8ba79161a9121f4"}, - {file = "llvmlite-0.42.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d47494552559e00d81bfb836cf1c4d5a5062e54102cc5767d5aa1e77ccd2505c"}, - {file = "llvmlite-0.42.0-cp312-cp312-win_amd64.whl", hash = "sha256:05cb7e9b6ce69165ce4d1b994fbdedca0c62492e537b0cc86141b6e2c78d5888"}, - {file = "llvmlite-0.42.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdd3888544538a94d7ec99e7c62a0cdd8833609c85f0c23fcb6c5c591aec60ad"}, - {file = "llvmlite-0.42.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0936c2067a67fb8816c908d5457d63eba3e2b17e515c5fe00e5ee2bace06040"}, - {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a78ab89f1924fc11482209f6799a7a3fc74ddc80425a7a3e0e8174af0e9e2301"}, - {file = "llvmlite-0.42.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7599b65c7af7abbc978dbf345712c60fd596aa5670496561cc10e8a71cebfb2"}, - {file = "llvmlite-0.42.0-cp39-cp39-win_amd64.whl", hash = "sha256:43d65cc4e206c2e902c1004dd5418417c4efa6c1d04df05c6c5675a27e8ca90e"}, - {file = "llvmlite-0.42.0.tar.gz", hash = "sha256:f92b09243c0cc3f457da8b983f67bd8e1295d0f5b3746c7a1861d7a99403854a"}, -] - -[[package]] -name = "lm-format-enforcer" -version = "0.8.3" -description = "Enforce the output format (JSON Schema, Regex etc) of a language model" -optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "lm_format_enforcer-0.8.3-py3-none-any.whl", hash = "sha256:d12364a90d8dd3a824c3b6a2cdbc8ec40dd6fff12f5e566100f4a9e79a847092"}, - {file = "lm_format_enforcer-0.8.3.tar.gz", hash = "sha256:3f9eac9f14af43ec77ebe99b251793bbbbffd3566dacffadd4a217d8be90760c"}, -] - -[package.dependencies] -interegular = ">=0.3.2" -pydantic = ">=1.10.8" - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.21.1" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -optional = false -python-versions = ">=3.8" -files = [ - {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, - {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, -] - -[package.dependencies] -packaging = ">=17.0" - -[package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] - -[[package]] -name = "matplotlib" -version = "3.8.4" -description = "Python plotting package" -optional = false -python-versions = ">=3.9" -files = [ - {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, - {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, - {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, - {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, - {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, - {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, - {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, - {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, - {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, - {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, - {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, - {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, - {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, - {file = "matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, - {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, - {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, - {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, - {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, - {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, - {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, - {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, - {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, - {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, - {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, - {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -kiwisolver = ">=1.3.1" -numpy = ">=1.21" -packaging = ">=20.0" -pillow = ">=8" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mistune" -version = "3.0.2" -description = "A sane and fast Markdown parser with useful plugins and renderers" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, -] - -[[package]] -name = "modal" -version = "0.62.68" -description = "Python client library for Modal" -optional = false -python-versions = ">=3.8" -files = [ - {file = "modal-0.62.68-py3-none-any.whl", hash = "sha256:8d8e2032825e4cce68161b5620c20290fc339186733b91f1666018670e8f3a78"}, -] - -[package.dependencies] -aiohttp = "*" -aiostream = ">=0.5.2,<0.6.0" -certifi = "*" -click = ">=8.1.0" -fastapi = "*" -grpclib = "0.4.7" -protobuf = ">=3.19,<4.24.0 || >4.24.0,<6.0" -rich = ">=12.0.0" -synchronicity = ">=0.6.5,<0.7.0" -toml = "*" -typer = ">=0.9.0,<0.10.0" -types-certifi = "*" -types-toml = "*" -typing-extensions = ">=4.6,<5.0" -watchfiles = "*" - -[[package]] -name = "mplcursors" -version = "0.5.3" -description = "Interactive data selection cursors for Matplotlib." -optional = false -python-versions = ">=3.7" -files = [ - {file = "mplcursors-0.5.3.tar.gz", hash = "sha256:cb220c8dffaafde195e138faec5654737267f7685d1f0503618cdc333aca0966"}, -] - -[package.dependencies] -matplotlib = ">=3.1,<3.7.1 || >3.7.1" - -[package.extras] -docs = ["pandas", "pydata_sphinx_theme (!=0.10.1)", "sphinx", "sphinx-gallery"] - -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - -[[package]] -name = "msgpack" -version = "1.0.8" -description = "MessagePack serializer" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, -] - -[[package]] -name = "msgspec" -version = "0.18.6" -description = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML." -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgspec-0.18.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f"}, - {file = "msgspec-0.18.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd"}, - {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177"}, - {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410"}, - {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a"}, - {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4"}, - {file = "msgspec-0.18.6-cp310-cp310-win_amd64.whl", hash = "sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7"}, - {file = "msgspec-0.18.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f"}, - {file = "msgspec-0.18.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa"}, - {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b"}, - {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07"}, - {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c"}, - {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492"}, - {file = "msgspec-0.18.6-cp311-cp311-win_amd64.whl", hash = "sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4"}, - {file = "msgspec-0.18.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c"}, - {file = "msgspec-0.18.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1"}, - {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466"}, - {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca"}, - {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57"}, - {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6"}, - {file = "msgspec-0.18.6-cp312-cp312-win_amd64.whl", hash = "sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0"}, - {file = "msgspec-0.18.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7d9faed6dfff654a9ca7d9b0068456517f63dbc3aa704a527f493b9200b210a"}, - {file = "msgspec-0.18.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9da21f804c1a1471f26d32b5d9bc0480450ea77fbb8d9db431463ab64aaac2cf"}, - {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46eb2f6b22b0e61c137e65795b97dc515860bf6ec761d8fb65fdb62aa094ba61"}, - {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8355b55c80ac3e04885d72db515817d9fbb0def3bab936bba104e99ad22cf46"}, - {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9080eb12b8f59e177bd1eb5c21e24dd2ba2fa88a1dbc9a98e05ad7779b54c681"}, - {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc001cf39becf8d2dcd3f413a4797c55009b3a3cdbf78a8bf5a7ca8fdb76032c"}, - {file = "msgspec-0.18.6-cp38-cp38-win_amd64.whl", hash = "sha256:fac5834e14ac4da1fca373753e0c4ec9c8069d1fe5f534fa5208453b6065d5be"}, - {file = "msgspec-0.18.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:974d3520fcc6b824a6dedbdf2b411df31a73e6e7414301abac62e6b8d03791b4"}, - {file = "msgspec-0.18.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd62e5818731a66aaa8e9b0a1e5543dc979a46278da01e85c3c9a1a4f047ef7e"}, - {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7481355a1adcf1f08dedd9311193c674ffb8bf7b79314b4314752b89a2cf7f1c"}, - {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6aa85198f8f154cf35d6f979998f6dadd3dc46a8a8c714632f53f5d65b315c07"}, - {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e24539b25c85c8f0597274f11061c102ad6b0c56af053373ba4629772b407be"}, - {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c61ee4d3be03ea9cd089f7c8e36158786cd06e51fbb62529276452bbf2d52ece"}, - {file = "msgspec-0.18.6-cp39-cp39-win_amd64.whl", hash = "sha256:b5c390b0b0b7da879520d4ae26044d74aeee5144f83087eb7842ba59c02bc090"}, - {file = "msgspec-0.18.6.tar.gz", hash = "sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e"}, -] - -[package.extras] -dev = ["attrs", "coverage", "furo", "gcovr", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli-w"] -doc = ["furo", "ipython", "sphinx", "sphinx-copybutton", "sphinx-design"] -test = ["attrs", "msgpack", "mypy", "pyright", "pytest", "pyyaml", "tomli", "tomli-w"] -toml = ["tomli", "tomli-w"] -yaml = ["pyyaml"] - -[[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] - -[[package]] -name = "multiprocess" -version = "0.70.16" -description = "better multiprocessing and multithreading in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, - {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, - {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, - {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, - {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, - {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, - {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, -] - -[package.dependencies] -dill = ">=0.3.8" - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "nbclient" -version = "0.10.0" -description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "7.16.3" -description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbconvert-7.16.3-py3-none-any.whl", hash = "sha256:ddeff14beeeedf3dd0bc506623e41e4507e551736de59df69a91f86700292b3b"}, - {file = "nbconvert-7.16.3.tar.gz", hash = "sha256:a6733b78ce3d47c3f85e504998495b07e6ea9cf9bf6ec1c98dda63ec6ad19142"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -bleach = "!=5.0.0" -defusedxml = "*" -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -markupsafe = ">=2.0" -mistune = ">=2.0.3,<4" -nbclient = ">=0.5.0" -nbformat = ">=5.7" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.1" - -[package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] -docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert[qtpng]"] -qtpng = ["pyqtwebengine (>=5.15)"] -serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] -webpdf = ["playwright"] - -[[package]] -name = "nbformat" -version = "5.10.4" -description = "The Jupyter Notebook format" -optional = false -python-versions = ">=3.8" -files = [ - {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, - {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, -] - -[package.dependencies] -fastjsonschema = ">=2.15" -jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -traitlets = ">=5.1" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["pep440", "pre-commit", "pytest", "testpath"] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" -optional = false -python-versions = ">=3.5" -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "networkx" -version = "3.1" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.8" -files = [ - {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, - {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, -] - -[package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "ninja" -version = "1.11.1.1" -description = "Ninja is a small build system with a focus on speed" -optional = false -python-versions = "*" -files = [ - {file = "ninja-1.11.1.1-py2.py3-none-macosx_10_9_universal2.macosx_10_9_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:376889c76d87b95b5719fdd61dd7db193aa7fd4432e5d52d2e44e4c497bdbbee"}, - {file = "ninja-1.11.1.1-py2.py3-none-manylinux1_i686.manylinux_2_5_i686.whl", hash = "sha256:ecf80cf5afd09f14dcceff28cb3f11dc90fb97c999c89307aea435889cb66877"}, - {file = "ninja-1.11.1.1-py2.py3-none-manylinux1_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:84502ec98f02a037a169c4b0d5d86075eaf6afc55e1879003d6cab51ced2ea4b"}, - {file = "ninja-1.11.1.1-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:73b93c14046447c7c5cc892433d4fae65d6364bec6685411cb97a8bcf815f93a"}, - {file = "ninja-1.11.1.1-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18302d96a5467ea98b68e1cae1ae4b4fb2b2a56a82b955193c637557c7273dbd"}, - {file = "ninja-1.11.1.1-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:aad34a70ef15b12519946c5633344bc775a7656d789d9ed5fdb0d456383716ef"}, - {file = "ninja-1.11.1.1-py2.py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:d491fc8d89cdcb416107c349ad1e3a735d4c4af5e1cb8f5f727baca6350fdaea"}, - {file = "ninja-1.11.1.1-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:7563ce1d9fe6ed5af0b8dd9ab4a214bf4ff1f2f6fd6dc29f480981f0f8b8b249"}, - {file = "ninja-1.11.1.1-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:9df724344202b83018abb45cb1efc22efd337a1496514e7e6b3b59655be85205"}, - {file = "ninja-1.11.1.1-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:3e0f9be5bb20d74d58c66cc1c414c3e6aeb45c35b0d0e41e8d739c2c0d57784f"}, - {file = "ninja-1.11.1.1-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:76482ba746a2618eecf89d5253c0d1e4f1da1270d41e9f54dfbd91831b0f6885"}, - {file = "ninja-1.11.1.1-py2.py3-none-win32.whl", hash = "sha256:fa2ba9d74acfdfbfbcf06fad1b8282de8a7a8c481d9dee45c859a8c93fcc1082"}, - {file = "ninja-1.11.1.1-py2.py3-none-win_amd64.whl", hash = "sha256:95da904130bfa02ea74ff9c0116b4ad266174fafb1c707aa50212bc7859aebf1"}, - {file = "ninja-1.11.1.1-py2.py3-none-win_arm64.whl", hash = "sha256:185e0641bde601e53841525c4196278e9aaf4463758da6dd1e752c0a0f54136a"}, - {file = "ninja-1.11.1.1.tar.gz", hash = "sha256:9d793b08dd857e38d0b6ffe9e6b7145d7c485a42dcfea04905ca0cdb6017cc3c"}, -] - -[package.extras] -test = ["codecov (>=2.0.5)", "coverage (>=4.2)", "flake8 (>=3.0.4)", "pytest (>=4.5.0)", "pytest-cov (>=2.7.1)", "pytest-runner (>=5.1)", "pytest-virtualenv (>=1.7.0)", "virtualenv (>=15.0.3)"] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -description = "A shim layer for notebook traits and config" -optional = false -python-versions = ">=3.7" -files = [ - {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, - {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, -] - -[package.dependencies] -jupyter-server = ">=1.8,<3" - -[package.extras] -test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] - -[[package]] -name = "numba" -version = "0.59.1" -description = "compiling Python code using LLVM" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numba-0.59.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97385a7f12212c4f4bc28f648720a92514bee79d7063e40ef66c2d30600fd18e"}, - {file = "numba-0.59.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b77aecf52040de2a1eb1d7e314497b9e56fba17466c80b457b971a25bb1576d"}, - {file = "numba-0.59.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3476a4f641bfd58f35ead42f4dcaf5f132569c4647c6f1360ccf18ee4cda3990"}, - {file = "numba-0.59.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:525ef3f820931bdae95ee5379c670d5c97289c6520726bc6937a4a7d4230ba24"}, - {file = "numba-0.59.1-cp310-cp310-win_amd64.whl", hash = "sha256:990e395e44d192a12105eca3083b61307db7da10e093972ca285c85bef0963d6"}, - {file = "numba-0.59.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43727e7ad20b3ec23ee4fc642f5b61845c71f75dd2825b3c234390c6d8d64051"}, - {file = "numba-0.59.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:411df625372c77959570050e861981e9d196cc1da9aa62c3d6a836b5cc338966"}, - {file = "numba-0.59.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2801003caa263d1e8497fb84829a7ecfb61738a95f62bc05693fcf1733e978e4"}, - {file = "numba-0.59.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dd2842fac03be4e5324ebbbd4d2d0c8c0fc6e0df75c09477dd45b288a0777389"}, - {file = "numba-0.59.1-cp311-cp311-win_amd64.whl", hash = "sha256:0594b3dfb369fada1f8bb2e3045cd6c61a564c62e50cf1f86b4666bc721b3450"}, - {file = "numba-0.59.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1cce206a3b92836cdf26ef39d3a3242fec25e07f020cc4feec4c4a865e340569"}, - {file = "numba-0.59.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8c8b4477763cb1fbd86a3be7050500229417bf60867c93e131fd2626edb02238"}, - {file = "numba-0.59.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d80bce4ef7e65bf895c29e3889ca75a29ee01da80266a01d34815918e365835"}, - {file = "numba-0.59.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7ad1d217773e89a9845886401eaaab0a156a90aa2f179fdc125261fd1105096"}, - {file = "numba-0.59.1-cp312-cp312-win_amd64.whl", hash = "sha256:5bf68f4d69dd3a9f26a9b23548fa23e3bcb9042e2935257b471d2a8d3c424b7f"}, - {file = "numba-0.59.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e0318ae729de6e5dbe64c75ead1a95eb01fabfe0e2ebed81ebf0344d32db0ae"}, - {file = "numba-0.59.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f68589740a8c38bb7dc1b938b55d1145244c8353078eea23895d4f82c8b9ec1"}, - {file = "numba-0.59.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:649913a3758891c77c32e2d2a3bcbedf4a69f5fea276d11f9119677c45a422e8"}, - {file = "numba-0.59.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9712808e4545270291d76b9a264839ac878c5eb7d8b6e02c970dc0ac29bc8187"}, - {file = "numba-0.59.1-cp39-cp39-win_amd64.whl", hash = "sha256:8d51ccd7008a83105ad6a0082b6a2b70f1142dc7cfd76deb8c5a862367eb8c86"}, - {file = "numba-0.59.1.tar.gz", hash = "sha256:76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b"}, -] - -[package.dependencies] -llvmlite = "==0.42.*" -numpy = ">=1.22,<1.27" - -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.1.3.1" -description = "CUBLAS native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" -description = "CUDA profiling tools runtime libs." -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" -description = "NVRTC native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" -description = "CUDA Runtime native Libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "8.9.2.26" -description = "cuDNN runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.0.2.54" -description = "CUFFT native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.2.106" -description = "CURAND native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.4.5.107" -description = "CUDA solver native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, -] - -[package.dependencies] -nvidia-cublas-cu12 = "*" -nvidia-cusparse-cu12 = "*" -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.1.0.106" -description = "CUSPARSE native runtime libraries" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, -] - -[package.dependencies] -nvidia-nvjitlink-cu12 = "*" - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.18.1" -description = "NVIDIA Collective Communication Library (NCCL) Runtime" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nccl_cu12-2.18.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:1a6c4acefcbebfa6de320f412bf7866de856e786e0462326ba1bac40de0b5e71"}, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.4.127" -description = "Nvidia JIT LTO Library" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"}, - {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"}, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.1.105" -description = "NVIDIA Tools Extension" -optional = false -python-versions = ">=3" -files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, -] - -[[package]] -name = "openai" -version = "1.19.0" -description = "The official Python library for the openai API" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "openai-1.19.0-py3-none-any.whl", hash = "sha256:fef51776830930f98401fc867c24b969e3bc121f5326edbb72ed56cdfdc4ffd0"}, - {file = "openai-1.19.0.tar.gz", hash = "sha256:6a1c3538e1fa1907f19d82a0017d792d5180533ecfe1a8f22b4b5119d7a3f5a0"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tqdm = ">4" -typing-extensions = ">=4.7,<5" - -[package.extras] -datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] - -[[package]] -name = "orjson" -version = "3.10.1" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.10.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8ec2fc456d53ea4a47768f622bb709be68acd455b0c6be57e91462259741c4f3"}, - {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e900863691d327758be14e2a491931605bd0aded3a21beb6ce133889830b659"}, - {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab6ecbd6fe57785ebc86ee49e183f37d45f91b46fc601380c67c5c5e9c0014a2"}, - {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af7c68b01b876335cccfb4eee0beef2b5b6eae1945d46a09a7c24c9faac7a77"}, - {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:915abfb2e528677b488a06eba173e9d7706a20fdfe9cdb15890b74ef9791b85e"}, - {file = "orjson-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3fd4a36eff9c63d25503b439531d21828da9def0059c4f472e3845a081aa0b"}, - {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d229564e72cfc062e6481a91977a5165c5a0fdce11ddc19ced8471847a67c517"}, - {file = "orjson-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9e00495b18304173ac843b5c5fbea7b6f7968564d0d49bef06bfaeca4b656f4e"}, - {file = "orjson-3.10.1-cp310-none-win32.whl", hash = "sha256:fd78ec55179545c108174ba19c1795ced548d6cac4d80d014163033c047ca4ea"}, - {file = "orjson-3.10.1-cp310-none-win_amd64.whl", hash = "sha256:50ca42b40d5a442a9e22eece8cf42ba3d7cd4cd0f2f20184b4d7682894f05eec"}, - {file = "orjson-3.10.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b345a3d6953628df2f42502297f6c1e1b475cfbf6268013c94c5ac80e8abc04c"}, - {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caa7395ef51af4190d2c70a364e2f42138e0e5fcb4bc08bc9b76997659b27dab"}, - {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b01d701decd75ae092e5f36f7b88a1e7a1d3bb7c9b9d7694de850fb155578d5a"}, - {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5028981ba393f443d8fed9049211b979cadc9d0afecf162832f5a5b152c6297"}, - {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31ff6a222ea362b87bf21ff619598a4dc1106aaafaea32b1c4876d692891ec27"}, - {file = "orjson-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e852a83d7803d3406135fb7a57cf0c1e4a3e73bac80ec621bd32f01c653849c5"}, - {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2567bc928ed3c3fcd90998009e8835de7c7dc59aabcf764b8374d36044864f3b"}, - {file = "orjson-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4ce98cac60b7bb56457bdd2ed7f0d5d7f242d291fdc0ca566c83fa721b52e92d"}, - {file = "orjson-3.10.1-cp311-none-win32.whl", hash = "sha256:813905e111318acb356bb8029014c77b4c647f8b03f314e7b475bd9ce6d1a8ce"}, - {file = "orjson-3.10.1-cp311-none-win_amd64.whl", hash = "sha256:03a3ca0b3ed52bed1a869163a4284e8a7b0be6a0359d521e467cdef7e8e8a3ee"}, - {file = "orjson-3.10.1-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f02c06cee680b1b3a8727ec26c36f4b3c0c9e2b26339d64471034d16f74f4ef5"}, - {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1aa2f127ac546e123283e437cc90b5ecce754a22306c7700b11035dad4ccf85"}, - {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2cf29b4b74f585225196944dffdebd549ad2af6da9e80db7115984103fb18a96"}, - {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1b130c20b116f413caf6059c651ad32215c28500dce9cd029a334a2d84aa66f"}, - {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d31f9a709e6114492136e87c7c6da5e21dfedebefa03af85f3ad72656c493ae9"}, - {file = "orjson-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1d169461726f271ab31633cf0e7e7353417e16fb69256a4f8ecb3246a78d6e"}, - {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57c294d73825c6b7f30d11c9e5900cfec9a814893af7f14efbe06b8d0f25fba9"}, - {file = "orjson-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7f11dbacfa9265ec76b4019efffabaabba7a7ebf14078f6b4df9b51c3c9a8ea"}, - {file = "orjson-3.10.1-cp312-none-win32.whl", hash = "sha256:d89e5ed68593226c31c76ab4de3e0d35c760bfd3fbf0a74c4b2be1383a1bf123"}, - {file = "orjson-3.10.1-cp312-none-win_amd64.whl", hash = "sha256:aa76c4fe147fd162107ce1692c39f7189180cfd3a27cfbc2ab5643422812da8e"}, - {file = "orjson-3.10.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a2c6a85c92d0e494c1ae117befc93cf8e7bca2075f7fe52e32698da650b2c6d1"}, - {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9813f43da955197d36a7365eb99bed42b83680801729ab2487fef305b9ced866"}, - {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec917b768e2b34b7084cb6c68941f6de5812cc26c6f1a9fecb728e36a3deb9e8"}, - {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5252146b3172d75c8a6d27ebca59c9ee066ffc5a277050ccec24821e68742fdf"}, - {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:536429bb02791a199d976118b95014ad66f74c58b7644d21061c54ad284e00f4"}, - {file = "orjson-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dfed3c3e9b9199fb9c3355b9c7e4649b65f639e50ddf50efdf86b45c6de04b5"}, - {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2b230ec35f188f003f5b543644ae486b2998f6afa74ee3a98fc8ed2e45960afc"}, - {file = "orjson-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:01234249ba19c6ab1eb0b8be89f13ea21218b2d72d496ef085cfd37e1bae9dd8"}, - {file = "orjson-3.10.1-cp38-none-win32.whl", hash = "sha256:8a884fbf81a3cc22d264ba780920d4885442144e6acaa1411921260416ac9a54"}, - {file = "orjson-3.10.1-cp38-none-win_amd64.whl", hash = "sha256:dab5f802d52b182163f307d2b1f727d30b1762e1923c64c9c56dd853f9671a49"}, - {file = "orjson-3.10.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a51fd55d4486bc5293b7a400f9acd55a2dc3b5fc8420d5ffe9b1d6bb1a056a5e"}, - {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53521542a6db1411b3bfa1b24ddce18605a3abdc95a28a67b33f9145f26aa8f2"}, - {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:27d610df96ac18ace4931411d489637d20ab3b8f63562b0531bba16011998db0"}, - {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79244b1456e5846d44e9846534bd9e3206712936d026ea8e6a55a7374d2c0694"}, - {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d751efaa8a49ae15cbebdda747a62a9ae521126e396fda8143858419f3b03610"}, - {file = "orjson-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27ff69c620a4fff33267df70cfd21e0097c2a14216e72943bd5414943e376d77"}, - {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ebc58693464146506fde0c4eb1216ff6d4e40213e61f7d40e2f0dde9b2f21650"}, - {file = "orjson-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5be608c3972ed902e0143a5b8776d81ac1059436915d42defe5c6ae97b3137a4"}, - {file = "orjson-3.10.1-cp39-none-win32.whl", hash = "sha256:4ae10753e7511d359405aadcbf96556c86e9dbf3a948d26c2c9f9a150c52b091"}, - {file = "orjson-3.10.1-cp39-none-win_amd64.whl", hash = "sha256:fb5bc4caa2c192077fdb02dce4e5ef8639e7f20bec4e3a834346693907362932"}, - {file = "orjson-3.10.1.tar.gz", hash = "sha256:a883b28d73370df23ed995c466b4f6c708c1f7a9bdc400fe89165c96c7603204"}, -] - -[[package]] -name = "outlines" -version = "0.0.37" -description = "Probabilistic Generative Model Programming" -optional = false -python-versions = ">=3.8" -files = [ - {file = "outlines-0.0.37-py3-none-any.whl", hash = "sha256:795ef2b3bcf58f6ddb44012f66d943385a7a1ba5efea205bc36745f82e7f597f"}, - {file = "outlines-0.0.37.tar.gz", hash = "sha256:0d2708587c98822469c40994308590929afebeaba36611f8db970752fd283c7d"}, -] - -[package.dependencies] -cloudpickle = "*" -diskcache = "*" -interegular = "*" -jinja2 = "*" -joblib = "*" -jsonschema = "*" -lark = "*" -nest-asyncio = "*" -numba = "*" -numpy = "*" -pydantic = ">=2.0" -referencing = "*" -requests = "*" -scipy = "*" -torch = ">=2.1.0" -transformers = "*" - -[package.extras] -serve = ["fastapi", "pydantic (>=2.0)", "ray (==2.9.0)", "uvicorn", "vllm (>=0.3.0)"] -test = ["accelerate", "beartype (<0.16.0)", "coverage[toml] (>=5.1)", "datasets", "diff-cover", "huggingface-hub", "llama-cpp-python", "openai (>=1.0.0)", "pre-commit", "pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "responses", "transformers"] - -[[package]] -name = "overrides" -version = "7.7.0" -description = "A decorator to automatically detect mismatch when overriding a method." -optional = false -python-versions = ">=3.6" -files = [ - {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, - {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, -] - -[[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, -] - -[[package]] -name = "pandas" -version = "2.2.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, -] - -[package.dependencies] -numpy = {version = ">=1.22.4", markers = "python_version < \"3.11\""} -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[package.extras] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["docopt", "pytest"] - -[[package]] -name = "pastel" -version = "0.2.1" -description = "Bring colors to your terminal." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, - {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." -optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pillow" -version = "10.3.0" -description = "Python Imaging Library (Fork)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, - {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, - {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, - {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, - {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, - {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, - {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, - {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, - {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, - {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, - {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, - {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, - {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, - {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, - {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, - {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, - {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, -] - -[package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "poethepoet" -version = "0.25.1" -description = "A task runner that works well with poetry." -optional = false -python-versions = ">=3.8" -files = [ - {file = "poethepoet-0.25.1-py3-none-any.whl", hash = "sha256:fee433f68424593bca6b357f0bf997d64edf42c7305c0d5d335bd570b8d2352b"}, - {file = "poethepoet-0.25.1.tar.gz", hash = "sha256:98f4446533a4b2bdb08843e211f918b1f2e7f8baf6d1803ef78f64661ed62463"}, -] - -[package.dependencies] -pastel = ">=0.2.1,<0.3.0" -tomli = ">=1.2.2" - -[package.extras] -poetry-plugin = ["poetry (>=1.0,<2.0)"] - -[[package]] -name = "prometheus-client" -version = "0.20.0" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.8" -files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.43" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "protobuf" -version = "5.26.1" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-5.26.1-cp310-abi3-win32.whl", hash = "sha256:3c388ea6ddfe735f8cf69e3f7dc7611e73107b60bdfcf5d0f024c3ccd3794e23"}, - {file = "protobuf-5.26.1-cp310-abi3-win_amd64.whl", hash = "sha256:e6039957449cb918f331d32ffafa8eb9255769c96aa0560d9a5bf0b4e00a2a33"}, - {file = "protobuf-5.26.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:38aa5f535721d5bb99861166c445c4105c4e285c765fbb2ac10f116e32dcd46d"}, - {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fbfe61e7ee8c1860855696e3ac6cfd1b01af5498facc6834fcc345c9684fb2ca"}, - {file = "protobuf-5.26.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f7417703f841167e5a27d48be13389d52ad705ec09eade63dfc3180a959215d7"}, - {file = "protobuf-5.26.1-cp38-cp38-win32.whl", hash = "sha256:d693d2504ca96750d92d9de8a103102dd648fda04540495535f0fec7577ed8fc"}, - {file = "protobuf-5.26.1-cp38-cp38-win_amd64.whl", hash = "sha256:9b557c317ebe6836835ec4ef74ec3e994ad0894ea424314ad3552bc6e8835b4e"}, - {file = "protobuf-5.26.1-cp39-cp39-win32.whl", hash = "sha256:b9ba3ca83c2e31219ffbeb9d76b63aad35a3eb1544170c55336993d7a18ae72c"}, - {file = "protobuf-5.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ee014c2c87582e101d6b54260af03b6596728505c79f17c8586e7523aaa8f8c"}, - {file = "protobuf-5.26.1-py3-none-any.whl", hash = "sha256:da612f2720c0183417194eeaa2523215c4fcc1a1949772dc65f05047e08d5932"}, - {file = "protobuf-5.26.1.tar.gz", hash = "sha256:8ca2a1d97c290ec7b16e4e5dff2e5ae150cc1582f55b5ab300d45cb0dfa90e51"}, -] - -[[package]] -name = "psutil" -version = "5.9.8" -description = "Cross-platform lib for process and system monitoring in Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pyarrow" -version = "15.0.2" -description = "Python library for Apache Arrow" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyarrow-15.0.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:88b340f0a1d05b5ccc3d2d986279045655b1fe8e41aba6ca44ea28da0d1455d8"}, - {file = "pyarrow-15.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eaa8f96cecf32da508e6c7f69bb8401f03745c050c1dd42ec2596f2e98deecac"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c6753ed4f6adb8461e7c383e418391b8d8453c5d67e17f416c3a5d5709afbd"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f639c059035011db8c0497e541a8a45d98a58dbe34dc8fadd0ef128f2cee46e5"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:290e36a59a0993e9a5224ed2fb3e53375770f07379a0ea03ee2fce2e6d30b423"}, - {file = "pyarrow-15.0.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06c2bb2a98bc792f040bef31ad3e9be6a63d0cb39189227c08a7d955db96816e"}, - {file = "pyarrow-15.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:f7a197f3670606a960ddc12adbe8075cea5f707ad7bf0dffa09637fdbb89f76c"}, - {file = "pyarrow-15.0.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5f8bc839ea36b1f99984c78e06e7a06054693dc2af8920f6fb416b5bca9944e4"}, - {file = "pyarrow-15.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5e81dfb4e519baa6b4c80410421528c214427e77ca0ea9461eb4097c328fa33"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4f240852b302a7af4646c8bfe9950c4691a419847001178662a98915fd7ee7"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e7d9cfb5a1e648e172428c7a42b744610956f3b70f524aa3a6c02a448ba853e"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2d4f905209de70c0eb5b2de6763104d5a9a37430f137678edfb9a675bac9cd98"}, - {file = "pyarrow-15.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90adb99e8ce5f36fbecbbc422e7dcbcbed07d985eed6062e459e23f9e71fd197"}, - {file = "pyarrow-15.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b116e7fd7889294cbd24eb90cd9bdd3850be3738d61297855a71ac3b8124ee38"}, - {file = "pyarrow-15.0.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:25335e6f1f07fdaa026a61c758ee7d19ce824a866b27bba744348fa73bb5a440"}, - {file = "pyarrow-15.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90f19e976d9c3d8e73c80be84ddbe2f830b6304e4c576349d9360e335cd627fc"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a22366249bf5fd40ddacc4f03cd3160f2d7c247692945afb1899bab8a140ddfb"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2a335198f886b07e4b5ea16d08ee06557e07db54a8400cc0d03c7f6a22f785f"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e6d459c0c22f0b9c810a3917a1de3ee704b021a5fb8b3bacf968eece6df098f"}, - {file = "pyarrow-15.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:033b7cad32198754d93465dcfb71d0ba7cb7cd5c9afd7052cab7214676eec38b"}, - {file = "pyarrow-15.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:29850d050379d6e8b5a693098f4de7fd6a2bea4365bfd073d7c57c57b95041ee"}, - {file = "pyarrow-15.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7167107d7fb6dcadb375b4b691b7e316f4368f39f6f45405a05535d7ad5e5058"}, - {file = "pyarrow-15.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e85241b44cc3d365ef950432a1b3bd44ac54626f37b2e3a0cc89c20e45dfd8bf"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:248723e4ed3255fcd73edcecc209744d58a9ca852e4cf3d2577811b6d4b59818"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff3bdfe6f1b81ca5b73b70a8d482d37a766433823e0c21e22d1d7dde76ca33f"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:f3d77463dee7e9f284ef42d341689b459a63ff2e75cee2b9302058d0d98fe142"}, - {file = "pyarrow-15.0.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:8c1faf2482fb89766e79745670cbca04e7018497d85be9242d5350cba21357e1"}, - {file = "pyarrow-15.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:28f3016958a8e45a1069303a4a4f6a7d4910643fc08adb1e2e4a7ff056272ad3"}, - {file = "pyarrow-15.0.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:89722cb64286ab3d4daf168386f6968c126057b8c7ec3ef96302e81d8cdb8ae4"}, - {file = "pyarrow-15.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd0ba387705044b3ac77b1b317165c0498299b08261d8122c96051024f953cd5"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2459bf1f22b6a5cdcc27ebfd99307d5526b62d217b984b9f5c974651398832"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58922e4bfece8b02abf7159f1f53a8f4d9f8e08f2d988109126c17c3bb261f22"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:adccc81d3dc0478ea0b498807b39a8d41628fa9210729b2f718b78cb997c7c91"}, - {file = "pyarrow-15.0.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:8bd2baa5fe531571847983f36a30ddbf65261ef23e496862ece83bdceb70420d"}, - {file = "pyarrow-15.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6669799a1d4ca9da9c7e06ef48368320f5856f36f9a4dd31a11839dda3f6cc8c"}, - {file = "pyarrow-15.0.2.tar.gz", hash = "sha256:9c9bc803cb3b7bfacc1e96ffbfd923601065d9d3f911179d81e72d99fd74a3d9"}, -] - -[package.dependencies] -numpy = ">=1.16.6,<2" - -[[package]] -name = "pyarrow-hotfix" -version = "0.6" -description = "" -optional = false -python-versions = ">=3.5" -files = [ - {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, - {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, -] - -[[package]] -name = "pycnite" -version = "2023.10.11" -description = "Python bytecode utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycnite-2023.10.11-py3-none-any.whl", hash = "sha256:7d02eb0ec4b405d8812ce053434dacfc2335dcd458ab58a1a8bf64f72d40bd76"}, - {file = "pycnite-2023.10.11.tar.gz", hash = "sha256:ad8616982beecc39f2090999aa8fe0b044b1f6733ec39484cb5e0900b3c88aa1"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.7.0" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, - {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, -] - -[package.dependencies] -annotated-types = ">=0.4.0" -email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.18.1" -typing-extensions = ">=4.6.1" - -[package.extras] -email = ["email-validator (>=2.0.0)"] - -[[package]] -name = "pydantic-core" -version = "2.18.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, - {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, - {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, - {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, - {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, - {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, - {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, - {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, - {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, - {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, - {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, - {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, - {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, - {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, - {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, - {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, - {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, - {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, - {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, - {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, - {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, - {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, - {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, - {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, - {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, - {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, - {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, - {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, - {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, - {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, - {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, - {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, - {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, - {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, - {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, - {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, - {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, - {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, - {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, - {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydot" -version = "2.0.0" -description = "Python interface to Graphviz's Dot" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pydot-2.0.0-py3-none-any.whl", hash = "sha256:408a47913ea7bd5d2d34b274144880c1310c4aee901f353cf21fe2e526a4ea28"}, - {file = "pydot-2.0.0.tar.gz", hash = "sha256:60246af215123fa062f21cd791be67dda23a6f280df09f68919e637a1e4f3235"}, -] - -[package.dependencies] -pyparsing = ">=3" - -[package.extras] -dev = ["black", "chardet"] -release = ["zest.releaser[recommended]"] -tests = ["black", "chardet", "tox"] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pyjwt" -version = "2.8.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, -] - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - -[[package]] -name = "pynvml" -version = "11.5.0" -description = "Python Bindings for the NVIDIA Management Library" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pynvml-11.5.0-py3-none-any.whl", hash = "sha256:5cce014ac01b098d08f06178f86c37be409b80b2e903a5a03ce15eed60f55e25"}, - {file = "pynvml-11.5.0.tar.gz", hash = "sha256:d027b21b95b1088b9fc278117f9f61b7c67f8e33a787e9f83f735f0f71ac32d0"}, -] - -[[package]] -name = "pyparsing" -version = "3.1.2" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pytest" -version = "8.1.1" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.4,<2.0" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-dotenv" -version = "1.0.1" -description = "Read key-value pairs from a .env file and set them as environment variables" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" -optional = false -python-versions = ">=3.6" -files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, -] - -[[package]] -name = "pytype" -version = "2024.4.11" -description = "Python type inferencer" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytype-2024.4.11-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3647b554eea009c9069d58440aed6b9e66d2f3ba20ef2674ffd5cd3b0bf83d7"}, - {file = "pytype-2024.4.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd24126c5b7bbda52fb48ad9d5b4811a7b090a944c8f39a45b94246668722e8c"}, - {file = "pytype-2024.4.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae6c825b7067f420810f8846147df4fdc966c340a859f51317ba26c860cda906"}, - {file = "pytype-2024.4.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b140b180cb287bbbce025e4c929cf3b020aecf4f91899ef609073ab02f2ba8e2"}, - {file = "pytype-2024.4.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3483a44f457766be85c2a2ef779f3c23f2a95973e5fdb844a030149fc1d92e3"}, - {file = "pytype-2024.4.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2986961d1a2ef778a9fea2f81c90cbe6f4a196ff783b26a30e7f3c9be1545c10"}, - {file = "pytype-2024.4.11-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:ad59ed48b185d61f5ab8bceb173e6680eeabf109c5f15290ad88b7be1096e493"}, - {file = "pytype-2024.4.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50da73a6a24337d00c9c88697ced74909dd00e3b9238ad619490ff83a6588d52"}, - {file = "pytype-2024.4.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51c5470061ace9908a8306e3aa460e89e8fc15e63557625c387f932b852d3fa8"}, - {file = "pytype-2024.4.11-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4484bbe24c58c289cea0c7549a507f20eea29caa68dcaf1e857eba584324641e"}, - {file = "pytype-2024.4.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ad2f7cbfc9843a3f6f1d776318a2f4d96b0ca2d054128280be58eef293c0795"}, - {file = "pytype-2024.4.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa1069fcaaa9859273ec31650632e7bba9af0e82083e8f7d08f9b742d6d36055"}, - {file = "pytype-2024.4.11.tar.gz", hash = "sha256:3315f51ce482d8c07e2f1857c47ccd17beb4a9bfde9afa7d1e6669f1e8881081"}, -] - -[package.dependencies] -attrs = ">=21.4.0" -immutabledict = ">=4.1.0" -importlab = ">=0.8" -jinja2 = ">=3.1.2" -libcst = ">=1.0.1" -msgspec = ">=0.18.6" -networkx = "<3.2" -ninja = ">=1.10.0.post2" -pycnite = ">=2023.10.11" -pydot = ">=1.4.2" -tabulate = ">=0.8.10" -toml = ">=0.10.2" -typing-extensions = ">=4.3.0" - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, -] - -[[package]] -name = "pywinpty" -version = "2.0.13" -description = "Pseudo terminal support for Windows from Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, - {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, - {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, - {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, - {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, - {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "pyzmq" -version = "26.0.0" -description = "Python bindings for 0MQ" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.0.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:a86409f3f8eae7af5a47babd831a119bdf552e831f04d2225a313305e8e35e7c"}, - {file = "pyzmq-26.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d36a46975925b8bf14b69fe6d4097bc96c91f94ceb954d56853a2211a5cc3433"}, - {file = "pyzmq-26.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcac700269d081ded42ed3833f9d0effe734148376204af9c0ef0fd25a3fea55"}, - {file = "pyzmq-26.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49efc420e36d2e8adc5dae41c2c1e8bb37a069e40a880cbe414a032136b194b0"}, - {file = "pyzmq-26.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02773b96ef6a17a57680c3609645785c390198be31a4505c01ce0c846f9e7d0e"}, - {file = "pyzmq-26.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ce2c53f4963a358ba91b58ccecb84fab6d5f0622230d105c2589f7556ec53cc9"}, - {file = "pyzmq-26.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:06525d996afdb0da3e8b7df0b654261455f6e86c2c3574c3f00d2bd335be78eb"}, - {file = "pyzmq-26.0.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bd3537f049dc0488adb3df29a77635eaff2a8d1d3d29a09714db6e2d10caba1a"}, - {file = "pyzmq-26.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ce158ab54994c60fdde83300dc1e447446baacbe4ec9e4e80096f9b9a125c13"}, - {file = "pyzmq-26.0.0-cp310-cp310-win32.whl", hash = "sha256:271c9178a94b009651f8ad3ff9bb9ca45778aaf66c9e325a44d81a7498fcaa59"}, - {file = "pyzmq-26.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4216eee101d104a017042f0e4af0a45875400ff3794f1a59476e210b1a9760e2"}, - {file = "pyzmq-26.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:44271793067025a07d38ad4be11f08187cce850fafd1890b42046abbcdca2fc0"}, - {file = "pyzmq-26.0.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:1e87178437460b6df18e761650ef080d3ad5a41813cc3df7f9fd78714fca04c0"}, - {file = "pyzmq-26.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0397c7431f3fc2bac497992d7447b036bc0d8bb3e15b158b2013201857ff2354"}, - {file = "pyzmq-26.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a5b4dc4d7a3f859026083906724ad1ae743261548b61d0d5abcf2d994122c2b"}, - {file = "pyzmq-26.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:952e85c5e86f9ba100b78b60719b76e1ff3e13bb403cb6de687bb92e7b2179e7"}, - {file = "pyzmq-26.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fdeac8612a9dca6fcad6cb43c7efb75f53ba75da981fbafa949ddcde1d5662"}, - {file = "pyzmq-26.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:39b8ed8d2e5da8b8351c6aa627601b3b52e8eb5e25cf6bcd26b6f012dec7870b"}, - {file = "pyzmq-26.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f6f618d7d7c9c37053a36e6dc5435c53e9e0c7a67e6fd00b69c209d07a8db4dc"}, - {file = "pyzmq-26.0.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72ae3078b1c47552e0e39fd81fc0472e880316897a733dbb3570819be19da48a"}, - {file = "pyzmq-26.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5d7fcc648445dbfd6ce9973ec7b4a33ee9307b7e88cf4816f4403ccbaf8de9ca"}, - {file = "pyzmq-26.0.0-cp311-cp311-win32.whl", hash = "sha256:9982799d7d7807beb1b26f1aa9a192baccb1a14c5d00eca881a42a0ae562671b"}, - {file = "pyzmq-26.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:60f91afc76a3fc5d65dfba4f6b6020c462674b5eab6cbf00dec133d79656072d"}, - {file = "pyzmq-26.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:120887d773e878136e9b33bbba656df0d4c6e2861694d07d058ec60ce1108b24"}, - {file = "pyzmq-26.0.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:469f4febd63c26b20132e54cc40048d5698123794b103758ccd21b8a45890dc3"}, - {file = "pyzmq-26.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c919895132cae5a458d5a17047fd33c9eb271f15bb3485add34429cfd7b76a71"}, - {file = "pyzmq-26.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e0e94ca9a8f23000d54e11ecd727b69fb1994baf3b6b1eedb881cdd3196ecec"}, - {file = "pyzmq-26.0.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a824b3301ddd003cdceb9b537804e751ac5922a845b19d4e50b4789d1cd28b24"}, - {file = "pyzmq-26.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af9f5b1b76753584c871c1c96db8a18650886b3adf9fc8c7d4019343eb329c28"}, - {file = "pyzmq-26.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9691a6ab55d011e83d7438f6711b93b7f8aa21ee8cf3e7ad6d6d9ea26a8f3a1f"}, - {file = "pyzmq-26.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58176e2437462568b5099acf17401be64205e175e72767a8250eef84ee9ec4f5"}, - {file = "pyzmq-26.0.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d492921b398d640a1f796306531bc6911a94ce5528b798ed14e0620abd9b948d"}, - {file = "pyzmq-26.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f85bb2c47b5fd70e3cbb280e380ab97bdf9f02e1a363cb472fe0a297ac24029d"}, - {file = "pyzmq-26.0.0-cp312-cp312-win32.whl", hash = "sha256:c2e36399f0433b14a91f956bd7ecf94799c57a6f992889d45440cb05b3de8025"}, - {file = "pyzmq-26.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:12ca1afb065e5b21a32b1e35bfcbc8762efc0f7555c166acaec36c93b52d7ccf"}, - {file = "pyzmq-26.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:f66c925f62ce28946525c32a094e346dd8da6c828d568d7ecda97f5ae36089c3"}, - {file = "pyzmq-26.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e495ff09514fc657c5fb2cba0aac082ce0494c6217230783297da9008333a8db"}, - {file = "pyzmq-26.0.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5736c9a54c27319a65ffc72dbf684538f2773237e94ba50b7f1f74f4e3cb9115"}, - {file = "pyzmq-26.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd62830100b9b1adb51da4094142bd680d51daf9a0f6f3f39e1f80474eddc011"}, - {file = "pyzmq-26.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a7ee271fac41ddc0ba11f4b128ddd5f2bf0a3186d25be331ed8bfbb253536"}, - {file = "pyzmq-26.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:694625c2c22be57149e9439757ee02ee4fb6432f7054dc5008bbbc33ef388d1c"}, - {file = "pyzmq-26.0.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:90ba8f7c6f34c2c11179b293050417c14661035969ef3f8867200ea6901f9000"}, - {file = "pyzmq-26.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab2e55046263c8b24e64116e80b63cf701df747b44aadcf317aa47c8af2dfe67"}, - {file = "pyzmq-26.0.0-cp37-cp37m-win32.whl", hash = "sha256:7353d231686bbc96c458b934f134ff9165a1e9dd0a2ea8f724469e44bcc2c07a"}, - {file = "pyzmq-26.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1df2b992eabc59f078ca916e9ac8b5bd463536bf7828c13940b35b8555ed7861"}, - {file = "pyzmq-26.0.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2397364289334840c81ff1ef95a5a5ee326de01c1437cc38f7e16785a7b653d9"}, - {file = "pyzmq-26.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c952cf06edbbd2d67f627037e2c8e3187ca834d6b9a222e3a3037f80d393a345"}, - {file = "pyzmq-26.0.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55f390adb763196d75a2e8c18277b4344f8a7f94f223b5d096324c5b47c2471e"}, - {file = "pyzmq-26.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1da5e11862a994360319df4f425e89662563683334e1079684eb77b9a6478ae2"}, - {file = "pyzmq-26.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72340614ea23904cff824109eb025648bdf32775d87f5814d3ba6f2335a853f3"}, - {file = "pyzmq-26.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa7431d12ebb5433a92e99dc326d45eaf52a90046032bac4c558b4bdeee5dc7a"}, - {file = "pyzmq-26.0.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a2b13008a693c0ffccaeeebcc5ab5f2398cced3b5bf482ba89a38fe56b00eb10"}, - {file = "pyzmq-26.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d68284ce48617c97e675ed8a89db12a098eaa871a026999c9a10351f547f1fe"}, - {file = "pyzmq-26.0.0-cp38-cp38-win32.whl", hash = "sha256:8783857a8c8df648a70c81ea3ff53ee71e5bf18468ca5ac3414f419fe8f3bd93"}, - {file = "pyzmq-26.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:36d0f2fcbdba1fda8ff213bd17db7ddcba848aa70480ade3fe70401dce606511"}, - {file = "pyzmq-26.0.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:dd87df01bc8eca392f0d505924087ccafdc4885a498e68df9f09eca9fdc736f1"}, - {file = "pyzmq-26.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abc08b2e688714216870a6ab974733d4a1fcf0437d250ac8feed59c4c5c3f395"}, - {file = "pyzmq-26.0.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dd13a30454adcf2f361155ea563ec99036678131a17c6b1a3f74426212c14ddc"}, - {file = "pyzmq-26.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a0562054930471b386a44b0887504687c4e7adf4ba89bddc2e5959d16c371764"}, - {file = "pyzmq-26.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc7badded4b025dbc25f34b95503b71c952235e6e40de40995c0c120efb4ff6d"}, - {file = "pyzmq-26.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f971e77358384b8bcf3e9a7577cf84f97adbd6359f943e30cbff66087afcb279"}, - {file = "pyzmq-26.0.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca4ebbef3f5fbd271eafc7c22ebbb88b74232f08b0e51759113f30a8d01f6843"}, - {file = "pyzmq-26.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc98fbd4ce4ef8a0fbe97ab6d495aaa7764461e5a45f24c04f1d234e7bb80293"}, - {file = "pyzmq-26.0.0-cp39-cp39-win32.whl", hash = "sha256:a5207bc2a923118e9afb57fee679be016ea138c27d1be5747118966e2d5d9450"}, - {file = "pyzmq-26.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:e0c08a6070358a2984900a4518e2dacbfaf24aac018ab086d7ac2f6069b13340"}, - {file = "pyzmq-26.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:eae3dcc185c405cf645480745c45346a1f42afce240f69a589095e41bd2b9e3d"}, - {file = "pyzmq-26.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:71a8f010e23dfd61c531084a2b72a81885017da28352540f0b7799ca8423c044"}, - {file = "pyzmq-26.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b48b7e417c56486932fb0c01fecd24916fe6bc359c03a654aa8c63fa33e3d76"}, - {file = "pyzmq-26.0.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2806942185b40a3477d9b300c6f71354dd2be37e3f61a43193c96caa51e284d1"}, - {file = "pyzmq-26.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed127aff75a3df142ae7a883c49a85b0b2f863b59fa1b8e4280335f5ebab5fd0"}, - {file = "pyzmq-26.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:903b77dd2f17286496fa3ec902bc523f4502b0c64a2892df4b021222a2ba95fe"}, - {file = "pyzmq-26.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:321a6872a9371709a62b3a4a14c1e9b5b47549371197c0c2164d2288510cd6d6"}, - {file = "pyzmq-26.0.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cac954dc83c84e9d9d65f2359d402d7e79ae094d7808d578c9e9cc2c350c5a64"}, - {file = "pyzmq-26.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac6f54c399638858e0b2a3153f23934604f3a8c9bb5a9cf865060cc658b1e096"}, - {file = "pyzmq-26.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40af30c4cd0a046029d7b5272d02a649f9b1f89fb1361bbc90ba08d55ac88273"}, - {file = "pyzmq-26.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:814245422f1c7707634397621dbcbeea7671fdc5c43d1ae592f4e0e45179e7fb"}, - {file = "pyzmq-26.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d3d7ef786e778351e6c51b45906e16506ad98bb78b99304032cb1876dfc81d2"}, - {file = "pyzmq-26.0.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:36a85da0eab4c5337d0de7f975cca011208a59e9d0637e0c1b571764f1dd4a8f"}, - {file = "pyzmq-26.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d64889bfe4109f4a59a72b1d21416550465020642d6f556efd044951386bd38"}, - {file = "pyzmq-26.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80fdea3e9e34c480bfccbb910f75380196ae9d1c12880c21743c845ebe6b13aa"}, - {file = "pyzmq-26.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7129efc54dc48f566eed5422bc555ba4e472e40a1f9de328577c90ade47ccf5d"}, - {file = "pyzmq-26.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ec5147095d6065b0e3a38a1a34f7859ab46496f3d5ce71134165893e9f83674"}, - {file = "pyzmq-26.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a1cc0445038a394479ad36b7e3cf55a19ee40099c031f65de872b8ee7025e79"}, - {file = "pyzmq-26.0.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b377b520e618c30c827966c274dd62ce7e15c72ce8767fae6193b6bdd1deb502"}, - {file = "pyzmq-26.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc907b26d287e6981d1e531c8fc21a0f94fe46a17493a8322eb3c75f8b561334"}, - {file = "pyzmq-26.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:580dd4b1c2edd51f284df0209bf439899f425ed00cb803a85ddc6cf10c866688"}, - {file = "pyzmq-26.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:08db8071020181173c70cf2dad239e5e21e5b2e95f95b0ece0da39a70f5a483c"}, - {file = "pyzmq-26.0.0.tar.gz", hash = "sha256:10ff405db5cee3bbd7aa143d78b25d90356097aed7864e50f0ae644e08759fe9"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "quantile-python" -version = "1.1" -description = "Python Implementation of Graham Cormode and S. Muthukrishnan's Effective Computation of Biased Quantiles over Data Streams in ICDE'05" -optional = false -python-versions = "*" -files = [ - {file = "quantile-python-1.1.tar.gz", hash = "sha256:558629e88c497ef3b9b1081349c1ae6a61b53590e317724298ff54c674db7969"}, -] - -[[package]] -name = "ray" -version = "2.10.0" -description = "Ray provides a simple, universal API for building distributed applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "ray-2.10.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:8a174268c7b6ca9826e4884b837395b695a45c17049927965d1b4cc370184ba2"}, - {file = "ray-2.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c193deed7e3f604cdb37047f5646cab14f4337693dd32add8bc902dfadb89f75"}, - {file = "ray-2.10.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a3db89d22afc7a0a976249715dd90ffe69f7692d32cb599cd1afbc38482060f7"}, - {file = "ray-2.10.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:cb74f7d2aa5a21e5f9dcb315a4f9bde822328e76ba95cd0ba370cfda098a67f4"}, - {file = "ray-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:44ab600fe0b5a12675d0d42d564994ac4e53286217c4de1c4eb00d74ae79ef24"}, - {file = "ray-2.10.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:8eb606b7d247213b377ccca0f8d425f9c61a48b23e9b2e4566bc75f66d797bb5"}, - {file = "ray-2.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8eb11aec8a65946f7546d0e703158c03a85a8be27332dbbf86d9411802700e7e"}, - {file = "ray-2.10.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:5b4ec4b5707e18382685d0703ed04afd1602359a3056f6ae4b37588a0551eef3"}, - {file = "ray-2.10.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:c7d1438cba8726ec9a59c96964e007b60a0728436647f48c383228692c2f2ee0"}, - {file = "ray-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:eceecea4133e63f5d607cc9f2a4278de51eeeeef552f694895e381aae9ff8522"}, - {file = "ray-2.10.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:fb92f2d6d4eca602dfb0d3d459a09be59668e1560ce4bd89b692892f25b1933b"}, - {file = "ray-2.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31aa60373fc7291752ee89a5f5ad8effec682b1f165911f38ae95fc43bc668a9"}, - {file = "ray-2.10.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5b7d41eb04f6b67c38170edc0406dc71537eabfd6e5d4e3399a36385ff8b0194"}, - {file = "ray-2.10.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:8a44535e6266fa09e3eb4fc9035906decfc9f3aeda86fe66b1e738a01a51939a"}, - {file = "ray-2.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:77ba4120d694e7c3dc7d93a9d3cb33925827d04ad11af2d21fa0db66f227d27a"}, - {file = "ray-2.10.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:6b49a8c2b40f02a56a2af2b6026c1eedd485747c6e4c2cf9ac433af6e572bdbb"}, - {file = "ray-2.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5fe8fb8847304dd3a6e435b95af9e5436309f2b3612c63c56bf4ac8dea73f9f4"}, - {file = "ray-2.10.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f215eb704f2cb72e984d5a85fe435b4d74808c906950176789ba2101ce739082"}, - {file = "ray-2.10.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:32d97e5343578a3d37ab5f30148fa193dec46a21fa21f15b6f23fe48a420831a"}, - {file = "ray-2.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:917d081fc98500f244ebc0e8da836025e1e4fa52f21030b8336cb0a2c79e84e2"}, -] - -[package.dependencies] -aiosignal = "*" -click = ">=7.0" -filelock = "*" -frozenlist = "*" -jsonschema = "*" -msgpack = ">=1.0.0,<2.0.0" -packaging = "*" -protobuf = ">=3.15.3,<3.19.5 || >3.19.5" -pyyaml = "*" -requests = "*" - -[package.extras] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.10.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.10.0)"] -data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] -observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] -rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] -tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] - -[[package]] -name = "referencing" -version = "0.34.0" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, - {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "regex" -version = "2023.12.25" -description = "Alternative regular expression module, to replace re." -optional = false -python-versions = ">=3.7" -files = [ - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, - {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, - {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, - {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, - {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, - {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, - {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, - {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, - {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, - {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, - {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, - {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, - {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, - {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, - {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -description = "Pure python rfc3986 validator" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, - {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, -] - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.18.0" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, - {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, - {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, - {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, - {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, - {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, - {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, - {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, - {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, - {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, - {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, - {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, - {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, -] - -[[package]] -name = "ruff" -version = "0.1.15" -description = "An extremely fast Python linter and code formatter, written in Rust." -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, -] - -[[package]] -name = "safetensors" -version = "0.4.3" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "safetensors-0.4.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dcf5705cab159ce0130cd56057f5f3425023c407e170bca60b4868048bae64fd"}, - {file = "safetensors-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb4f8c5d0358a31e9a08daeebb68f5e161cdd4018855426d3f0c23bb51087055"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a5319ef409e7f88686a46607cbc3c428271069d8b770076feaf913664a07ac"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb9c65bd82f9ef3ce4970dc19ee86be5f6f93d032159acf35e663c6bea02b237"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edb5698a7bc282089f64c96c477846950358a46ede85a1c040e0230344fdde10"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efcc860be094b8d19ac61b452ec635c7acb9afa77beb218b1d7784c6d41fe8ad"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d88b33980222085dd6001ae2cad87c6068e0991d4f5ccf44975d216db3b57376"}, - {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5fc6775529fb9f0ce2266edd3e5d3f10aab068e49f765e11f6f2a63b5367021d"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9c6ad011c1b4e3acff058d6b090f1da8e55a332fbf84695cf3100c649cc452d1"}, - {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c496c5401c1b9c46d41a7688e8ff5b0310a3b9bae31ce0f0ae870e1ea2b8caf"}, - {file = "safetensors-0.4.3-cp310-none-win32.whl", hash = "sha256:38e2a8666178224a51cca61d3cb4c88704f696eac8f72a49a598a93bbd8a4af9"}, - {file = "safetensors-0.4.3-cp310-none-win_amd64.whl", hash = "sha256:393e6e391467d1b2b829c77e47d726f3b9b93630e6a045b1d1fca67dc78bf632"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:22f3b5d65e440cec0de8edaa672efa888030802e11c09b3d6203bff60ebff05a"}, - {file = "safetensors-0.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c4fa560ebd4522adddb71dcd25d09bf211b5634003f015a4b815b7647d62ebe"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9afd5358719f1b2cf425fad638fc3c887997d6782da317096877e5b15b2ce93"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8c5093206ef4b198600ae484230402af6713dab1bd5b8e231905d754022bec7"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0b2104df1579d6ba9052c0ae0e3137c9698b2d85b0645507e6fd1813b70931a"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cf18888606dad030455d18f6c381720e57fc6a4170ee1966adb7ebc98d4d6a3"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bf4f9d6323d9f86eef5567eabd88f070691cf031d4c0df27a40d3b4aaee755b"}, - {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:585c9ae13a205807b63bef8a37994f30c917ff800ab8a1ca9c9b5d73024f97ee"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faefeb3b81bdfb4e5a55b9bbdf3d8d8753f65506e1d67d03f5c851a6c87150e9"}, - {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:befdf0167ad626f22f6aac6163477fcefa342224a22f11fdd05abb3995c1783c"}, - {file = "safetensors-0.4.3-cp311-none-win32.whl", hash = "sha256:a7cef55929dcbef24af3eb40bedec35d82c3c2fa46338bb13ecf3c5720af8a61"}, - {file = "safetensors-0.4.3-cp311-none-win_amd64.whl", hash = "sha256:840b7ac0eff5633e1d053cc9db12fdf56b566e9403b4950b2dc85393d9b88d67"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:22d21760dc6ebae42e9c058d75aa9907d9f35e38f896e3c69ba0e7b213033856"}, - {file = "safetensors-0.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d22c1a10dff3f64d0d68abb8298a3fd88ccff79f408a3e15b3e7f637ef5c980"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1648568667f820b8c48317c7006221dc40aced1869908c187f493838a1362bc"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:446e9fe52c051aeab12aac63d1017e0f68a02a92a027b901c4f8e931b24e5397"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fef5d70683643618244a4f5221053567ca3e77c2531e42ad48ae05fae909f542"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a1f4430cc0c9d6afa01214a4b3919d0a029637df8e09675ceef1ca3f0dfa0df"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d603846a8585b9432a0fd415db1d4c57c0f860eb4aea21f92559ff9902bae4d"}, - {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a844cdb5d7cbc22f5f16c7e2a0271170750763c4db08381b7f696dbd2c78a361"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:88887f69f7a00cf02b954cdc3034ffb383b2303bc0ab481d4716e2da51ddc10e"}, - {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ee463219d9ec6c2be1d331ab13a8e0cd50d2f32240a81d498266d77d07b7e71e"}, - {file = "safetensors-0.4.3-cp312-none-win32.whl", hash = "sha256:d0dd4a1db09db2dba0f94d15addc7e7cd3a7b0d393aa4c7518c39ae7374623c3"}, - {file = "safetensors-0.4.3-cp312-none-win_amd64.whl", hash = "sha256:d14d30c25897b2bf19b6fb5ff7e26cc40006ad53fd4a88244fdf26517d852dd7"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d1456f814655b224d4bf6e7915c51ce74e389b413be791203092b7ff78c936dd"}, - {file = "safetensors-0.4.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:455d538aa1aae4a8b279344a08136d3f16334247907b18a5c3c7fa88ef0d3c46"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf476bca34e1340ee3294ef13e2c625833f83d096cfdf69a5342475602004f95"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02ef3a24face643456020536591fbd3c717c5abaa2737ec428ccbbc86dffa7a4"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7de32d0d34b6623bb56ca278f90db081f85fb9c5d327e3c18fd23ac64f465768"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a0deb16a1d3ea90c244ceb42d2c6c276059616be21a19ac7101aa97da448faf"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c59d51f182c729f47e841510b70b967b0752039f79f1de23bcdd86462a9b09ee"}, - {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f598b713cc1a4eb31d3b3203557ac308acf21c8f41104cdd74bf640c6e538e3"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5757e4688f20df083e233b47de43845d1adb7e17b6cf7da5f8444416fc53828d"}, - {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fe746d03ed8d193674a26105e4f0fe6c726f5bb602ffc695b409eaf02f04763d"}, - {file = "safetensors-0.4.3-cp37-none-win32.whl", hash = "sha256:0d5ffc6a80f715c30af253e0e288ad1cd97a3d0086c9c87995e5093ebc075e50"}, - {file = "safetensors-0.4.3-cp37-none-win_amd64.whl", hash = "sha256:a11c374eb63a9c16c5ed146457241182f310902bd2a9c18255781bb832b6748b"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1e31be7945f66be23f4ec1682bb47faa3df34cb89fc68527de6554d3c4258a4"}, - {file = "safetensors-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:03a4447c784917c9bf01d8f2ac5080bc15c41692202cd5f406afba16629e84d6"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d244bcafeb1bc06d47cfee71727e775bca88a8efda77a13e7306aae3813fa7e4"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53c4879b9c6bd7cd25d114ee0ef95420e2812e676314300624594940a8d6a91f"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74707624b81f1b7f2b93f5619d4a9f00934d5948005a03f2c1845ffbfff42212"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d52c958dc210265157573f81d34adf54e255bc2b59ded6218500c9b15a750eb"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f9568f380f513a60139971169c4a358b8731509cc19112369902eddb33faa4d"}, - {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9cd8e1560dfc514b6d7859247dc6a86ad2f83151a62c577428d5102d872721"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:89f9f17b0dacb913ed87d57afbc8aad85ea42c1085bd5de2f20d83d13e9fc4b2"}, - {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1139eb436fd201c133d03c81209d39ac57e129f5e74e34bb9ab60f8d9b726270"}, - {file = "safetensors-0.4.3-cp38-none-win32.whl", hash = "sha256:d9c289f140a9ae4853fc2236a2ffc9a9f2d5eae0cb673167e0f1b8c18c0961ac"}, - {file = "safetensors-0.4.3-cp38-none-win_amd64.whl", hash = "sha256:622afd28968ef3e9786562d352659a37de4481a4070f4ebac883f98c5836563e"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8651c7299cbd8b4161a36cd6a322fa07d39cd23535b144d02f1c1972d0c62f3c"}, - {file = "safetensors-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e375d975159ac534c7161269de24ddcd490df2157b55c1a6eeace6cbb56903f0"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084fc436e317f83f7071fc6a62ca1c513b2103db325cd09952914b50f51cf78f"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41a727a7f5e6ad9f1db6951adee21bbdadc632363d79dc434876369a17de6ad6"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7dbbde64b6c534548696808a0e01276d28ea5773bc9a2dfb97a88cd3dffe3df"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbae3b4b9d997971431c346edbfe6e41e98424a097860ee872721e176040a893"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01e4b22e3284cd866edeabe4f4d896229495da457229408d2e1e4810c5187121"}, - {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dd37306546b58d3043eb044c8103a02792cc024b51d1dd16bd3dd1f334cb3ed"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8815b5e1dac85fc534a97fd339e12404db557878c090f90442247e87c8aeaea"}, - {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e011cc162503c19f4b1fd63dfcddf73739c7a243a17dac09b78e57a00983ab35"}, - {file = "safetensors-0.4.3-cp39-none-win32.whl", hash = "sha256:01feb3089e5932d7e662eda77c3ecc389f97c0883c4a12b5cfdc32b589a811c3"}, - {file = "safetensors-0.4.3-cp39-none-win_amd64.whl", hash = "sha256:3f9cdca09052f585e62328c1c2923c70f46814715c795be65f0b93f57ec98a02"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1b89381517891a7bb7d1405d828b2bf5d75528299f8231e9346b8eba092227f9"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cd6fff9e56df398abc5866b19a32124815b656613c1c5ec0f9350906fd798aac"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840caf38d86aa7014fe37ade5d0d84e23dcfbc798b8078015831996ecbc206a3"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9650713b2cfa9537a2baf7dd9fee458b24a0aaaa6cafcea8bdd5fb2b8efdc34"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4119532cd10dba04b423e0f86aecb96cfa5a602238c0aa012f70c3a40c44b50"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e066e8861eef6387b7c772344d1fe1f9a72800e04ee9a54239d460c400c72aab"}, - {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:90964917f5b0fa0fa07e9a051fbef100250c04d150b7026ccbf87a34a54012e0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c41e1893d1206aa7054029681778d9a58b3529d4c807002c156d58426c225173"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae7613a119a71a497d012ccc83775c308b9c1dab454806291427f84397d852fd"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9bac020faba7f5dc481e881b14b6425265feabb5bfc552551d21189c0eddc3"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:420a98f593ff9930f5822560d14c395ccbc57342ddff3b463bc0b3d6b1951550"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f5e6883af9a68c0028f70a4c19d5a6ab6238a379be36ad300a22318316c00cb0"}, - {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:cdd0a3b5da66e7f377474599814dbf5cbf135ff059cc73694de129b58a5e8a2c"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9bfb92f82574d9e58401d79c70c716985dc049b635fef6eecbb024c79b2c46ad"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3615a96dd2dcc30eb66d82bc76cda2565f4f7bfa89fcb0e31ba3cea8a1a9ecbb"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868ad1b6fc41209ab6bd12f63923e8baeb1a086814cb2e81a65ed3d497e0cf8f"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffba80aa49bd09195145a7fd233a7781173b422eeb995096f2b30591639517"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0acbe31340ab150423347e5b9cc595867d814244ac14218932a5cf1dd38eb39"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19bbdf95de2cf64f25cd614c5236c8b06eb2cfa47cbf64311f4b5d80224623a3"}, - {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b852e47eb08475c2c1bd8131207b405793bfc20d6f45aff893d3baaad449ed14"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d07cbca5b99babb692d76d8151bec46f461f8ad8daafbfd96b2fca40cadae65"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ab6527a20586d94291c96e00a668fa03f86189b8a9defa2cdd34a1a01acc7d5"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02318f01e332cc23ffb4f6716e05a492c5f18b1d13e343c49265149396284a44"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4b52ce9a396260eb9731eb6aea41a7320de22ed73a1042c2230af0212758ce"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:018b691383026a2436a22b648873ed11444a364324e7088b99cd2503dd828400"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:309b10dbcab63269ecbf0e2ca10ce59223bb756ca5d431ce9c9eeabd446569da"}, - {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b277482120df46e27a58082df06a15aebda4481e30a1c21eefd0921ae7e03f65"}, - {file = "safetensors-0.4.3.tar.gz", hash = "sha256:2f85fc50c4e07a21e95c24e07460fe6f7e2859d0ce88092838352b798ce711c2"}, -] - -[package.extras] -all = ["safetensors[jax]", "safetensors[numpy]", "safetensors[paddlepaddle]", "safetensors[pinned-tf]", "safetensors[quality]", "safetensors[testing]", "safetensors[torch]"] -dev = ["safetensors[all]"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "safetensors[numpy]"] -mlx = ["mlx (>=0.0.9)"] -numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["paddlepaddle (>=2.4.1)", "safetensors[numpy]"] -pinned-tf = ["safetensors[numpy]", "tensorflow (==2.11.0)"] -quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["safetensors[numpy]", "tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "hypothesis (>=6.70.2)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "safetensors[numpy]", "setuptools-rust (>=1.5.2)"] -torch = ["safetensors[numpy]", "torch (>=1.10)"] - -[[package]] -name = "scikit-learn" -version = "1.4.0" -description = "A set of python modules for machine learning and data mining" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scikit-learn-1.4.0.tar.gz", hash = "sha256:d4373c984eba20e393216edd51a3e3eede56cbe93d4247516d205643c3b93121"}, - {file = "scikit_learn-1.4.0-1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fce93a7473e2f4ee4cc280210968288d6a7d7ad8dc6fa7bb7892145e407085f9"}, - {file = "scikit_learn-1.4.0-1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d77df3d1e15fc37a9329999979fa7868ba8655dbab21fe97fc7ddabac9e08cc7"}, - {file = "scikit_learn-1.4.0-1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2404659fedec40eeafa310cd14d613e564d13dbf8f3c752d31c095195ec05de6"}, - {file = "scikit_learn-1.4.0-1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e98632da8f6410e6fb6bf66937712c949b4010600ccd3f22a5388a83e610cc3c"}, - {file = "scikit_learn-1.4.0-1-cp310-cp310-win_amd64.whl", hash = "sha256:11b3b140f70fbc9f6a08884631ae8dd60a4bb2d7d6d1de92738ea42b740d8992"}, - {file = "scikit_learn-1.4.0-1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8341eabdc754d5ab91641a7763243845e96b6d68e03e472531e88a4f1b09f21"}, - {file = "scikit_learn-1.4.0-1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d1f6bce875ac2bb6b52514f67c185c564ccd299a05b65b7bab091a4c13dde12d"}, - {file = "scikit_learn-1.4.0-1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c408b46b2fd61952d519ea1af2f8f0a7a703e1433923ab1704c4131520b2083b"}, - {file = "scikit_learn-1.4.0-1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b465dd1dcd237b7b1dcd1a9048ccbf70a98c659474324fa708464c3a2533fad"}, - {file = "scikit_learn-1.4.0-1-cp311-cp311-win_amd64.whl", hash = "sha256:0db8e22c42f7980fe5eb22069b1f84c48966f3e0d23a01afde5999e3987a2501"}, - {file = "scikit_learn-1.4.0-1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7eef6ea2ed289af40e88c0be9f7704ca8b5de18508a06897c3fe21e0905efdf"}, - {file = "scikit_learn-1.4.0-1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:349669b01435bc4dbf25c6410b0892073befdaec52637d1a1d1ff53865dc8db3"}, - {file = "scikit_learn-1.4.0-1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d439c584e58434d0350701bd33f6c10b309e851fccaf41c121aed55f6851d8cf"}, - {file = "scikit_learn-1.4.0-1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e2427d9ef46477625ab9b55c1882844fe6fc500f418c3f8e650200182457bc"}, - {file = "scikit_learn-1.4.0-1-cp312-cp312-win_amd64.whl", hash = "sha256:d3d75343940e7bf9b85c830c93d34039fa015eeb341c5c0b4cd7a90dadfe00d4"}, - {file = "scikit_learn-1.4.0-1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:76986d22e884ab062b1beecdd92379656e9d3789ecc1f9870923c178de55f9fe"}, - {file = "scikit_learn-1.4.0-1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e22446ad89f1cb7657f0d849dcdc345b48e2d10afa3daf2925fdb740f85b714c"}, - {file = "scikit_learn-1.4.0-1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74812c9eabb265be69d738a8ea8d4884917a59637fcbf88a5f0e9020498bc6b3"}, - {file = "scikit_learn-1.4.0-1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad2a63e0dd386b92da3270887a29b308af4d7c750d8c4995dfd9a4798691bcc"}, - {file = "scikit_learn-1.4.0-1-cp39-cp39-win_amd64.whl", hash = "sha256:53b9e29177897c37e2ff9d4ba6ca12fdb156e22523e463db05def303f5c72b5c"}, - {file = "scikit_learn-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb8f044a8f5962613ce1feb4351d66f8d784bd072d36393582f351859b065f7d"}, - {file = "scikit_learn-1.4.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:a6372c90bbf302387792108379f1ec77719c1618d88496d0df30cb8e370b4661"}, - {file = "scikit_learn-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:785ce3c352bf697adfda357c3922c94517a9376002971bc5ea50896144bc8916"}, - {file = "scikit_learn-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0aba2a20d89936d6e72d95d05e3bf1db55bca5c5920926ad7b92c34f5e7d3bbe"}, - {file = "scikit_learn-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:2bac5d56b992f8f06816f2cd321eb86071c6f6d44bb4b1cb3d626525820d754b"}, - {file = "scikit_learn-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27ae4b0f1b2c77107c096a7e05b33458354107b47775428d1f11b23e30a73e8a"}, - {file = "scikit_learn-1.4.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5c5c62ffb52c3ffb755eb21fa74cc2cbf2c521bd53f5c04eaa10011dbecf5f80"}, - {file = "scikit_learn-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f0d2018ac6fa055dab65fe8a485967990d33c672d55bc254c56c35287b02fab"}, - {file = "scikit_learn-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a8918c415c4b4bf1d60c38d32958849a9191c2428ab35d30b78354085c7c7a"}, - {file = "scikit_learn-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:80a21de63275f8bcd7877b3e781679d2ff1eddfed515a599f95b2502a3283d42"}, - {file = "scikit_learn-1.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0f33bbafb310c26b81c4d41ecaebdbc1f63498a3f13461d50ed9a2e8f24d28e4"}, - {file = "scikit_learn-1.4.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:8b6ac1442ec714b4911e5aef8afd82c691b5c88b525ea58299d455acc4e8dcec"}, - {file = "scikit_learn-1.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05fc5915b716c6cc60a438c250108e9a9445b522975ed37e416d5ea4f9a63381"}, - {file = "scikit_learn-1.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:842b7d6989f3c574685e18da6f91223eb32301d0f93903dd399894250835a6f7"}, - {file = "scikit_learn-1.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:88bcb586fdff865372df1bc6be88bb7e6f9e0aa080dab9f54f5cac7eca8e2b6b"}, - {file = "scikit_learn-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f77674647dd31f56cb12ed13ed25b6ed43a056fffef051715022d2ebffd7a7d1"}, - {file = "scikit_learn-1.4.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:833999872e2920ce00f3a50839946bdac7539454e200eb6db54898a41f4bfd43"}, - {file = "scikit_learn-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:970ec697accaef10fb4f51763f3a7b1250f9f0553cf05514d0e94905322a0172"}, - {file = "scikit_learn-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923d778f378ebacca2c672ab1740e5a413e437fb45ab45ab02578f8b689e5d43"}, - {file = "scikit_learn-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:1d041bc95006b545b59e458399e3175ab11ca7a03dc9a74a573ac891f5df1489"}, -] - -[package.dependencies] -joblib = ">=1.2.0" -numpy = ">=1.19.5" -scipy = ">=1.6.0" -threadpoolctl = ">=2.0.0" - -[package.extras] -benchmark = ["matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "pandas (>=1.1.5)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] -examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] -tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.19.12)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.17.2)"] - -[[package]] -name = "scipy" -version = "1.13.0" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, - {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, - {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, - {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, - {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, - {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, - {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, - {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, - {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, - {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, - {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, - {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, - {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, - {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, - {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, - {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, - {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, - {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, - {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, - {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, - {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, - {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, - {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, - {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, - {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "send2trash" -version = "1.8.3" -description = "Send file to trash natively under Mac OS X, Windows and Linux" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, - {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, -] - -[package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] - -[[package]] -name = "sentencepiece" -version = "0.2.0" -description = "SentencePiece python wrapper" -optional = false -python-versions = "*" -files = [ - {file = "sentencepiece-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:188779e1298a1c8b8253c7d3ad729cb0a9891e5cef5e5d07ce4592c54869e227"}, - {file = "sentencepiece-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bed9cf85b296fa2b76fc2547b9cbb691a523864cebaee86304c43a7b4cb1b452"}, - {file = "sentencepiece-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d7b67e724bead13f18db6e1d10b6bbdc454af574d70efbb36f27d90387be1ca3"}, - {file = "sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fde4b08cfe237be4484c6c7c2e2c75fb862cfeab6bd5449ce4caeafd97b767a"}, - {file = "sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c378492056202d1c48a4979650981635fd97875a00eabb1f00c6a236b013b5e"}, - {file = "sentencepiece-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1380ce6540a368de2ef6d7e6ba14ba8f3258df650d39ba7d833b79ee68a52040"}, - {file = "sentencepiece-0.2.0-cp310-cp310-win32.whl", hash = "sha256:a1151d6a6dd4b43e552394aed0edfe9292820272f0194bd56c7c1660a0c06c3d"}, - {file = "sentencepiece-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:d490142b0521ef22bc1085f061d922a2a6666175bb6b42e588ff95c0db6819b2"}, - {file = "sentencepiece-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17982700c4f6dbb55fa3594f3d7e5dd1c8659a274af3738e33c987d2a27c9d5c"}, - {file = "sentencepiece-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c867012c0e8bcd5bdad0f791609101cb5c66acb303ab3270218d6debc68a65e"}, - {file = "sentencepiece-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd6071249c74f779c5b27183295b9202f8dedb68034e716784364443879eaa6"}, - {file = "sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f90c55a65013cbb8f4d7aab0599bf925cde4adc67ae43a0d323677b5a1c6cb"}, - {file = "sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b293734059ef656dcd65be62ff771507bea8fed0a711b6733976e1ed3add4553"}, - {file = "sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e58b47f933aca74c6a60a79dcb21d5b9e47416256c795c2d58d55cec27f9551d"}, - {file = "sentencepiece-0.2.0-cp311-cp311-win32.whl", hash = "sha256:c581258cf346b327c62c4f1cebd32691826306f6a41d8c4bec43b010dee08e75"}, - {file = "sentencepiece-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:0993dbc665f4113017892f1b87c3904a44d0640eda510abcacdfb07f74286d36"}, - {file = "sentencepiece-0.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ea5f536e32ea8ec96086ee00d7a4a131ce583a1b18d130711707c10e69601cb2"}, - {file = "sentencepiece-0.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0cb51f53b6aae3c36bafe41e86167c71af8370a039f542c43b0cce5ef24a68c"}, - {file = "sentencepiece-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3212121805afc58d8b00ab4e7dd1f8f76c203ddb9dc94aa4079618a31cf5da0f"}, - {file = "sentencepiece-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a3149e3066c2a75e0d68a43eb632d7ae728c7925b517f4c05c40f6f7280ce08"}, - {file = "sentencepiece-0.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:632f3594d3e7ac8b367bca204cb3fd05a01d5b21455acd097ea4c0e30e2f63d7"}, - {file = "sentencepiece-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f295105c6bdbb05bd5e1b0cafbd78ff95036f5d3641e7949455a3f4e5e7c3109"}, - {file = "sentencepiece-0.2.0-cp312-cp312-win32.whl", hash = "sha256:fb89f811e5efd18bab141afc3fea3de141c3f69f3fe9e898f710ae7fe3aab251"}, - {file = "sentencepiece-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a673a72aab81fef5ebe755c6e0cc60087d1f3a4700835d40537183c1703a45f"}, - {file = "sentencepiece-0.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4547683f330289ec4f093027bfeb87f9ef023b2eb6f879fdc4a8187c7e0ffb90"}, - {file = "sentencepiece-0.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd6175f7eaec7142d2bf6f6597ce7db4c9ac89acf93fcdb17410c3a8b781eeb"}, - {file = "sentencepiece-0.2.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:859ba1acde782609a0910a26a60e16c191a82bf39b5621107552c0cd79fad00f"}, - {file = "sentencepiece-0.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbbef6cc277f8f18f36959e305f10b1c620442d75addc79c21d7073ae581b50"}, - {file = "sentencepiece-0.2.0-cp36-cp36m-win32.whl", hash = "sha256:536b934e244829e3fe6c4f198652cd82da48adb9aa145c9f00889542726dee3d"}, - {file = "sentencepiece-0.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:0a91aaa3c769b52440df56fafda683b3aa48e3f2169cf7ee5b8c8454a7f3ae9b"}, - {file = "sentencepiece-0.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:787e480ca4c1d08c9985a7eb1eae4345c107729c99e9b5a9a00f2575fc7d4b4b"}, - {file = "sentencepiece-0.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4d158189eb2ecffea3a51edf6d25e110b3678ec47f1a40f2d541eafbd8f6250"}, - {file = "sentencepiece-0.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e5ca43013e8935f25457a4fca47e315780172c3e821b4b13a890668911c792"}, - {file = "sentencepiece-0.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7140d9e5a74a0908493bb4a13f1f16a401297bd755ada4c707e842fbf6f0f5bf"}, - {file = "sentencepiece-0.2.0-cp37-cp37m-win32.whl", hash = "sha256:6cf333625234f247ab357b0bd9836638405ea9082e1543d5b8408f014979dcbf"}, - {file = "sentencepiece-0.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ff88712338b01031910e8e61e7239aff3ce8869ee31a47df63cb38aadd591bea"}, - {file = "sentencepiece-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20813a68d4c221b1849c62c30e1281ea81687894d894b8d4a0f4677d9311e0f5"}, - {file = "sentencepiece-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:926ef920ae2e8182db31d3f5d081ada57804e3e1d3a8c4ef8b117f9d9fb5a945"}, - {file = "sentencepiece-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:89f65f69636b7e9c015b79dff9c9985a9bc7d19ded6f79ef9f1ec920fdd73ecf"}, - {file = "sentencepiece-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f67eae0dbe6f2d7d6ba50a354623d787c99965f068b81e145d53240198021b0"}, - {file = "sentencepiece-0.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98501e075f35dd1a1d5a20f65be26839fcb1938752ec61539af008a5aa6f510b"}, - {file = "sentencepiece-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d1d2cc4882e8d6a1adf9d5927d7716f80617fc693385661caff21888972269"}, - {file = "sentencepiece-0.2.0-cp38-cp38-win32.whl", hash = "sha256:b99a308a2e5e569031ab164b74e6fab0b6f37dfb493c32f7816225f4d411a6dd"}, - {file = "sentencepiece-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:cdb701eec783d3ec86b7cd4c763adad8eaf6b46db37ee1c36e5e6c44b3fe1b5f"}, - {file = "sentencepiece-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1e0f9c4d0a6b0af59b613175f019916e28ade076e21242fd5be24340d8a2f64a"}, - {file = "sentencepiece-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:298f21cc1366eb60311aedba3169d30f885c363ddbf44214b0a587d2908141ad"}, - {file = "sentencepiece-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f1ec95aa1e5dab11f37ac7eff190493fd87770f7a8b81ebc9dd768d1a3c8704"}, - {file = "sentencepiece-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b06b70af54daa4b4904cbb90b4eb6d35c9f3252fdc86c9c32d5afd4d30118d8"}, - {file = "sentencepiece-0.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e37bac44dd6603388cb598c64ff7a76e41ca774646f21c23aadfbf5a2228ab"}, - {file = "sentencepiece-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0461324897735512a32d222e3d886e24ad6a499761952b6bda2a9ee6e4313ea5"}, - {file = "sentencepiece-0.2.0-cp39-cp39-win32.whl", hash = "sha256:38aed822fb76435fa1f12185f10465a94ab9e51d5e8a9159e9a540ce926f0ffd"}, - {file = "sentencepiece-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8cf876516548b5a1d6ac4745d8b554f5c07891d55da557925e5c13ff0b4e6ad"}, - {file = "sentencepiece-0.2.0.tar.gz", hash = "sha256:a52c19171daaf2e697dc6cbe67684e0fa341b1248966f6aebb541de654d15843"}, -] - -[[package]] -name = "sentry-sdk" -version = "1.45.0" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = "*" -files = [ - {file = "sentry-sdk-1.45.0.tar.gz", hash = "sha256:509aa9678c0512344ca886281766c2e538682f8acfa50fd8d405f8c417ad0625"}, - {file = "sentry_sdk-1.45.0-py2.py3-none-any.whl", hash = "sha256:1ce29e30240cc289a027011103a8c83885b15ef2f316a60bcc7c5300afa144f1"}, -] - -[package.dependencies] -certifi = "*" -fastapi = {version = ">=0.79.0", optional = true, markers = "extra == \"fastapi\""} -urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -celery-redbeat = ["celery-redbeat (>=2)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -loguru = ["loguru (>=0.5)"] -openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] -pure-eval = ["asttokens", "executing", "pure-eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=5)"] - -[[package]] -name = "sigtools" -version = "4.0.1" -description = "Utilities for working with inspect.Signature objects." -optional = false -python-versions = ">=3.6" -files = [ - {file = "sigtools-4.0.1-py2.py3-none-any.whl", hash = "sha256:d216b4cf920bbab0fce636ddc429ed8463a5b533d9e1492acb45a2a1bc36ac6c"}, - {file = "sigtools-4.0.1.tar.gz", hash = "sha256:4b8e135a9cd4d2ea00da670c093372d74e672ba3abb87f4c98d8e73dea54445c"}, -] - -[package.dependencies] -attrs = "*" - -[package.extras] -test = ["coverage", "mock", "repeated-test (>=2.2.1)", "sphinx"] -tests = ["coverage", "mock", "repeated-test (>=2.2.1)", "sphinx"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "soupsieve" -version = "2.5" -description = "A modern CSS selector implementation for Beautiful Soup." -optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "starlette" -version = "0.37.2" -description = "The little ASGI library that shines." -optional = false -python-versions = ">=3.8" -files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, -] - -[package.dependencies] -anyio = ">=3.4.0,<5" - -[package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] - -[[package]] -name = "starlette-exporter" -version = "0.17.1" -description = "Prometheus metrics exporter for Starlette applications." -optional = false -python-versions = "*" -files = [ - {file = "starlette_exporter-0.17.1-py3-none-any.whl", hash = "sha256:40f8667a6cf6c569c4eb090a7cc0bbd426ff5a732b8fea232100064cec8256d0"}, - {file = "starlette_exporter-0.17.1.tar.gz", hash = "sha256:c9979c87efdef956e880075f4358deec234208c62d71448a58b7a3a45056209d"}, -] - -[package.dependencies] -prometheus-client = ">=0.12" -starlette = "*" - -[[package]] -name = "sympy" -version = "1.12" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, - {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, -] - -[package.dependencies] -mpmath = ">=0.19" - -[[package]] -name = "synchronicity" -version = "0.6.7" -description = "Export blocking and async library versions from a single async implementation" -optional = false -python-versions = ">=3.8" -files = [ - {file = "synchronicity-0.6.7-py3-none-any.whl", hash = "sha256:188397ea96ebbff20d7990259af30f7679e04aebccb89e14eb6cbc3243543f4b"}, -] - -[package.dependencies] -sigtools = "4.0.1" -typing-extensions = ">=4.6" - -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -name = "termcolor" -version = "2.4.0" -description = "ANSI color formatting for output in terminal" -optional = false -python-versions = ">=3.8" -files = [ - {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, - {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, -] - -[package.extras] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "terminado" -version = "0.18.1" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, - {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, -] - -[package.dependencies] -ptyprocess = {version = "*", markers = "os_name != \"nt\""} -pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} -tornado = ">=6.1.0" - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] -typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] - -[[package]] -name = "threadpoolctl" -version = "3.4.0" -description = "threadpoolctl" -optional = false -python-versions = ">=3.8" -files = [ - {file = "threadpoolctl-3.4.0-py3-none-any.whl", hash = "sha256:8f4c689a65b23e5ed825c8436a92b818aac005e0f3715f6a1664d7c7ee29d262"}, - {file = "threadpoolctl-3.4.0.tar.gz", hash = "sha256:f11b491a03661d6dd7ef692dd422ab34185d982466c49c8f98c8f716b5c93196"}, -] - -[[package]] -name = "tinycss2" -version = "1.2.1" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, - {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] - -[[package]] -name = "tokenizers" -version = "0.15.2" -description = "" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, - {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, - {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, - {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, - {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, - {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, - {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, - {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, - {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, - {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, - {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, - {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, - {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, - {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, - {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, - {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, - {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, - {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, - {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, - {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, - {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, - {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, - {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, - {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, - {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, - {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, - {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, - {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, - {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, - {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, - {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, - {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, - {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, - {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, - {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, - {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, - {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, - {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, - {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, - {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, - {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, - {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, - {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, - {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, - {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, - {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, - {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, - {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, - {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, - {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, - {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, - {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, - {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, - {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, - {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, - {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, - {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, - {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, - {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, - {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, - {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, - {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, - {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, - {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, - {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, - {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, - {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, - {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, - {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, - {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, - {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, - {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, - {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, - {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, - {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, - {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, - {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, - {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, - {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, - {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, - {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, - {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, - {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, - {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, - {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, - {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, - {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, - {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, - {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, - {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, - {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, - {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, -] - -[package.dependencies] -huggingface_hub = ">=0.16.4,<1.0" - -[package.extras] -dev = ["tokenizers[testing]"] -docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] -testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "torch" -version = "2.1.2" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "torch-2.1.2-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:3a871edd6c02dae77ad810335c0833391c1a4ce49af21ea8cf0f6a5d2096eea8"}, - {file = "torch-2.1.2-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:bef6996c27d8f6e92ea4e13a772d89611da0e103b48790de78131e308cf73076"}, - {file = "torch-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:0e13034fd5fb323cbbc29e56d0637a3791e50dd589616f40c79adfa36a5a35a1"}, - {file = "torch-2.1.2-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:d9b535cad0df3d13997dbe8bd68ac33e0e3ae5377639c9881948e40794a61403"}, - {file = "torch-2.1.2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:f9a55d55af02826ebfbadf4e9b682f0f27766bc33df8236b48d28d705587868f"}, - {file = "torch-2.1.2-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:a6ebbe517097ef289cc7952783588c72de071d4b15ce0f8b285093f0916b1162"}, - {file = "torch-2.1.2-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:8f32ce591616a30304f37a7d5ea80b69ca9e1b94bba7f308184bf616fdaea155"}, - {file = "torch-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e0ee6cf90c8970e05760f898d58f9ac65821c37ffe8b04269ec787aa70962b69"}, - {file = "torch-2.1.2-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:76d37967c31c99548ad2c4d3f2cf191db48476f2e69b35a0937137116da356a1"}, - {file = "torch-2.1.2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:e2d83f07b4aac983453ea5bf8f9aa9dacf2278a8d31247f5d9037f37befc60e4"}, - {file = "torch-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f41fe0c7ecbf903a568c73486139a75cfab287a0f6c17ed0698fdea7a1e8641d"}, - {file = "torch-2.1.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e3225f47d50bb66f756fe9196a768055d1c26b02154eb1f770ce47a2578d3aa7"}, - {file = "torch-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33d59cd03cb60106857f6c26b36457793637512998666ee3ce17311f217afe2b"}, - {file = "torch-2.1.2-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:8e221deccd0def6c2badff6be403e0c53491805ed9915e2c029adbcdb87ab6b5"}, - {file = "torch-2.1.2-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:05b18594f60a911a0c4f023f38a8bda77131fba5fd741bda626e97dcf5a3dd0a"}, - {file = "torch-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:9ca96253b761e9aaf8e06fb30a66ee301aecbf15bb5a303097de1969077620b6"}, - {file = "torch-2.1.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d93ba70f67b08c2ae5598ee711cbc546a1bc8102cef938904b8c85c2089a51a0"}, - {file = "torch-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:255b50bc0608db177e6a3cc118961d77de7e5105f07816585fa6f191f33a9ff3"}, - {file = "torch-2.1.2-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6984cd5057c0c977b3c9757254e989d3f1124f4ce9d07caa6cb637783c71d42a"}, - {file = "torch-2.1.2-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:bc195d7927feabc0eb7c110e457c955ed2ab616f3c7c28439dd4188cf589699f"}, -] - -[package.dependencies] -filelock = "*" -fsspec = "*" -jinja2 = "*" -networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.18.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -sympy = "*" -triton = {version = "2.1.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -typing-extensions = "*" - -[package.extras] -dynamo = ["jinja2"] -opt-einsum = ["opt-einsum (>=3.3)"] - -[[package]] -name = "tornado" -version = "6.4" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = false -python-versions = ">= 3.8" -files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, -] - -[[package]] -name = "tqdm" -version = "4.66.2" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, - {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "traitlets" -version = "5.14.2" -description = "Traitlets Python configuration system" -optional = false -python-versions = ">=3.8" -files = [ - {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, - {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, -] - -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] - -[[package]] -name = "transformers" -version = "4.39.3" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "transformers-4.39.3-py3-none-any.whl", hash = "sha256:7838034a12cca3168247f9d2d1dba6724c9de3ae0f73a108258c6b8fc5912601"}, - {file = "transformers-4.39.3.tar.gz", hash = "sha256:2586e5ff4150f122716fc40f5530e92871befc051848fbe82600969c535b762d"}, -] - -[package.dependencies] -filelock = "*" -huggingface-hub = ">=0.19.3,<1.0" -numpy = ">=1.17" -packaging = ">=20.0" -pyyaml = ">=5.1" -regex = "!=2019.12.17" -requests = "*" -safetensors = ">=0.4.1" -tokenizers = ">=0.14,<0.19" -tqdm = ">=4.27" - -[package.extras] -accelerate = ["accelerate (>=0.21.0)"] -agents = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch"] -all = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.21.0)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.21.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.14,<0.19)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "beautifulsoup4", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "ray[tune] (>=2.7.0)", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorboard", "timeout-decorator", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow (>=10.0.1,<=15.0)", "accelerate (>=0.21.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune] (>=2.7.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx", "timm", "tokenizers (>=0.14,<0.19)", "torch", "torchaudio", "torchvision"] -docs-specific = ["hf-doc-builder"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -ftfy = ["ftfy"] -integrations = ["optuna", "ray[tune] (>=2.7.0)", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6,<0.15.0)"] -onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] -onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<2.0.0)"] -ray = ["ray[tune] (>=2.7.0)"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] -sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic", "starlette", "uvicorn"] -sigopt = ["sigopt"] -sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pydantic", "pytest (>=7.2.0,<8.0.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (==0.1.5)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "tensorboard", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.16)", "tensorflow-text (<2.16)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] -timm = ["timm"] -tokenizers = ["tokenizers (>=0.14,<0.19)"] -torch = ["accelerate (>=0.21.0)", "torch"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (>=10.0.1,<=15.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.19.3,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.14,<0.19)", "torch", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (>=10.0.1,<=15.0)"] - -[[package]] -name = "triton" -version = "2.1.0" -description = "A language and compiler for custom Deep Learning operations" -optional = false -python-versions = "*" -files = [ - {file = "triton-2.1.0-0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:66439923a30d5d48399b08a9eae10370f6c261a5ec864a64983bae63152d39d7"}, - {file = "triton-2.1.0-0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:919b06453f0033ea52c13eaf7833de0e57db3178d23d4e04f9fc71c4f2c32bf8"}, - {file = "triton-2.1.0-0-cp37-cp37m-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ae4bb8a91de790e1866405211c4d618379781188f40d5c4c399766914e84cd94"}, - {file = "triton-2.1.0-0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39f6fb6bdccb3e98f3152e3fbea724f1aeae7d749412bbb1fa9c441d474eba26"}, - {file = "triton-2.1.0-0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21544e522c02005a626c8ad63d39bdff2f31d41069592919ef281e964ed26446"}, - {file = "triton-2.1.0-0-pp37-pypy37_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:143582ca31dd89cd982bd3bf53666bab1c7527d41e185f9e3d8a3051ce1b663b"}, - {file = "triton-2.1.0-0-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82fc5aeeedf6e36be4e4530cbdcba81a09d65c18e02f52dc298696d45721f3bd"}, - {file = "triton-2.1.0-0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:81a96d110a738ff63339fc892ded095b31bd0d205e3aace262af8400d40b6fa8"}, -] - -[package.dependencies] -filelock = "*" - -[package.extras] -build = ["cmake (>=3.18)", "lit"] -tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)"] -tutorials = ["matplotlib", "pandas", "tabulate"] - -[[package]] -name = "typer" -version = "0.9.4" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -optional = false -python-versions = ">=3.6" -files = [ - {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, - {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, -] - -[package.dependencies] -click = ">=7.1.1,<9.0.0" -typing-extensions = ">=3.7.4.3" - -[package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] - -[[package]] -name = "types-certifi" -version = "2021.10.8.3" -description = "Typing stubs for certifi" -optional = false -python-versions = "*" -files = [ - {file = "types-certifi-2021.10.8.3.tar.gz", hash = "sha256:72cf7798d165bc0b76e1c10dd1ea3097c7063c42c21d664523b928e88b554a4f"}, - {file = "types_certifi-2021.10.8.3-py3-none-any.whl", hash = "sha256:b2d1e325e69f71f7c78e5943d410e650b4707bb0ef32e4ddf3da37f54176e88a"}, -] - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20240316" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, -] - -[[package]] -name = "types-toml" -version = "0.10.8.20240310" -description = "Typing stubs for toml" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, - {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, -] - -[[package]] -name = "typing-extensions" -version = "4.11.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "uri-template" -version = "1.3.0" -description = "RFC 6570 URI Template Processor" -optional = false -python-versions = ">=3.7" -files = [ - {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, - {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, -] - -[package.extras] -dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] - -[[package]] -name = "urllib3" -version = "2.2.1" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "uvicorn" -version = "0.29.0" -description = "The lightning-fast ASGI server." -optional = false -python-versions = ">=3.8" -files = [ - {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, - {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, -] - -[package.dependencies] -click = ">=7.0" -colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} -h11 = ">=0.8" -httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} -python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} -watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} -websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} - -[package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] - -[[package]] -name = "uvloop" -version = "0.19.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, - {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, - {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, - {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, - {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, - {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, - {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, - {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, - {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, - {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, - {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, - {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, - {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, - {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, - {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, - {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, - {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, -] - -[package.extras] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - -[[package]] -name = "vllm" -version = "0.3.3" -description = "A high-throughput and memory-efficient inference and serving engine for LLMs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "vllm-0.3.3-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:a49f2de8088b36d74f0a29cfaf0d23c09ac01b707aff1a6b606c0ba991074285"}, - {file = "vllm-0.3.3-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:12676b8fae922fff52bdb029b5295692a1393be800e5eec8fe3ee8eef7c080fd"}, - {file = "vllm-0.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fdbe8d3727312472159921ace712007749600ca557860d03e16a52ac1127c163"}, - {file = "vllm-0.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:128d5906cf10c66c0fc8553344766cbfede6d924ae4c02589d6b66ab556b954a"}, - {file = "vllm-0.3.3.tar.gz", hash = "sha256:24b70159bbcfd441bfa9d3e226ba8f5db74837c5325fea4a2104cf46c5d8246e"}, -] - -[package.dependencies] -cupy-cuda12x = "12.1.0" -fastapi = "*" -ninja = "*" -numpy = "*" -outlines = ">=0.0.27" -prometheus-client = ">=0.18.0" -psutil = "*" -pydantic = ">=2.0" -pynvml = "11.5.0" -ray = ">=2.9" -sentencepiece = "*" -torch = "2.1.2" -transformers = ">=4.38.0" -triton = ">=2.1.0" -uvicorn = {version = "*", extras = ["standard"]} -xformers = "0.0.23.post1" - -[[package]] -name = "watchfiles" -version = "0.21.0" -description = "Simple, modern and high performance file watching and code reload in python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, - {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, - {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, - {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, - {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, - {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, - {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, - {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, - {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, - {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, - {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, - {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = "sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, - {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, - {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, - {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, - {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, - {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, - {file = "watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, - {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, - {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, - {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, - {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, - {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = "sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, - {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, - {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, - {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, - {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, - {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, - {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, - {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, - {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, - {file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, -] - -[package.dependencies] -anyio = ">=3.0.0" - -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "webcolors" -version = "1.13" -description = "A library for working with the color formats defined by HTML and CSS." -optional = false -python-versions = ">=3.7" -files = [ - {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, - {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, -] - -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "websocket-client" -version = "1.7.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "websockets" -version = "12.0" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, -] - -[[package]] -name = "widgetsnbextension" -version = "4.0.10" -description = "Jupyter interactive widgets for Jupyter Notebook" -optional = false -python-versions = ">=3.7" -files = [ - {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, - {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, -] - -[[package]] -name = "xformers" -version = "0.0.23.post1" -description = "XFormers: A collection of composable Transformer building blocks." -optional = false -python-versions = ">=3.7" -files = [ - {file = "xformers-0.0.23.post1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:f3491e4b1077314a4535fc78c36b592a13b794eefffaa308db879f7147424a96"}, - {file = "xformers-0.0.23.post1-cp310-cp310-win_amd64.whl", hash = "sha256:ef0744c5d1abcad7f8692b5a30ee72a71215451cbde020e2fb37af20f46ba76f"}, - {file = "xformers-0.0.23.post1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:2aea20e84852fafe87f4103b4adfe5f324915defa403e98fadc5a97f333f7105"}, - {file = "xformers-0.0.23.post1-cp311-cp311-win_amd64.whl", hash = "sha256:372995c113c3505648f0c2d2daac53a6df60a22f30eae98e47daca5efd38fe71"}, - {file = "xformers-0.0.23.post1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:17e26c66cd25ad529705228f62744ed3f86f0fe3c54fa4e23c78cd7da7a71776"}, - {file = "xformers-0.0.23.post1-cp38-cp38-win_amd64.whl", hash = "sha256:aad762aebfe7ea3f6b9132afbf5ae88cdaf87d0c377d199dfee193e1a72d0d24"}, - {file = "xformers-0.0.23.post1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:a117e4cc835d9a19c653d79b5c66e37c72f713241e2d85b6561a15006f84b6e6"}, - {file = "xformers-0.0.23.post1-cp39-cp39-win_amd64.whl", hash = "sha256:e08e4ebbd9fbfe9545de4028b7f604d21dc4e301dc651b3fc1bb95ae6797524f"}, - {file = "xformers-0.0.23.post1.tar.gz", hash = "sha256:b443b158bd7b5275b485d2c6aee94ebc2152878fd784e379b1c8bcb1d67f3b81"}, -] - -[package.dependencies] -numpy = "*" -torch = "2.1.2" - -[[package]] -name = "xxhash" -version = "3.4.1" -description = "Python binding for xxHash" -optional = false -python-versions = ">=3.7" -files = [ - {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, - {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, - {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, - {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, - {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, - {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, - {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, - {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, - {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, - {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, - {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, - {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, - {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, - {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, - {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, - {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, - {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, - {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, - {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, - {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, - {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, - {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, - {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, - {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, - {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, - {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, - {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, - {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, - {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, - {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, - {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, - {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, - {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, - {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, - {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, - {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, - {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, - {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, - {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, - {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, -] - -[[package]] -name = "yarl" -version = "1.9.4" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[metadata] -lock-version = "2.0" -python-versions = ">=3.10,<3.11" -content-hash = "82c667b3887cd7e7ebf4bfda4ca5b1ab42a4b925f75c15b3961eee3e9aa0f9d8" diff --git a/model-serving/pyproject.toml b/model-serving/pyproject.toml deleted file mode 100644 index 5faf1f1c1..000000000 --- a/model-serving/pyproject.toml +++ /dev/null @@ -1,64 +0,0 @@ -[tool.poetry] -name = "model-api" -version = "0.1.0" -description = "" -authors = ["Julep Developers "] -readme = "README.md" -packages = [{include = "model_api"}] - -[tool.poetry.dependencies] -python = ">=3.10,<3.11" -uvicorn = ">=0.25.0,<1.0.0" -fastapi = ">=0.108.0,<1.0.0" -torch = "==2.1.2" -environs = ">=10.0.0,<11.0.0" -pynvml = ">=11.5.0,<12.0.0" -psutil = ">=5.9.8,<6.0.0" -starlette-exporter = "^0.17.1" -sentry-sdk = {extras = ["fastapi"], version = "^1.39.1"} -vllm = "^0.3.3" -aioprometheus = {extras = ["starlette"], version = "^23.12.0"} -lm-format-enforcer = "^0.8.3" -interegular = "^0.3.3" -pydantic = {extras = ["email"], version = ">=2.0.1,<3.0.0"} -scikit-learn = "=1.4.0" - - -[tool.poetry.group.dev.dependencies] -black = "^24.4.0" -pytest = "^8.0.0" -pytest-mock = "^3.12.0" -modal = "^0.62.24" -ipython = "^8.21.0" -ruff = "^0.1.9" -poethepoet = "^0.25.1" -pytype = ">=2024.4.11" -julep = "^0.2.1" -jupyterlab = "^4.1.1" -ipywidgets = "^8.1.2" -matplotlib = "^3.8.2" -ipympl = "^0.9.3" -mplcursors = "^0.5.3" -datasets = "^2.17.0" -imbalanced-learn = "^0.12.0" -pyjwt = "^2.8.0" -fire = "^0.5.0" - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - -[tool.poe.tasks] -format = "black ." -lint = "ruff model_api/**/*.py tests/**/*.py --fix --unsafe-fixes" -typecheck = "pytype -j auto -k model_api" -check = [ - "format", - "lint", - "typecheck", -] - -test = [ - "check", - { cmd = "pytest" } -] diff --git a/model-serving/test_chat_template.py b/model-serving/test_chat_template.py deleted file mode 100644 index 345875f8d..000000000 --- a/model-serving/test_chat_template.py +++ /dev/null @@ -1,727 +0,0 @@ -from transformers import AutoTokenizer - -model_name = "julep-ai/samantha-1-turbo" -tokenizer = AutoTokenizer.from_pretrained(model_name) - - -# Set the template -template_path = "./model_api/chat_template.jinja" -with open(template_path, "r") as f: - chat_template = f.read() - - -def to_prompt(messages, chat_template=chat_template, **kwargs): - prompt = tokenizer.apply_chat_template( - messages, chat_template=chat_template, tokenize=False, **kwargs - ) - - return prompt - - -def test_function_call_none_last_user_continue(): - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - {"role": "assistant", "name": "Samantha", "continue": True}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me -""" - ) - - -def test_function_call_none_last_not_continue(): - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -def test_function_call_auto_functions_not_passed(): - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -def test_function_call_none_functions_not_passed(): - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -def test_function_call_auto_functions_passed(): - functions = [ - { - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ] - - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "system", "name": "functions", "content": functions}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "description": "Generate an anagram of a given word", - "name": "generate_anagram", - "parameters": { - "properties": { - "word": { - "description": "The word to generate an anagram of", - "type": "string" - } - }, - "required": [ - "word" - ], - "type": "object" - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>""" - ) - - -# def test_function_call_none_functions_passed(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# ] -# functions = [ -# { -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ] -# prompt = to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call=None, -# ) -# assert ( -# prompt -# == """<|im_start|>situation -# I am talking to John<|im_end|> -# <|im_start|>functions -# Available functions: - -# { -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of" -# } -# }, -# "required": [ -# "word" -# ] -# } -# }<|im_end|> -# <|im_start|>me (Samantha) -# Hey John<|im_end|> -# <|im_start|>person (John) -# Hey!<|im_end|> -# <|im_start|>""" -# ) - - -def test_function_call_none_last_continue(): - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - {"role": "assistant", "name": "Samantha", "content": "Hi", "continue": True}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>me -Hi""" - ) - - -def test_function_call_none_last_continue_function_call(): - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - {"role": "function_call", "content": "{}", "continue": True}, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - assert ( - prompt - == """<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>function_call -{}""" - ) - - -# def test_function_call_auto_last_not_continue(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# ] -# functions = [ -# ({ -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ), -# ({ -# "name": "other_func", -# "description": "Logic", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ), -# ] -# prompt = to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call="auto", -# ) -# assert ( -# prompt -# == """<|im_start|>situation -# I am talking to John<|im_end|> -# <|im_start|>functions -# Available functions: -# -# { -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of" -# } -# }, -# "required": [ -# "word" -# ] -# } -# } -# { -# "name": "other_func", -# "description": "Logic", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of" -# } -# }, -# "required": [ -# "word" -# ] -# } -# }<|im_end|> -# <|im_start|>me (Samantha) -# Hey John<|im_end|> -# <|im_start|>person (John) -# Hey!<|im_end|> -# <|im_start|>""" -# ) - - -# def test_function_call_auto_last_continue(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# {"role": "assistant", "name": "Samantha", "continue": True}, -# ] -# functions = [ -# ({ -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ) -# ] -# prompt = to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call="auto", -# ) -# assert ( -# prompt -# == """<|im_start|>situation -# I am talking to John<|im_end|> -# <|im_start|>functions -# Available functions: -# -# { -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of" -# } -# }, -# "required": [ -# "word" -# ] -# } -# }<|im_end|> -# <|im_start|>me (Samantha) -# Hey John<|im_end|> -# <|im_start|>person (John) -# Hey!<|im_end|> -# <|im_start|>me (Samantha) -# """ -# ) - - -# def test_function_call_auto_last_continue_function_call(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# {"role": "function_call", "continue": True}, -# ] -# functions = [ -# ({ -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ) -# ] -# prompt = to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call="auto", -# ) -# assert ( -# prompt -# == """<|im_start|>situation -# I am talking to John<|im_end|> -# <|im_start|>functions -# Available functions: -# -# { -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of" -# } -# }, -# "required": [ -# "word" -# ] -# } -# }<|im_end|> -# <|im_start|>me (Samantha) -# Hey John<|im_end|> -# <|im_start|>person (John) -# Hey!<|im_end|> -# <|im_start|>function_call -# """ -# ) - - -# def test_function_call_func_name_last_not_continue(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# ] -# functions = [ -# ({ -# "name": "other_func", -# "description": "Logic", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ), -# ({ -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ), -# ] -# prompt = to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call=FunctionCall(**{"name": "generate_anagram"}, -# ) -# assert ( -# prompt -# == """<|im_start|>situation -# I am talking to John<|im_end|> -# <|im_start|>functions -# Available functions: -# -# { -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of" -# } -# }, -# "required": [ -# "word" -# ] -# } -# }<|im_end|> -# <|im_start|>me (Samantha) -# Hey John<|im_end|> -# <|im_start|>person (John) -# Hey!<|im_end|> -# <|im_start|>function_call -# {"name": "generate_anagram",""" -# ) - - -# def test_function_call_func_name_last_not_continue_invalid_function_name(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# ] -# functions = [ -# ({ -# "name": "other_func", -# "description": "Logic", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ), -# ({ -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ), -# ] -# with pytest.raises(InvalidFunctionName) as e_info: -# to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call=FunctionCall(**{"name": "unknown"}, -# ) -# assert e_info.value.args[0] == "Invalid function name: unknown" - - -# def test_function_call_func_name_last_continue(): -# messages = [ -# {"role": "system", "name": "situation", "content": "I am talking to John"}, -# {"role": "assistant", "name": "Samantha", "content": "Hey John"}, -# {"role": "user", "name": "John", "content": "Hey!"}, -# {"role": "assistant", "name": "Samantha", "continue": True}, -# ] -# functions = [ -# ({ -# "name": "generate_anagram", -# "description": "Generate an anagram of a given word", -# "parameters": { -# "type": "object", -# "properties": { -# "word": { -# "type": "string", -# "description": "The word to generate an anagram of", -# } -# }, -# "required": ["word"], -# }, -# } -# ) -# ] -# with pytest.raises(InvalidPromptException) as e_info: -# to_prompt( -# messages, -# bos="<|im_start|>", -# eos="<|im_end|>", -# functions=functions, -# function_call=FunctionCall(**{"name": "generate_anagram"}, -# ) -# assert e_info.value.args[0] == ( -# "Invalid prompt format: Conflicting instructions, " -# "please remove the last instruction with 'continue' " -# "flag set to 'true' or set the flag to 'false'. " -# "You can either remove `functions` and/or `function_call` parameters." -# ) - - -def test_function_call_func_name_last_continue_function_call(): - functions = [ - { - "name": "generate_anagram", - "description": "Generate an anagram of a given word", - "parameters": { - "type": "object", - "properties": { - "word": { - "type": "string", - "description": "The word to generate an anagram of", - } - }, - "required": ["word"], - }, - } - ] - - messages = [ - {"role": "system", "name": "situation", "content": "I am talking to John"}, - {"role": "system", "name": "functions", "content": functions}, - {"role": "assistant", "name": "Samantha", "content": "Hey John"}, - {"role": "user", "name": "John", "content": "Hey!"}, - { - "role": "function_call", - "content": '{"name": "generate_anagram", ', - "continue": True, - }, - ] - - prompt = to_prompt(messages, add_generation_prompt=True) - - expected = """\ -<|im_start|>situation -I am talking to John<|im_end|> -<|im_start|>functions -Available functions: - -{ - "description": "Generate an anagram of a given word", - "name": "generate_anagram", - "parameters": { - "properties": { - "word": { - "description": "The word to generate an anagram of", - "type": "string" - } - }, - "required": [ - "word" - ], - "type": "object" - } -}<|im_end|> -<|im_start|>me (Samantha) -Hey John<|im_end|> -<|im_start|>person (John) -Hey!<|im_end|> -<|im_start|>function_call -{"name": "generate_anagram",""" - - assert prompt == expected - - -if __name__ == "__main__": - test_function_call_none_last_user_continue() - test_function_call_none_last_not_continue() - test_function_call_auto_functions_not_passed() - test_function_call_none_functions_not_passed() - test_function_call_none_last_continue() - test_function_call_none_last_continue_function_call() - test_function_call_auto_functions_passed() - test_function_call_func_name_last_continue_function_call() diff --git a/model-serving/tests/__init__.py b/model-serving/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/model-serving/tests/fixtures.py b/model-serving/tests/fixtures.py deleted file mode 100644 index 013ecd90e..000000000 --- a/model-serving/tests/fixtures.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import uuid -import pytest -from fastapi.testclient import TestClient - -auth_key = "myauthkey" -os.environ["API_KEY"] = auth_key -os.environ["TEMPERATURE_SCALING_FACTOR"] = "1.0" -os.environ["TEMPERATURE_SCALING_POWER"] = "1.0" -MODEL_NAME = os.environ.get("MODEL_NAME", "julep-ai/samantha-1-turbo") - -from model_api.web import create_app # noqa: E402 - -args = [ - "--model", - MODEL_NAME, - "--trust-remote-code", - "--max-model-len", - "1024", - "--enforce-eager", - "--dtype", - "bfloat16", - "--gpu-memory-utilization", - "0.97", - "--max-num-seqs", - "1", -] - -app = create_app(args) - - -@pytest.fixture(scope="session") -def unauthorized_client(): - return TestClient(app) - - -@pytest.fixture(scope="session") -def client(): - return TestClient(app, headers={"X-Auth-Key": auth_key}) - - -@pytest.fixture -def request_id(): - return str(uuid.uuid4()) diff --git a/model-serving/tests/test_chat_completions.py b/model-serving/tests/test_chat_completions.py deleted file mode 100644 index 545746721..000000000 --- a/model-serving/tests/test_chat_completions.py +++ /dev/null @@ -1,691 +0,0 @@ -# ruff: noqa: F401, F811 -import pytest -from pytest_mock import mocker - -from model_api.logits_processors import ( - drop_disallowed_start_tags, - fix_function_call_prediction, -) -from model_api.protocol import SamplingParams -import model_api.web -from tests.fixtures import client, unauthorized_client, request_id, MODEL_NAME - - -def test_security(unauthorized_client): - response = unauthorized_client.post("/v1/chat/completions") - assert response.status_code == 403 - - -def test_check_model(client): - body = dict( - model="some_nonexistent_model", - messages=[], - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert response.status_code == 404 - - -def test_logit_bias_not_supported(client): - body = dict( - model=MODEL_NAME, - logit_bias={"a": 1.0}, - messages=[], - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert response.status_code == 400 - - -def test_functions_and_tools(client): - body = dict( - model=MODEL_NAME, - functions=[ - { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - ], - tools=[ - { - "type": "function", - "id": "tool-1", - "function": { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - } - ], - messages=[], - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert response.status_code == 400 - - -def test_do_not_insert_default_situation_if_messages_empty(client, request_id, mocker): - expected_prompt = "" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - body = dict( - model=MODEL_NAME, - messages=[], - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -def test_insert_default_situation(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>me -""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": "hi", - } - ], - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -def test_escape_special_tokens(client, request_id, mocker): - st = list( - model_api.web.engine.engine.tokenizer.tokenizer.special_tokens_map.values() - )[0] - if isinstance(st, list): - st = st[0] - expected_prompt = f"""<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -{st[0]} {st[1:]}<|im_end|> -<|im_start|>me -""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": st, - } - ], - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -def test_function_called_by_name(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string" - } -}<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>function_call -{"name": "func_name",""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": "hi", - } - ], - functions=[ - { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - ], - function_call={"name": "func_name"}, - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -def test_function_is_none(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>me -""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": "hi", - } - ], - functions=[ - { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - ], - function_call="none", - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -@pytest.mark.skip(reason="fix AsyncEngineDeadError") -def test_function_is_auto(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string" - } -}<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": "hi", - } - ], - functions=[ - { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - ], - function_call="auto", - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -# def test_rescale_temperature(client, request_id, mocker): -# expected_prompt = f"""<|im_start|>situation -# You are a helpful AI Assistant<|im_end|> -# <|im_start|>person (User) -# hi<|im_end|> -# <|im_start|>me -# """ -# temperature = 0.7 -# expected_sampling_params = SamplingParams( -# n=1, -# best_of=1, -# presence_penalty=0.0, -# frequency_penalty=0.75, -# repetition_penalty=1.0, -# temperature=0.0, -# top_p=0.99, -# top_k=-1, -# min_p=0.0, -# seed=None, -# use_beam_search=False, -# length_penalty=1.0, -# early_stopping=False, -# stop=["<", "<|"], -# stop_token_ids=[], -# include_stop_str_in_output=False, -# ignore_eos=False, -# max_tokens=1, -# logprobs=None, -# prompt_logprobs=None, -# skip_special_tokens=True, -# spaces_between_special_tokens=False, -# ) - -# mocker.patch("model_api.web.random_uuid", return_value=request_id) -# spy = mocker.spy(model_api.web.engine, "generate") - -# body = dict( -# model=MODEL_NAME, -# temperature=temperature, -# messages=[ -# { -# "role": "user", -# "name": "User", -# "content": "hi", -# } -# ], -# max_tokens=1, -# stop=["<", "<|"], -# frequency_penalty=0.75, -# ) -# response = client.post( -# "/v1/chat/completions", -# json=body, -# ) -# assert spy.call_count == 1 -# spy.assert_called_once_with( -# expected_prompt, expected_sampling_params, f"cmpl-{request_id}" -# ) -# assert response.status_code == 200 - - -@pytest.mark.skip(reason="fix AsyncEngineDeadError") -def test_logits_processor_fix_function_call_prediction(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>functions -Available functions: - -{ - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string" - } -}<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - logits_processors=[fix_function_call_prediction], - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": "hi", - } - ], - functions=[ - { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - ], - function_call="auto", - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -@pytest.mark.skip(reason="fix SamplingParams comparison") -def test_logits_processor_drop_disallowed_start_tags(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>me -""" - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - logits_processors=[drop_disallowed_start_tags], - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL_NAME, - messages=[ - { - "role": "user", - "name": "User", - "content": "hi", - } - ], - functions=[ - { - "name": "func_name", - "description": "func_desc", - "parameters": { - "param1": "string", - }, - }, - ], - function_call="none", - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/chat/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 diff --git a/model-serving/tests/test_completions.py b/model-serving/tests/test_completions.py deleted file mode 100644 index c3f1e218c..000000000 --- a/model-serving/tests/test_completions.py +++ /dev/null @@ -1,263 +0,0 @@ -# ruff: noqa: F401, F811 -from pytest_mock import mocker - -from model_api.logits_processors import drop_disallowed_start_tags -from model_api.protocol import SamplingParams -import model_api.web -from tests.fixtures import client, unauthorized_client, request_id, MODEL - - -def test_security(unauthorized_client): - response = unauthorized_client.post("/v1/completions") - assert response.status_code == 403 - - -def test_check_model(client): - body = dict( - model="some_nonexistent_model", - prompt="some text", - ) - response = client.post( - "/v1/completions", - json=body, - ) - assert response.status_code == 404 - - -def test_logit_bias_not_supported(client): - body = dict( - model=MODEL, - logit_bias={"a": 1.0}, - prompt="some text", - ) - response = client.post( - "/v1/completions", - json=body, - ) - assert response.status_code == 400 - - -def test_remove_last_space(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>me """ - prompt = expected_prompt - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL, - prompt=prompt, - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/completions", - json=body, - ) - assert spy.call_count == 1 - - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -def test_remove_last_space_2(client, request_id, mocker): - st = list( - model_api.web.engine.engine.tokenizer.tokenizer.special_tokens_map.values() - )[0] - if isinstance(st, list): - st = st[0] - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>me""" - prompt = expected_prompt + " " - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL, - prompt=prompt, - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/completions", - json=body, - ) - - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 - - -# def test_rescale_temperature(client, request_id, mocker): -# expected_prompt = f"""<|im_start|>situation -# You are a helpful AI Assistant<|im_end|> -# <|im_start|>person (User) -# hi<|im_end|> -# <|im_start|>me -# """ -# prompt = expected_prompt -# temperature = 0.7 -# expected_sampling_params = SamplingParams( -# n=1, -# best_of=1, -# presence_penalty=0.0, -# frequency_penalty=0.75, -# repetition_penalty=1.0, -# temperature=0.0, -# top_p=0.99, -# top_k=-1, -# min_p=0.01, -# seed=None, -# use_beam_search=False, -# length_penalty=1.0, -# early_stopping=False, -# stop=["<", "<|"], -# stop_token_ids=[], -# include_stop_str_in_output=False, -# ignore_eos=False, -# max_tokens=1, -# logprobs=None, -# prompt_logprobs=None, -# skip_special_tokens=True, -# spaces_between_special_tokens=False, -# ) - -# mocker.patch("model_api.web.random_uuid", return_value=request_id) -# spy = mocker.spy(model_api.web.engine, "generate") - -# body = dict( -# model=MODEL, -# temperature=temperature, -# prompt=prompt, -# max_tokens=1, -# stop=["<", "<|"], -# frequency_penalty=0.75, -# ) -# response = client.post( -# "/v1/completions", -# json=body, -# ) -# assert spy.call_count == 1 -# spy.assert_called_once_with( -# expected_prompt, expected_sampling_params, f"cmpl-{request_id}" -# ) -# assert response.status_code == 200 - - -def test_logits_processor_drop_disallowed_start_tags(client, request_id, mocker): - expected_prompt = """<|im_start|>situation -You are a helpful AI Assistant<|im_end|> -<|im_start|>person (User) -hi<|im_end|> -<|im_start|>""" - prompt = expected_prompt - expected_sampling_params = SamplingParams( - n=1, - best_of=1, - presence_penalty=0.0, - frequency_penalty=0.75, - repetition_penalty=1.0, - temperature=0.75, - top_p=0.99, - top_k=-1, - min_p=0.01, - seed=None, - use_beam_search=False, - length_penalty=1.0, - early_stopping=False, - stop=["<", "<|"], - stop_token_ids=[], - include_stop_str_in_output=False, - ignore_eos=False, - max_tokens=1, - logprobs=None, - prompt_logprobs=None, - skip_special_tokens=True, - spaces_between_special_tokens=False, - logits_processors=[drop_disallowed_start_tags], - ) - - mocker.patch("model_api.web.random_uuid", return_value=request_id) - spy = mocker.spy(model_api.web.engine, "generate") - - body = dict( - model=MODEL, - prompt=prompt, - max_tokens=1, - stop=["<", "<|"], - temperature=0.75, - frequency_penalty=0.75, - ) - response = client.post( - "/v1/completions", - json=body, - ) - assert spy.call_count == 1 - spy.assert_called_once_with( - expected_prompt, expected_sampling_params, f"cmpl-{request_id}" - ) - assert response.status_code == 200 diff --git a/model-serving/update_tokenizer_template.py b/model-serving/update_tokenizer_template.py deleted file mode 100644 index e2a590b17..000000000 --- a/model-serving/update_tokenizer_template.py +++ /dev/null @@ -1,34 +0,0 @@ -from fire import Fire -from transformers import AutoTokenizer - - -def update_tokenizer_template( - tokenizer_name: str = "julep-ai/samantha-1-turbo", - template_path: str = "./model_api/chat_template.jinja", - save_to_dir: str | None = None, - push_to_hub: bool = False, -): - assert ( - save_to_dir is not None or push_to_hub - ), "You must specify a directory to save the tokenizer to or push to the Hugging Face model hub." - - # Load the tokenizer - tokenizer = AutoTokenizer.from_pretrained(tokenizer_name) - - # Set the template - with open(template_path, "r") as f: - template = f.read() - - tokenizer.chat_template = template - - # Save the tokenizer - if save_to_dir: - tokenizer.save_pretrained(save_to_dir) - - # Push to the Hugging Face model hub - if push_to_hub: - tokenizer.push_to_hub(tokenizer_name) - - -if __name__ == "__main__": - Fire(update_tokenizer_template) diff --git a/scripts/generate_openapi_code.sh b/scripts/generate_openapi_code.sh old mode 100644 new mode 100755 index b58b8ed45..9d046dbda --- a/scripts/generate_openapi_code.sh +++ b/scripts/generate_openapi_code.sh @@ -3,11 +3,14 @@ # Turn on echo command set -x +# Exit on error +set -e + cd typespec/ && \ tsp compile . cd - -fern generate --local +# fern generate cd sdks/python && \ poetry update && \ diff --git a/sdks/python/julep/api/__init__.py b/sdks/python/julep/api/__init__.py index 6ea9e327d..c570a4827 100644 --- a/sdks/python/julep/api/__init__.py +++ b/sdks/python/julep/api/__init__.py @@ -8,49 +8,38 @@ AgentToolsRouteListRequestSortBy, AgentToolsRouteListResponse, AgentsAgent, - AgentsAgentDefaultSettings, AgentsAgentInstructions, AgentsCreateAgentRequest, - AgentsCreateAgentRequestDefaultSettings, AgentsCreateAgentRequestInstructions, + AgentsCreateOrUpdateAgentRequest, AgentsDocsSearchRouteSearchRequestBody, - AgentsDocsSearchRouteSearchRequestDirection, - AgentsDocsSearchRouteSearchRequestSortBy, - AgentsDocsSearchRouteSearchResponse, - AgentsPatchAgentRequestDefaultSettings, AgentsPatchAgentRequestInstructions, AgentsRouteListRequestDirection, AgentsRouteListRequestSortBy, AgentsRouteListResponse, AgentsUpdateAgentRequest, - AgentsUpdateAgentRequestDefaultSettings, AgentsUpdateAgentRequestInstructions, ChatBaseChatOutput, ChatBaseChatResponse, ChatBaseTokenLogProb, + ChatChatInputData, + ChatChatInputDataToolChoice, ChatChatOutputChunk, + ChatChatSettings, ChatChunkChatResponse, ChatCompetionUsage, ChatCompletionResponseFormat, ChatCompletionResponseFormatType, + ChatDefaultChatSettings, ChatFinishReason, - ChatGenerationPreset, - ChatGenerationPresetSettings, ChatLogProbResponse, ChatMessageChatResponse, + ChatMessageChatResponseChoicesItem, ChatMultipleChatOutput, ChatOpenAiSettings, - ChatRouteGenerateRequest, - ChatRouteGenerateRequestAgent, - ChatRouteGenerateRequestAgentToolChoice, - ChatRouteGenerateRequestFrequencyPenalty, - ChatRouteGenerateRequestFrequencyPenaltyToolChoice, - ChatRouteGenerateRequestPreset, - ChatRouteGenerateRequestPresetToolChoice, ChatRouteGenerateResponse, ChatSingleChatOutput, ChatTokenLogProb, - ChatVLlmSettings, CommonIdentifierSafeUnicode, CommonLimit, CommonLogitBias, @@ -63,21 +52,21 @@ CommonUuid, CommonValidPythonIdentifier, DocsBaseDocSearchRequest, + DocsCreateDocRequest, + DocsCreateDocRequestContent, DocsDoc, DocsDocContent, DocsDocOwner, DocsDocOwnerRole, DocsDocReference, + DocsDocSearchResponse, DocsEmbedQueryRequest, DocsEmbedQueryRequestText, DocsEmbedQueryResponse, DocsHybridDocSearchRequest, - DocsHybridDocSearchRequestText, - DocsHybridDocSearchRequestVector, + DocsSnippet, DocsTextOnlyDocSearchRequest, - DocsTextOnlyDocSearchRequestText, DocsVectorDocSearchRequest, - DocsVectorDocSearchRequestVector, EntriesBaseEntry, EntriesBaseEntryContent, EntriesBaseEntryContentItem, @@ -86,11 +75,6 @@ EntriesBaseEntryContentItemItem_Text, EntriesBaseEntrySource, EntriesChatMlImageContentPart, - EntriesChatMlMessage, - EntriesChatMlMessageContent, - EntriesChatMlMessageContentItem, - EntriesChatMlMessageContentItem_ImageUrl, - EntriesChatMlMessageContentItem_Text, EntriesChatMlRole, EntriesChatMlTextContentPart, EntriesEntry, @@ -112,6 +96,7 @@ ExecutionsResumeExecutionRequest, ExecutionsStopExecutionRequest, ExecutionsTransition, + ExecutionsTransitionTarget, ExecutionsTransitionType, ExecutionsUpdateExecutionRequest, ExecutionsUpdateExecutionRequest_Cancelled, @@ -119,6 +104,7 @@ JobsJobState, JobsJobStatus, SessionsContextOverflowType, + SessionsCreateOrUpdateSessionRequest, SessionsCreateSessionRequest, SessionsMultiAgentMultiUserSession, SessionsMultiAgentNoUserSession, @@ -140,65 +126,207 @@ TaskExecutionsRouteListRequestSortBy, TaskExecutionsRouteListResponse, TasksBaseWorkflowStep, + TasksBaseWorkflowStep_Embed, + TasksBaseWorkflowStep_Error, + TasksBaseWorkflowStep_Foreach, + TasksBaseWorkflowStep_Get, + TasksBaseWorkflowStep_IfElse, + TasksBaseWorkflowStep_Log, + TasksBaseWorkflowStep_MapReduce, + TasksBaseWorkflowStep_Parallel, + TasksBaseWorkflowStep_Prompt, + TasksBaseWorkflowStep_Return, + TasksBaseWorkflowStep_Search, + TasksBaseWorkflowStep_Set, + TasksBaseWorkflowStep_Sleep, + TasksBaseWorkflowStep_Switch, + TasksBaseWorkflowStep_ToolCall, + TasksBaseWorkflowStep_WaitForInput, + TasksBaseWorkflowStep_Yield, + TasksCaseThen, + TasksCaseThenThen, + TasksCaseThenThen_Embed, + TasksCaseThenThen_Error, + TasksCaseThenThen_Evaluate, + TasksCaseThenThen_Get, + TasksCaseThenThen_Log, + TasksCaseThenThen_Prompt, + TasksCaseThenThen_Return, + TasksCaseThenThen_Search, + TasksCaseThenThen_Set, + TasksCaseThenThen_Sleep, + TasksCaseThenThen_ToolCall, + TasksCaseThenThen_WaitForInput, + TasksCaseThenThen_Yield, TasksCreateTaskRequest, TasksCreateTaskRequestMainItem, + TasksCreateTaskRequestMainItem_Embed, TasksCreateTaskRequestMainItem_Error, TasksCreateTaskRequestMainItem_Evaluate, + TasksCreateTaskRequestMainItem_Foreach, + TasksCreateTaskRequestMainItem_Get, TasksCreateTaskRequestMainItem_IfElse, + TasksCreateTaskRequestMainItem_Log, + TasksCreateTaskRequestMainItem_MapReduce, + TasksCreateTaskRequestMainItem_Parallel, TasksCreateTaskRequestMainItem_Prompt, + TasksCreateTaskRequestMainItem_Return, + TasksCreateTaskRequestMainItem_Search, + TasksCreateTaskRequestMainItem_Set, + TasksCreateTaskRequestMainItem_Sleep, + TasksCreateTaskRequestMainItem_Switch, TasksCreateTaskRequestMainItem_ToolCall, TasksCreateTaskRequestMainItem_WaitForInput, TasksCreateTaskRequestMainItem_Yield, + TasksEmbedStep, TasksErrorWorkflowStep, TasksEvaluateStep, + TasksForeachDo, + TasksForeachDoDo, + TasksForeachDoDo_Embed, + TasksForeachDoDo_Error, + TasksForeachDoDo_Evaluate, + TasksForeachDoDo_Get, + TasksForeachDoDo_Log, + TasksForeachDoDo_Prompt, + TasksForeachDoDo_Return, + TasksForeachDoDo_Search, + TasksForeachDoDo_Set, + TasksForeachDoDo_Sleep, + TasksForeachDoDo_ToolCall, + TasksForeachDoDo_WaitForInput, + TasksForeachDoDo_Yield, + TasksForeachStep, + TasksGetStep, TasksIfElseWorkflowStep, TasksIfElseWorkflowStepElse, + TasksIfElseWorkflowStepElse_Embed, + TasksIfElseWorkflowStepElse_Error, + TasksIfElseWorkflowStepElse_Evaluate, + TasksIfElseWorkflowStepElse_Get, + TasksIfElseWorkflowStepElse_Log, + TasksIfElseWorkflowStepElse_Prompt, + TasksIfElseWorkflowStepElse_Return, + TasksIfElseWorkflowStepElse_Search, + TasksIfElseWorkflowStepElse_Set, + TasksIfElseWorkflowStepElse_Sleep, + TasksIfElseWorkflowStepElse_ToolCall, + TasksIfElseWorkflowStepElse_WaitForInput, + TasksIfElseWorkflowStepElse_Yield, TasksIfElseWorkflowStepThen, + TasksIfElseWorkflowStepThen_Embed, + TasksIfElseWorkflowStepThen_Error, + TasksIfElseWorkflowStepThen_Evaluate, + TasksIfElseWorkflowStepThen_Get, + TasksIfElseWorkflowStepThen_Log, + TasksIfElseWorkflowStepThen_Prompt, + TasksIfElseWorkflowStepThen_Return, + TasksIfElseWorkflowStepThen_Search, + TasksIfElseWorkflowStepThen_Set, + TasksIfElseWorkflowStepThen_Sleep, + TasksIfElseWorkflowStepThen_ToolCall, + TasksIfElseWorkflowStepThen_WaitForInput, + TasksIfElseWorkflowStepThen_Yield, + TasksLogStep, + TasksMapOver, + TasksMapReduceStep, + TasksParallelStep, + TasksParallelStepParallelItem, + TasksParallelStepParallelItem_Embed, + TasksParallelStepParallelItem_Error, + TasksParallelStepParallelItem_Evaluate, + TasksParallelStepParallelItem_Get, + TasksParallelStepParallelItem_Log, + TasksParallelStepParallelItem_Prompt, + TasksParallelStepParallelItem_Return, + TasksParallelStepParallelItem_Search, + TasksParallelStepParallelItem_Set, + TasksParallelStepParallelItem_Sleep, + TasksParallelStepParallelItem_ToolCall, + TasksParallelStepParallelItem_WaitForInput, + TasksParallelStepParallelItem_Yield, TasksPatchTaskRequestMainItem, + TasksPatchTaskRequestMainItem_Embed, TasksPatchTaskRequestMainItem_Error, TasksPatchTaskRequestMainItem_Evaluate, + TasksPatchTaskRequestMainItem_Foreach, + TasksPatchTaskRequestMainItem_Get, TasksPatchTaskRequestMainItem_IfElse, + TasksPatchTaskRequestMainItem_Log, + TasksPatchTaskRequestMainItem_MapReduce, + TasksPatchTaskRequestMainItem_Parallel, TasksPatchTaskRequestMainItem_Prompt, + TasksPatchTaskRequestMainItem_Return, + TasksPatchTaskRequestMainItem_Search, + TasksPatchTaskRequestMainItem_Set, + TasksPatchTaskRequestMainItem_Sleep, + TasksPatchTaskRequestMainItem_Switch, TasksPatchTaskRequestMainItem_ToolCall, TasksPatchTaskRequestMainItem_WaitForInput, TasksPatchTaskRequestMainItem_Yield, TasksPromptStep, TasksPromptStepPrompt, - TasksPromptStepSettings, - TasksPromptStepSettingsAgent, - TasksPromptStepSettingsFrequencyPenalty, - TasksPromptStepSettingsPreset, + TasksReturnStep, TasksRouteListRequestDirection, TasksRouteListRequestSortBy, TasksRouteListResponse, + TasksSearchStep, + TasksSearchStepSearch, + TasksSetKey, + TasksSetStep, + TasksSetStepSet, + TasksSleepFor, + TasksSleepStep, + TasksSwitchStep, TasksTask, TasksTaskMainItem, + TasksTaskMainItem_Embed, TasksTaskMainItem_Error, TasksTaskMainItem_Evaluate, + TasksTaskMainItem_Foreach, + TasksTaskMainItem_Get, TasksTaskMainItem_IfElse, + TasksTaskMainItem_Log, + TasksTaskMainItem_MapReduce, + TasksTaskMainItem_Parallel, TasksTaskMainItem_Prompt, + TasksTaskMainItem_Return, + TasksTaskMainItem_Search, + TasksTaskMainItem_Set, + TasksTaskMainItem_Sleep, + TasksTaskMainItem_Switch, TasksTaskMainItem_ToolCall, TasksTaskMainItem_WaitForInput, TasksTaskMainItem_Yield, TasksTaskTool, TasksToolCallStep, TasksUpdateTaskRequestMainItem, + TasksUpdateTaskRequestMainItem_Embed, TasksUpdateTaskRequestMainItem_Error, TasksUpdateTaskRequestMainItem_Evaluate, + TasksUpdateTaskRequestMainItem_Foreach, + TasksUpdateTaskRequestMainItem_Get, TasksUpdateTaskRequestMainItem_IfElse, + TasksUpdateTaskRequestMainItem_Log, + TasksUpdateTaskRequestMainItem_MapReduce, + TasksUpdateTaskRequestMainItem_Parallel, TasksUpdateTaskRequestMainItem_Prompt, + TasksUpdateTaskRequestMainItem_Return, + TasksUpdateTaskRequestMainItem_Search, + TasksUpdateTaskRequestMainItem_Set, + TasksUpdateTaskRequestMainItem_Sleep, + TasksUpdateTaskRequestMainItem_Switch, TasksUpdateTaskRequestMainItem_ToolCall, TasksUpdateTaskRequestMainItem_WaitForInput, TasksUpdateTaskRequestMainItem_Yield, TasksWaitForInputStep, - TasksWaitForInputStepInfo, TasksYieldStep, ToolsChosenFunctionCall, ToolsChosenToolCall, ToolsChosenToolCall_Function, + ToolsCreateToolRequest, ToolsFunctionCallOption, ToolsFunctionDef, - ToolsFunctionDefUpdate, ToolsFunctionTool, ToolsNamedFunctionChoice, ToolsNamedToolChoice, @@ -211,13 +339,11 @@ UserDocsRouteListRequestSortBy, UserDocsRouteListResponse, UserDocsSearchRouteSearchRequestBody, - UserDocsSearchRouteSearchRequestDirection, - UserDocsSearchRouteSearchRequestSortBy, - UserDocsSearchRouteSearchResponse, + UsersCreateOrUpdateUserRequest, + UsersCreateUserRequest, UsersRouteListRequestDirection, UsersRouteListRequestSortBy, UsersRouteListResponse, - UsersUpdateUserRequest, UsersUser, ) from .environment import JulepApiEnvironment @@ -230,49 +356,38 @@ "AgentToolsRouteListRequestSortBy", "AgentToolsRouteListResponse", "AgentsAgent", - "AgentsAgentDefaultSettings", "AgentsAgentInstructions", "AgentsCreateAgentRequest", - "AgentsCreateAgentRequestDefaultSettings", "AgentsCreateAgentRequestInstructions", + "AgentsCreateOrUpdateAgentRequest", "AgentsDocsSearchRouteSearchRequestBody", - "AgentsDocsSearchRouteSearchRequestDirection", - "AgentsDocsSearchRouteSearchRequestSortBy", - "AgentsDocsSearchRouteSearchResponse", - "AgentsPatchAgentRequestDefaultSettings", "AgentsPatchAgentRequestInstructions", "AgentsRouteListRequestDirection", "AgentsRouteListRequestSortBy", "AgentsRouteListResponse", "AgentsUpdateAgentRequest", - "AgentsUpdateAgentRequestDefaultSettings", "AgentsUpdateAgentRequestInstructions", "ChatBaseChatOutput", "ChatBaseChatResponse", "ChatBaseTokenLogProb", + "ChatChatInputData", + "ChatChatInputDataToolChoice", "ChatChatOutputChunk", + "ChatChatSettings", "ChatChunkChatResponse", "ChatCompetionUsage", "ChatCompletionResponseFormat", "ChatCompletionResponseFormatType", + "ChatDefaultChatSettings", "ChatFinishReason", - "ChatGenerationPreset", - "ChatGenerationPresetSettings", "ChatLogProbResponse", "ChatMessageChatResponse", + "ChatMessageChatResponseChoicesItem", "ChatMultipleChatOutput", "ChatOpenAiSettings", - "ChatRouteGenerateRequest", - "ChatRouteGenerateRequestAgent", - "ChatRouteGenerateRequestAgentToolChoice", - "ChatRouteGenerateRequestFrequencyPenalty", - "ChatRouteGenerateRequestFrequencyPenaltyToolChoice", - "ChatRouteGenerateRequestPreset", - "ChatRouteGenerateRequestPresetToolChoice", "ChatRouteGenerateResponse", "ChatSingleChatOutput", "ChatTokenLogProb", - "ChatVLlmSettings", "CommonIdentifierSafeUnicode", "CommonLimit", "CommonLogitBias", @@ -285,21 +400,21 @@ "CommonUuid", "CommonValidPythonIdentifier", "DocsBaseDocSearchRequest", + "DocsCreateDocRequest", + "DocsCreateDocRequestContent", "DocsDoc", "DocsDocContent", "DocsDocOwner", "DocsDocOwnerRole", "DocsDocReference", + "DocsDocSearchResponse", "DocsEmbedQueryRequest", "DocsEmbedQueryRequestText", "DocsEmbedQueryResponse", "DocsHybridDocSearchRequest", - "DocsHybridDocSearchRequestText", - "DocsHybridDocSearchRequestVector", + "DocsSnippet", "DocsTextOnlyDocSearchRequest", - "DocsTextOnlyDocSearchRequestText", "DocsVectorDocSearchRequest", - "DocsVectorDocSearchRequestVector", "EntriesBaseEntry", "EntriesBaseEntryContent", "EntriesBaseEntryContentItem", @@ -308,11 +423,6 @@ "EntriesBaseEntryContentItemItem_Text", "EntriesBaseEntrySource", "EntriesChatMlImageContentPart", - "EntriesChatMlMessage", - "EntriesChatMlMessageContent", - "EntriesChatMlMessageContentItem", - "EntriesChatMlMessageContentItem_ImageUrl", - "EntriesChatMlMessageContentItem_Text", "EntriesChatMlRole", "EntriesChatMlTextContentPart", "EntriesEntry", @@ -334,6 +444,7 @@ "ExecutionsResumeExecutionRequest", "ExecutionsStopExecutionRequest", "ExecutionsTransition", + "ExecutionsTransitionTarget", "ExecutionsTransitionType", "ExecutionsUpdateExecutionRequest", "ExecutionsUpdateExecutionRequest_Cancelled", @@ -342,6 +453,7 @@ "JobsJobStatus", "JulepApiEnvironment", "SessionsContextOverflowType", + "SessionsCreateOrUpdateSessionRequest", "SessionsCreateSessionRequest", "SessionsMultiAgentMultiUserSession", "SessionsMultiAgentNoUserSession", @@ -363,65 +475,207 @@ "TaskExecutionsRouteListRequestSortBy", "TaskExecutionsRouteListResponse", "TasksBaseWorkflowStep", + "TasksBaseWorkflowStep_Embed", + "TasksBaseWorkflowStep_Error", + "TasksBaseWorkflowStep_Foreach", + "TasksBaseWorkflowStep_Get", + "TasksBaseWorkflowStep_IfElse", + "TasksBaseWorkflowStep_Log", + "TasksBaseWorkflowStep_MapReduce", + "TasksBaseWorkflowStep_Parallel", + "TasksBaseWorkflowStep_Prompt", + "TasksBaseWorkflowStep_Return", + "TasksBaseWorkflowStep_Search", + "TasksBaseWorkflowStep_Set", + "TasksBaseWorkflowStep_Sleep", + "TasksBaseWorkflowStep_Switch", + "TasksBaseWorkflowStep_ToolCall", + "TasksBaseWorkflowStep_WaitForInput", + "TasksBaseWorkflowStep_Yield", + "TasksCaseThen", + "TasksCaseThenThen", + "TasksCaseThenThen_Embed", + "TasksCaseThenThen_Error", + "TasksCaseThenThen_Evaluate", + "TasksCaseThenThen_Get", + "TasksCaseThenThen_Log", + "TasksCaseThenThen_Prompt", + "TasksCaseThenThen_Return", + "TasksCaseThenThen_Search", + "TasksCaseThenThen_Set", + "TasksCaseThenThen_Sleep", + "TasksCaseThenThen_ToolCall", + "TasksCaseThenThen_WaitForInput", + "TasksCaseThenThen_Yield", "TasksCreateTaskRequest", "TasksCreateTaskRequestMainItem", + "TasksCreateTaskRequestMainItem_Embed", "TasksCreateTaskRequestMainItem_Error", "TasksCreateTaskRequestMainItem_Evaluate", + "TasksCreateTaskRequestMainItem_Foreach", + "TasksCreateTaskRequestMainItem_Get", "TasksCreateTaskRequestMainItem_IfElse", + "TasksCreateTaskRequestMainItem_Log", + "TasksCreateTaskRequestMainItem_MapReduce", + "TasksCreateTaskRequestMainItem_Parallel", "TasksCreateTaskRequestMainItem_Prompt", + "TasksCreateTaskRequestMainItem_Return", + "TasksCreateTaskRequestMainItem_Search", + "TasksCreateTaskRequestMainItem_Set", + "TasksCreateTaskRequestMainItem_Sleep", + "TasksCreateTaskRequestMainItem_Switch", "TasksCreateTaskRequestMainItem_ToolCall", "TasksCreateTaskRequestMainItem_WaitForInput", "TasksCreateTaskRequestMainItem_Yield", + "TasksEmbedStep", "TasksErrorWorkflowStep", "TasksEvaluateStep", + "TasksForeachDo", + "TasksForeachDoDo", + "TasksForeachDoDo_Embed", + "TasksForeachDoDo_Error", + "TasksForeachDoDo_Evaluate", + "TasksForeachDoDo_Get", + "TasksForeachDoDo_Log", + "TasksForeachDoDo_Prompt", + "TasksForeachDoDo_Return", + "TasksForeachDoDo_Search", + "TasksForeachDoDo_Set", + "TasksForeachDoDo_Sleep", + "TasksForeachDoDo_ToolCall", + "TasksForeachDoDo_WaitForInput", + "TasksForeachDoDo_Yield", + "TasksForeachStep", + "TasksGetStep", "TasksIfElseWorkflowStep", "TasksIfElseWorkflowStepElse", + "TasksIfElseWorkflowStepElse_Embed", + "TasksIfElseWorkflowStepElse_Error", + "TasksIfElseWorkflowStepElse_Evaluate", + "TasksIfElseWorkflowStepElse_Get", + "TasksIfElseWorkflowStepElse_Log", + "TasksIfElseWorkflowStepElse_Prompt", + "TasksIfElseWorkflowStepElse_Return", + "TasksIfElseWorkflowStepElse_Search", + "TasksIfElseWorkflowStepElse_Set", + "TasksIfElseWorkflowStepElse_Sleep", + "TasksIfElseWorkflowStepElse_ToolCall", + "TasksIfElseWorkflowStepElse_WaitForInput", + "TasksIfElseWorkflowStepElse_Yield", "TasksIfElseWorkflowStepThen", + "TasksIfElseWorkflowStepThen_Embed", + "TasksIfElseWorkflowStepThen_Error", + "TasksIfElseWorkflowStepThen_Evaluate", + "TasksIfElseWorkflowStepThen_Get", + "TasksIfElseWorkflowStepThen_Log", + "TasksIfElseWorkflowStepThen_Prompt", + "TasksIfElseWorkflowStepThen_Return", + "TasksIfElseWorkflowStepThen_Search", + "TasksIfElseWorkflowStepThen_Set", + "TasksIfElseWorkflowStepThen_Sleep", + "TasksIfElseWorkflowStepThen_ToolCall", + "TasksIfElseWorkflowStepThen_WaitForInput", + "TasksIfElseWorkflowStepThen_Yield", + "TasksLogStep", + "TasksMapOver", + "TasksMapReduceStep", + "TasksParallelStep", + "TasksParallelStepParallelItem", + "TasksParallelStepParallelItem_Embed", + "TasksParallelStepParallelItem_Error", + "TasksParallelStepParallelItem_Evaluate", + "TasksParallelStepParallelItem_Get", + "TasksParallelStepParallelItem_Log", + "TasksParallelStepParallelItem_Prompt", + "TasksParallelStepParallelItem_Return", + "TasksParallelStepParallelItem_Search", + "TasksParallelStepParallelItem_Set", + "TasksParallelStepParallelItem_Sleep", + "TasksParallelStepParallelItem_ToolCall", + "TasksParallelStepParallelItem_WaitForInput", + "TasksParallelStepParallelItem_Yield", "TasksPatchTaskRequestMainItem", + "TasksPatchTaskRequestMainItem_Embed", "TasksPatchTaskRequestMainItem_Error", "TasksPatchTaskRequestMainItem_Evaluate", + "TasksPatchTaskRequestMainItem_Foreach", + "TasksPatchTaskRequestMainItem_Get", "TasksPatchTaskRequestMainItem_IfElse", + "TasksPatchTaskRequestMainItem_Log", + "TasksPatchTaskRequestMainItem_MapReduce", + "TasksPatchTaskRequestMainItem_Parallel", "TasksPatchTaskRequestMainItem_Prompt", + "TasksPatchTaskRequestMainItem_Return", + "TasksPatchTaskRequestMainItem_Search", + "TasksPatchTaskRequestMainItem_Set", + "TasksPatchTaskRequestMainItem_Sleep", + "TasksPatchTaskRequestMainItem_Switch", "TasksPatchTaskRequestMainItem_ToolCall", "TasksPatchTaskRequestMainItem_WaitForInput", "TasksPatchTaskRequestMainItem_Yield", "TasksPromptStep", "TasksPromptStepPrompt", - "TasksPromptStepSettings", - "TasksPromptStepSettingsAgent", - "TasksPromptStepSettingsFrequencyPenalty", - "TasksPromptStepSettingsPreset", + "TasksReturnStep", "TasksRouteListRequestDirection", "TasksRouteListRequestSortBy", "TasksRouteListResponse", + "TasksSearchStep", + "TasksSearchStepSearch", + "TasksSetKey", + "TasksSetStep", + "TasksSetStepSet", + "TasksSleepFor", + "TasksSleepStep", + "TasksSwitchStep", "TasksTask", "TasksTaskMainItem", + "TasksTaskMainItem_Embed", "TasksTaskMainItem_Error", "TasksTaskMainItem_Evaluate", + "TasksTaskMainItem_Foreach", + "TasksTaskMainItem_Get", "TasksTaskMainItem_IfElse", + "TasksTaskMainItem_Log", + "TasksTaskMainItem_MapReduce", + "TasksTaskMainItem_Parallel", "TasksTaskMainItem_Prompt", + "TasksTaskMainItem_Return", + "TasksTaskMainItem_Search", + "TasksTaskMainItem_Set", + "TasksTaskMainItem_Sleep", + "TasksTaskMainItem_Switch", "TasksTaskMainItem_ToolCall", "TasksTaskMainItem_WaitForInput", "TasksTaskMainItem_Yield", "TasksTaskTool", "TasksToolCallStep", "TasksUpdateTaskRequestMainItem", + "TasksUpdateTaskRequestMainItem_Embed", "TasksUpdateTaskRequestMainItem_Error", "TasksUpdateTaskRequestMainItem_Evaluate", + "TasksUpdateTaskRequestMainItem_Foreach", + "TasksUpdateTaskRequestMainItem_Get", "TasksUpdateTaskRequestMainItem_IfElse", + "TasksUpdateTaskRequestMainItem_Log", + "TasksUpdateTaskRequestMainItem_MapReduce", + "TasksUpdateTaskRequestMainItem_Parallel", "TasksUpdateTaskRequestMainItem_Prompt", + "TasksUpdateTaskRequestMainItem_Return", + "TasksUpdateTaskRequestMainItem_Search", + "TasksUpdateTaskRequestMainItem_Set", + "TasksUpdateTaskRequestMainItem_Sleep", + "TasksUpdateTaskRequestMainItem_Switch", "TasksUpdateTaskRequestMainItem_ToolCall", "TasksUpdateTaskRequestMainItem_WaitForInput", "TasksUpdateTaskRequestMainItem_Yield", "TasksWaitForInputStep", - "TasksWaitForInputStepInfo", "TasksYieldStep", "ToolsChosenFunctionCall", "ToolsChosenToolCall", "ToolsChosenToolCall_Function", + "ToolsCreateToolRequest", "ToolsFunctionCallOption", "ToolsFunctionDef", - "ToolsFunctionDefUpdate", "ToolsFunctionTool", "ToolsNamedFunctionChoice", "ToolsNamedToolChoice", @@ -434,12 +688,10 @@ "UserDocsRouteListRequestSortBy", "UserDocsRouteListResponse", "UserDocsSearchRouteSearchRequestBody", - "UserDocsSearchRouteSearchRequestDirection", - "UserDocsSearchRouteSearchRequestSortBy", - "UserDocsSearchRouteSearchResponse", + "UsersCreateOrUpdateUserRequest", + "UsersCreateUserRequest", "UsersRouteListRequestDirection", "UsersRouteListRequestSortBy", "UsersRouteListResponse", - "UsersUpdateUserRequest", "UsersUser", ] diff --git a/sdks/python/julep/api/client.py b/sdks/python/julep/api/client.py index edd79bc0c..c71d2534e 100644 --- a/sdks/python/julep/api/client.py +++ b/sdks/python/julep/api/client.py @@ -24,53 +24,41 @@ ) from .types.agent_tools_route_list_response import AgentToolsRouteListResponse from .types.agents_agent import AgentsAgent -from .types.agents_create_agent_request_default_settings import ( - AgentsCreateAgentRequestDefaultSettings, -) from .types.agents_create_agent_request_instructions import ( AgentsCreateAgentRequestInstructions, ) from .types.agents_docs_search_route_search_request_body import ( AgentsDocsSearchRouteSearchRequestBody, ) -from .types.agents_docs_search_route_search_request_direction import ( - AgentsDocsSearchRouteSearchRequestDirection, -) -from .types.agents_docs_search_route_search_request_sort_by import ( - AgentsDocsSearchRouteSearchRequestSortBy, -) -from .types.agents_docs_search_route_search_response import ( - AgentsDocsSearchRouteSearchResponse, -) -from .types.agents_patch_agent_request_default_settings import ( - AgentsPatchAgentRequestDefaultSettings, -) from .types.agents_patch_agent_request_instructions import ( AgentsPatchAgentRequestInstructions, ) from .types.agents_route_list_request_direction import AgentsRouteListRequestDirection from .types.agents_route_list_request_sort_by import AgentsRouteListRequestSortBy from .types.agents_route_list_response import AgentsRouteListResponse -from .types.agents_update_agent_request_default_settings import ( - AgentsUpdateAgentRequestDefaultSettings, -) from .types.agents_update_agent_request_instructions import ( AgentsUpdateAgentRequestInstructions, ) -from .types.chat_route_generate_request import ChatRouteGenerateRequest +from .types.chat_chat_input_data_tool_choice import ChatChatInputDataToolChoice +from .types.chat_completion_response_format import ChatCompletionResponseFormat +from .types.chat_default_chat_settings import ChatDefaultChatSettings from .types.chat_route_generate_response import ChatRouteGenerateResponse from .types.common_identifier_safe_unicode import CommonIdentifierSafeUnicode from .types.common_limit import CommonLimit +from .types.common_logit_bias import CommonLogitBias from .types.common_offset import CommonOffset from .types.common_resource_created_response import CommonResourceCreatedResponse from .types.common_resource_deleted_response import CommonResourceDeletedResponse from .types.common_resource_updated_response import CommonResourceUpdatedResponse from .types.common_uuid import CommonUuid from .types.common_valid_python_identifier import CommonValidPythonIdentifier +from .types.docs_create_doc_request_content import DocsCreateDocRequestContent from .types.docs_doc import DocsDoc +from .types.docs_doc_search_response import DocsDocSearchResponse from .types.docs_embed_query_request import DocsEmbedQueryRequest from .types.docs_embed_query_response import DocsEmbedQueryResponse from .types.entries_history import EntriesHistory +from .types.entries_input_chat_ml_message import EntriesInputChatMlMessage from .types.execution_transitions_route_list_request_direction import ( ExecutionTransitionsRouteListRequestDirection, ) @@ -105,7 +93,7 @@ from .types.tasks_task_tool import TasksTaskTool from .types.tasks_update_task_request_main_item import TasksUpdateTaskRequestMainItem from .types.tools_function_def import ToolsFunctionDef -from .types.tools_function_def_update import ToolsFunctionDefUpdate +from .types.tools_function_tool import ToolsFunctionTool from .types.tools_tool_type import ToolsToolType from .types.user_docs_route_list_request_direction import ( UserDocsRouteListRequestDirection, @@ -115,15 +103,6 @@ from .types.user_docs_search_route_search_request_body import ( UserDocsSearchRouteSearchRequestBody, ) -from .types.user_docs_search_route_search_request_direction import ( - UserDocsSearchRouteSearchRequestDirection, -) -from .types.user_docs_search_route_search_request_sort_by import ( - UserDocsSearchRouteSearchRequestSortBy, -) -from .types.user_docs_search_route_search_response import ( - UserDocsSearchRouteSearchResponse, -) from .types.users_route_list_request_direction import UsersRouteListRequestDirection from .types.users_route_list_request_sort_by import UsersRouteListRequestSortBy from .types.users_route_list_response import UsersRouteListResponse @@ -286,9 +265,7 @@ def agents_route_create( model: str, instructions: AgentsCreateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsCreateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceCreatedResponse: """ @@ -310,7 +287,7 @@ def agents_route_create( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsCreateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -411,9 +388,7 @@ def agents_route_create_or_update( model: str, instructions: AgentsUpdateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsUpdateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceUpdatedResponse: """ @@ -437,7 +412,7 @@ def agents_route_create_or_update( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsUpdateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -495,9 +470,7 @@ def agents_route_update( model: str, instructions: AgentsUpdateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsUpdateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceUpdatedResponse: """ @@ -522,7 +495,7 @@ def agents_route_update( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsUpdateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -624,9 +597,7 @@ def agents_route_patch( about: typing.Optional[str] = OMIT, model: typing.Optional[str] = OMIT, instructions: typing.Optional[AgentsPatchAgentRequestInstructions] = OMIT, - default_settings: typing.Optional[ - AgentsPatchAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceUpdatedResponse: """ @@ -651,7 +622,7 @@ def agents_route_patch( instructions : typing.Optional[AgentsPatchAgentRequestInstructions] Instructions for the agent - default_settings : typing.Optional[AgentsPatchAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -775,40 +746,134 @@ def agent_docs_route_list( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def agents_docs_search_route_search( + def agent_docs_route_create( self, id: CommonUuid, *, - limit: CommonLimit, - offset: CommonOffset, - sort_by: AgentsDocsSearchRouteSearchRequestSortBy, - direction: AgentsDocsSearchRouteSearchRequestDirection, - metadata_filter: str, - body: AgentsDocsSearchRouteSearchRequestBody, + title: CommonIdentifierSafeUnicode, + content: DocsCreateDocRequestContent, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> AgentsDocsSearchRouteSearchResponse: + ) -> CommonResourceCreatedResponse: """ - Search Docs owned by an Agent + Create a Doc for this Agent Parameters ---------- id : CommonUuid - ID of the parent + ID of parent resource - limit : CommonLimit - Limit the number of items returned + title : CommonIdentifierSafeUnicode + Title describing what this document contains - offset : CommonOffset - Offset the items returned + content : DocsCreateDocRequestContent + Contents of the document - sort_by : AgentsDocsSearchRouteSearchRequestSortBy - Sort by a field + metadata : typing.Optional[typing.Dict[str, typing.Any]] - direction : AgentsDocsSearchRouteSearchRequestDirection - Sort direction + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - metadata_filter : str - JSON string of object that should be used to filter objects by metadata + Returns + ------- + CommonResourceCreatedResponse + The request has succeeded and a new resource has been created as a result. + + Examples + -------- + from julep.client import JulepApi + + client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + client.agent_docs_route_create( + id="id", + title="title", + content="content", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"agents/{jsonable_encoder(id)}/docs", + method="POST", + json={"metadata": metadata, "title": title, "content": content}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceCreatedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def agent_docs_route_delete( + self, + id: CommonUuid, + child_id: CommonUuid, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> CommonResourceDeletedResponse: + """ + Delete a Doc for this Agent + + Parameters + ---------- + id : CommonUuid + ID of parent resource + + child_id : CommonUuid + ID of the resource to be deleted + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CommonResourceDeletedResponse + The request has been accepted for processing, but processing has not yet completed. + + Examples + -------- + from julep.client import JulepApi + + client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + client.agent_docs_route_delete( + id="id", + child_id="child_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"agents/{jsonable_encoder(id)}/docs/{jsonable_encoder(child_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceDeletedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def agents_docs_search_route_search( + self, + id: CommonUuid, + *, + body: AgentsDocsSearchRouteSearchRequestBody, + request_options: typing.Optional[RequestOptions] = None, + ) -> DocsDocSearchResponse: + """ + Search Docs owned by an Agent + + Parameters + ---------- + id : CommonUuid + ID of the parent body : AgentsDocsSearchRouteSearchRequestBody @@ -817,7 +882,7 @@ def agents_docs_search_route_search( Returns ------- - AgentsDocsSearchRouteSearchResponse + DocsDocSearchResponse The request has succeeded. Examples @@ -831,15 +896,9 @@ def agents_docs_search_route_search( ) client.agents_docs_search_route_search( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", body=DocsVectorDocSearchRequest( + limit=1, confidence=1.1, - alpha=1.1, - mmr=True, vector=[1.1], ), ) @@ -847,20 +906,13 @@ def agents_docs_search_route_search( _response = self._client_wrapper.httpx_client.request( f"agents/{jsonable_encoder(id)}/search", method="POST", - params={ - "limit": limit, - "offset": offset, - "sort_by": sort_by, - "direction": direction, - "metadata_filter": metadata_filter, - }, json={"body": body}, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AgentsDocsSearchRouteSearchResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DocsDocSearchResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -990,7 +1042,7 @@ def tasks_route_create( Returns ------- CommonResourceCreatedResponse - The request has succeeded and a new resource has been created as a result. + The request has succeeded. Examples -------- @@ -1354,9 +1406,7 @@ def agent_tools_route_create( model: str, instructions: AgentsCreateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsCreateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceCreatedResponse: """ @@ -1381,7 +1431,7 @@ def agent_tools_route_create( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsCreateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -1572,7 +1622,7 @@ def agent_tools_route_patch( *, type: typing.Optional[ToolsToolType] = OMIT, name: typing.Optional[CommonValidPythonIdentifier] = OMIT, - function: typing.Optional[ToolsFunctionDefUpdate] = OMIT, + function: typing.Optional[ToolsFunctionDef] = OMIT, integration: typing.Optional[typing.Any] = OMIT, system: typing.Optional[typing.Any] = OMIT, api_call: typing.Optional[typing.Any] = OMIT, @@ -1595,7 +1645,7 @@ def agent_tools_route_patch( name : typing.Optional[CommonValidPythonIdentifier] Name of the tool (must be unique for this agent and a valid python identifier string ) - function : typing.Optional[ToolsFunctionDefUpdate] + function : typing.Optional[ToolsFunctionDef] integration : typing.Optional[typing.Any] @@ -1659,14 +1709,14 @@ def tasks_create_or_update_route_create_or_update( input_schema: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> CommonResourceUpdatedResponse: + ) -> CommonResourceCreatedResponse: """ Create or update a task Parameters ---------- parent_id : CommonUuid - ID of parent resource + ID of the agent id : CommonUuid @@ -1693,8 +1743,8 @@ def tasks_create_or_update_route_create_or_update( Returns ------- - CommonResourceUpdatedResponse - The request has succeeded. + CommonResourceCreatedResponse + The request has succeeded and a new resource has been created as a result. Examples -------- @@ -1737,7 +1787,7 @@ def tasks_create_or_update_route_create_or_update( ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(CommonResourceCreatedResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1787,96 +1837,106 @@ def individual_docs_route_get( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def individual_docs_route_delete( - self, id: CommonUuid, *, request_options: typing.Optional[RequestOptions] = None - ) -> CommonResourceDeletedResponse: + def embed_route_embed( + self, + *, + body: DocsEmbedQueryRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> DocsEmbedQueryResponse: """ - Delete an existing Doc by id + Embed a query for search Parameters ---------- - id : CommonUuid - ID of the resource + body : DocsEmbedQueryRequest request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - CommonResourceDeletedResponse - The request has been accepted for processing, but processing has not yet completed. + DocsEmbedQueryResponse + The request has succeeded. Examples -------- + from julep import DocsEmbedQueryRequest from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) - client.individual_docs_route_delete( - id="id", + client.embed_route_embed( + body=DocsEmbedQueryRequest( + text="text", + ), ) """ _response = self._client_wrapper.httpx_client.request( - f"docs/{jsonable_encoder(id)}", - method="DELETE", + "embed", + method="POST", + json={"body": body}, request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceDeletedResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DocsEmbedQueryResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def embed_route_embed( + def executions_route_resume_with_task_token( self, *, - body: DocsEmbedQueryRequest, + task_token: str, + input: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> DocsEmbedQueryResponse: + ) -> CommonResourceUpdatedResponse: """ - Embed a query for search + Resume an execution with a task token Parameters ---------- - body : DocsEmbedQueryRequest + task_token : str + A Task Token is a unique identifier for a specific Task Execution. + + input : typing.Optional[typing.Dict[str, typing.Any]] + The input to resume the execution with request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - DocsEmbedQueryResponse + CommonResourceUpdatedResponse The request has succeeded. Examples -------- - from julep import DocsEmbedQueryRequest from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) - client.embed_route_embed( - body=DocsEmbedQueryRequest( - text="text", - ), + client.executions_route_resume_with_task_token( + task_token="task_token", ) """ _response = self._client_wrapper.httpx_client.request( - "embed", + "executions", method="POST", - json={"body": body}, + params={"task_token": task_token}, + json={"input": input, "status": "running"}, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocsEmbedQueryResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -1926,77 +1986,133 @@ def executions_route_get( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def execution_transitions_route_list( + def executions_route_update( self, id: CommonUuid, *, - limit: CommonLimit, - offset: CommonOffset, - sort_by: ExecutionTransitionsRouteListRequestSortBy, - direction: ExecutionTransitionsRouteListRequestDirection, - metadata_filter: str, + request: ExecutionsUpdateExecutionRequest, request_options: typing.Optional[RequestOptions] = None, - ) -> ExecutionTransitionsRouteListResponse: + ) -> CommonResourceUpdatedResponse: """ - List the Transitions of an Execution by id + Update an existing Execution Parameters ---------- id : CommonUuid - ID of parent - - limit : CommonLimit - Limit the number of items returned - - offset : CommonOffset - Offset the items returned - - sort_by : ExecutionTransitionsRouteListRequestSortBy - Sort by a field - - direction : ExecutionTransitionsRouteListRequestDirection - Sort direction + ID of the resource - metadata_filter : str - JSON string of object that should be used to filter objects by metadata + request : ExecutionsUpdateExecutionRequest request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - ExecutionTransitionsRouteListResponse + CommonResourceUpdatedResponse The request has succeeded. Examples -------- + from julep import ExecutionsUpdateExecutionRequest_Cancelled from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) - client.execution_transitions_route_list( - id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", + client.executions_route_update( + id="string", + request=ExecutionsUpdateExecutionRequest_Cancelled( + reason="string", + ), ) """ _response = self._client_wrapper.httpx_client.request( - f"executions/{jsonable_encoder(id)}/transitions", - method="GET", - params={ - "limit": limit, - "offset": offset, - "sort_by": sort_by, - "direction": direction, - "metadata_filter": metadata_filter, - }, - request_options=request_options, - ) + f"executions/{jsonable_encoder(id)}", + method="PUT", + json=request, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def execution_transitions_route_list( + self, + id: CommonUuid, + *, + limit: CommonLimit, + offset: CommonOffset, + sort_by: ExecutionTransitionsRouteListRequestSortBy, + direction: ExecutionTransitionsRouteListRequestDirection, + metadata_filter: str, + request_options: typing.Optional[RequestOptions] = None, + ) -> ExecutionTransitionsRouteListResponse: + """ + List the Transitions of an Execution by id + + Parameters + ---------- + id : CommonUuid + ID of parent + + limit : CommonLimit + Limit the number of items returned + + offset : CommonOffset + Offset the items returned + + sort_by : ExecutionTransitionsRouteListRequestSortBy + Sort by a field + + direction : ExecutionTransitionsRouteListRequestDirection + Sort direction + + metadata_filter : str + JSON string of object that should be used to filter objects by metadata + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ExecutionTransitionsRouteListResponse + The request has succeeded. + + Examples + -------- + from julep.client import JulepApi + + client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + client.execution_transitions_route_list( + id="id", + limit=1, + offset=1, + sort_by="created_at", + direction="asc", + metadata_filter="metadata_filter", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"executions/{jsonable_encoder(id)}/transitions", + method="GET", + params={ + "limit": limit, + "offset": offset, + "sort_by": sort_by, + "direction": direction, + "metadata_filter": metadata_filter, + }, + request_options=request_options, + ) try: if 200 <= _response.status_code < 300: return pydantic_v1.parse_obj_as(ExecutionTransitionsRouteListResponse, _response.json()) # type: ignore @@ -2547,7 +2663,27 @@ def chat_route_generate( self, id: CommonUuid, *, - request: ChatRouteGenerateRequest, + remember: bool, + recall: bool, + save: bool, + stream: bool, + messages: typing.Sequence[EntriesInputChatMlMessage], + model: typing.Optional[CommonIdentifierSafeUnicode] = OMIT, + stop: typing.Optional[typing.Sequence[str]] = OMIT, + seed: typing.Optional[int] = OMIT, + max_tokens: typing.Optional[int] = OMIT, + logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = OMIT, + response_format: typing.Optional[ChatCompletionResponseFormat] = OMIT, + agent: typing.Optional[CommonUuid] = OMIT, + repetition_penalty: typing.Optional[float] = OMIT, + length_penalty: typing.Optional[float] = OMIT, + min_p: typing.Optional[float] = OMIT, + frequency_penalty: typing.Optional[float] = OMIT, + presence_penalty: typing.Optional[float] = OMIT, + temperature: typing.Optional[float] = OMIT, + top_p: typing.Optional[float] = OMIT, + tools: typing.Optional[typing.Sequence[ToolsFunctionTool]] = OMIT, + tool_choice: typing.Optional[ChatChatInputDataToolChoice] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ChatRouteGenerateResponse: """ @@ -2558,7 +2694,68 @@ def chat_route_generate( id : CommonUuid The session ID - request : ChatRouteGenerateRequest + remember : bool + DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release) + + recall : bool + Whether previous memories and docs should be recalled or not + + save : bool + Whether this interaction should be stored in the session history or not + + stream : bool + Indicates if the server should stream the response as it's generated + + messages : typing.Sequence[EntriesInputChatMlMessage] + A list of new input messages comprising the conversation so far. + + model : typing.Optional[CommonIdentifierSafeUnicode] + Identifier of the model to be used + + stop : typing.Optional[typing.Sequence[str]] + Up to 4 sequences where the API will stop generating further tokens. + + seed : typing.Optional[int] + If specified, the system will make a best effort to sample deterministically for that particular seed value + + max_tokens : typing.Optional[int] + The maximum number of tokens to generate in the chat completion + + logit_bias : typing.Optional[typing.Dict[str, CommonLogitBias]] + Modify the likelihood of specified tokens appearing in the completion + + response_format : typing.Optional[ChatCompletionResponseFormat] + Response format (set to `json_object` to restrict output to JSON) + + agent : typing.Optional[CommonUuid] + Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + + repetition_penalty : typing.Optional[float] + Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + + length_penalty : typing.Optional[float] + Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. + + min_p : typing.Optional[float] + Minimum probability compared to leading token to be considered + + frequency_penalty : typing.Optional[float] + Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + + presence_penalty : typing.Optional[float] + Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + + temperature : typing.Optional[float] + What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. + + top_p : typing.Optional[float] + Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. + + tools : typing.Optional[typing.Sequence[ToolsFunctionTool]] + (Advanced) List of tools that are provided in addition to agent's default set of tools. + + tool_choice : typing.Optional[ChatChatInputDataToolChoice] + Can be one of existing tools given to the agent earlier or the ones provided in this request. request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -2570,7 +2767,7 @@ def chat_route_generate( Examples -------- - from julep import ChatRouteGenerateRequestPreset, EntriesInputChatMlMessage + from julep import EntriesInputChatMlMessage from julep.client import JulepApi client = JulepApi( @@ -2579,24 +2776,44 @@ def chat_route_generate( ) client.chat_route_generate( id="id", - request=ChatRouteGenerateRequestPreset( - messages=[ - EntriesInputChatMlMessage( - role="user", - content="content", - ) - ], - recall=True, - remember=True, - save=True, - stream=True, - ), + messages=[ + EntriesInputChatMlMessage( + role="user", + content="content", + ) + ], + remember=True, + recall=True, + save=True, + stream=True, ) """ _response = self._client_wrapper.httpx_client.request( f"sessions/{jsonable_encoder(id)}/chat", method="POST", - json=request, + json={ + "remember": remember, + "recall": recall, + "save": save, + "model": model, + "stream": stream, + "stop": stop, + "seed": seed, + "max_tokens": max_tokens, + "logit_bias": logit_bias, + "response_format": response_format, + "agent": agent, + "repetition_penalty": repetition_penalty, + "length_penalty": length_penalty, + "min_p": min_p, + "frequency_penalty": frequency_penalty, + "presence_penalty": presence_penalty, + "temperature": temperature, + "top_p": top_p, + "messages": messages, + "tools": tools, + "tool_choice": tool_choice, + }, request_options=request_options, omit=OMIT, ) @@ -2609,11 +2826,7 @@ def chat_route_generate( raise ApiError(status_code=_response.status_code, body=_response_json) def history_route_history( - self, - id: CommonUuid, - *, - limit: CommonLimit, - request_options: typing.Optional[RequestOptions] = None, + self, id: CommonUuid, *, request_options: typing.Optional[RequestOptions] = None ) -> EntriesHistory: """ Get history of a Session @@ -2623,9 +2836,6 @@ def history_route_history( id : CommonUuid ID of parent - limit : CommonLimit - Limit the number of items returned - request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -2644,13 +2854,11 @@ def history_route_history( ) client.history_route_history( id="id", - limit=1, ) """ _response = self._client_wrapper.httpx_client.request( f"sessions/{jsonable_encoder(id)}/history", method="GET", - params={"limit": limit}, request_options=request_options, ) try: @@ -2841,125 +3049,6 @@ def task_executions_route_create( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def task_executions_route_resume_with_task_token( - self, - id: CommonUuid, - *, - task_token: str, - input: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> CommonResourceUpdatedResponse: - """ - Resume an execution with a task token - - Parameters - ---------- - id : CommonUuid - ID of parent Task - - task_token : str - A Task Token is a unique identifier for a specific Task Execution. - - input : typing.Optional[typing.Dict[str, typing.Any]] - The input to resume the execution with - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - CommonResourceUpdatedResponse - The request has succeeded. - - Examples - -------- - from julep.client import JulepApi - - client = JulepApi( - auth_key="YOUR_AUTH_KEY", - api_key="YOUR_API_KEY", - ) - client.task_executions_route_resume_with_task_token( - id="id", - task_token="task_token", - ) - """ - _response = self._client_wrapper.httpx_client.request( - f"tasks/{jsonable_encoder(id)}/executions", - method="PUT", - json={"task_token": task_token, "input": input, "status": "running"}, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def task_executions_route_update( - self, - id: CommonUuid, - child_id: CommonUuid, - *, - request: ExecutionsUpdateExecutionRequest, - request_options: typing.Optional[RequestOptions] = None, - ) -> CommonResourceUpdatedResponse: - """ - Update an existing Execution - - Parameters - ---------- - id : CommonUuid - ID of parent resource - - child_id : CommonUuid - ID of the resource to be updated - - request : ExecutionsUpdateExecutionRequest - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - CommonResourceUpdatedResponse - The request has succeeded. - - Examples - -------- - from julep import ExecutionsUpdateExecutionRequest_Cancelled - from julep.client import JulepApi - - client = JulepApi( - auth_key="YOUR_AUTH_KEY", - api_key="YOUR_API_KEY", - ) - client.task_executions_route_update( - id="string", - child_id="string", - request=ExecutionsUpdateExecutionRequest_Cancelled( - reason="string", - ), - ) - """ - _response = self._client_wrapper.httpx_client.request( - f"tasks/{jsonable_encoder(id)}/executions/{jsonable_encoder(child_id)}", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - def users_route_list( self, *, @@ -3441,40 +3530,134 @@ def user_docs_route_list( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def user_docs_search_route_search( + def user_docs_route_create( self, id: CommonUuid, *, - limit: CommonLimit, - offset: CommonOffset, - sort_by: UserDocsSearchRouteSearchRequestSortBy, - direction: UserDocsSearchRouteSearchRequestDirection, - metadata_filter: str, - body: UserDocsSearchRouteSearchRequestBody, + title: CommonIdentifierSafeUnicode, + content: DocsCreateDocRequestContent, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> UserDocsSearchRouteSearchResponse: + ) -> CommonResourceCreatedResponse: """ - Search Docs owned by a User + Create a Doc for this User Parameters ---------- id : CommonUuid - ID of the parent + ID of parent resource - limit : CommonLimit - Limit the number of items returned + title : CommonIdentifierSafeUnicode + Title describing what this document contains - offset : CommonOffset - Offset the items returned + content : DocsCreateDocRequestContent + Contents of the document - sort_by : UserDocsSearchRouteSearchRequestSortBy - Sort by a field + metadata : typing.Optional[typing.Dict[str, typing.Any]] - direction : UserDocsSearchRouteSearchRequestDirection - Sort direction + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - metadata_filter : str - JSON string of object that should be used to filter objects by metadata + Returns + ------- + CommonResourceCreatedResponse + The request has succeeded and a new resource has been created as a result. + + Examples + -------- + from julep.client import JulepApi + + client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + client.user_docs_route_create( + id="id", + title="title", + content="content", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"users/{jsonable_encoder(id)}/docs", + method="POST", + json={"metadata": metadata, "title": title, "content": content}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceCreatedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def user_docs_route_delete( + self, + id: CommonUuid, + child_id: CommonUuid, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> CommonResourceDeletedResponse: + """ + Delete a Doc for this User + + Parameters + ---------- + id : CommonUuid + ID of parent resource + + child_id : CommonUuid + ID of the resource to be deleted + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CommonResourceDeletedResponse + The request has been accepted for processing, but processing has not yet completed. + + Examples + -------- + from julep.client import JulepApi + + client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + client.user_docs_route_delete( + id="id", + child_id="child_id", + ) + """ + _response = self._client_wrapper.httpx_client.request( + f"users/{jsonable_encoder(id)}/docs/{jsonable_encoder(child_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceDeletedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def user_docs_search_route_search( + self, + id: CommonUuid, + *, + body: UserDocsSearchRouteSearchRequestBody, + request_options: typing.Optional[RequestOptions] = None, + ) -> DocsDocSearchResponse: + """ + Search Docs owned by a User + + Parameters + ---------- + id : CommonUuid + ID of the parent body : UserDocsSearchRouteSearchRequestBody @@ -3483,7 +3666,7 @@ def user_docs_search_route_search( Returns ------- - UserDocsSearchRouteSearchResponse + DocsDocSearchResponse The request has succeeded. Examples @@ -3497,15 +3680,9 @@ def user_docs_search_route_search( ) client.user_docs_search_route_search( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", body=DocsVectorDocSearchRequest( + limit=1, confidence=1.1, - alpha=1.1, - mmr=True, vector=[1.1], ), ) @@ -3513,20 +3690,13 @@ def user_docs_search_route_search( _response = self._client_wrapper.httpx_client.request( f"users/{jsonable_encoder(id)}/search", method="POST", - params={ - "limit": limit, - "offset": offset, - "sort_by": sort_by, - "direction": direction, - "metadata_filter": metadata_filter, - }, json={"body": body}, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UserDocsSearchRouteSearchResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DocsDocSearchResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -3694,9 +3864,7 @@ async def agents_route_create( model: str, instructions: AgentsCreateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsCreateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceCreatedResponse: """ @@ -3718,7 +3886,7 @@ async def agents_route_create( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsCreateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -3835,9 +4003,7 @@ async def agents_route_create_or_update( model: str, instructions: AgentsUpdateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsUpdateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceUpdatedResponse: """ @@ -3861,7 +4027,7 @@ async def agents_route_create_or_update( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsUpdateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -3927,9 +4093,7 @@ async def agents_route_update( model: str, instructions: AgentsUpdateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsUpdateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceUpdatedResponse: """ @@ -3954,7 +4118,7 @@ async def agents_route_update( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsUpdateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -4072,9 +4236,7 @@ async def agents_route_patch( about: typing.Optional[str] = OMIT, model: typing.Optional[str] = OMIT, instructions: typing.Optional[AgentsPatchAgentRequestInstructions] = OMIT, - default_settings: typing.Optional[ - AgentsPatchAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceUpdatedResponse: """ @@ -4099,7 +4261,7 @@ async def agents_route_patch( instructions : typing.Optional[AgentsPatchAgentRequestInstructions] Instructions for the agent - default_settings : typing.Optional[AgentsPatchAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -4239,40 +4401,150 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def agents_docs_search_route_search( + async def agent_docs_route_create( self, id: CommonUuid, *, - limit: CommonLimit, - offset: CommonOffset, - sort_by: AgentsDocsSearchRouteSearchRequestSortBy, - direction: AgentsDocsSearchRouteSearchRequestDirection, - metadata_filter: str, - body: AgentsDocsSearchRouteSearchRequestBody, + title: CommonIdentifierSafeUnicode, + content: DocsCreateDocRequestContent, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> AgentsDocsSearchRouteSearchResponse: + ) -> CommonResourceCreatedResponse: """ - Search Docs owned by an Agent + Create a Doc for this Agent Parameters ---------- id : CommonUuid - ID of the parent + ID of parent resource - limit : CommonLimit - Limit the number of items returned + title : CommonIdentifierSafeUnicode + Title describing what this document contains - offset : CommonOffset - Offset the items returned + content : DocsCreateDocRequestContent + Contents of the document - sort_by : AgentsDocsSearchRouteSearchRequestSortBy - Sort by a field + metadata : typing.Optional[typing.Dict[str, typing.Any]] - direction : AgentsDocsSearchRouteSearchRequestDirection - Sort direction + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - metadata_filter : str - JSON string of object that should be used to filter objects by metadata + Returns + ------- + CommonResourceCreatedResponse + The request has succeeded and a new resource has been created as a result. + + Examples + -------- + import asyncio + + from julep.client import AsyncJulepApi + + client = AsyncJulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.agent_docs_route_create( + id="id", + title="title", + content="content", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"agents/{jsonable_encoder(id)}/docs", + method="POST", + json={"metadata": metadata, "title": title, "content": content}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceCreatedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def agent_docs_route_delete( + self, + id: CommonUuid, + child_id: CommonUuid, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> CommonResourceDeletedResponse: + """ + Delete a Doc for this Agent + + Parameters + ---------- + id : CommonUuid + ID of parent resource + + child_id : CommonUuid + ID of the resource to be deleted + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CommonResourceDeletedResponse + The request has been accepted for processing, but processing has not yet completed. + + Examples + -------- + import asyncio + + from julep.client import AsyncJulepApi + + client = AsyncJulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.agent_docs_route_delete( + id="id", + child_id="child_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"agents/{jsonable_encoder(id)}/docs/{jsonable_encoder(child_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceDeletedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def agents_docs_search_route_search( + self, + id: CommonUuid, + *, + body: AgentsDocsSearchRouteSearchRequestBody, + request_options: typing.Optional[RequestOptions] = None, + ) -> DocsDocSearchResponse: + """ + Search Docs owned by an Agent + + Parameters + ---------- + id : CommonUuid + ID of the parent body : AgentsDocsSearchRouteSearchRequestBody @@ -4281,7 +4553,7 @@ async def agents_docs_search_route_search( Returns ------- - AgentsDocsSearchRouteSearchResponse + DocsDocSearchResponse The request has succeeded. Examples @@ -4300,15 +4572,9 @@ async def agents_docs_search_route_search( async def main() -> None: await client.agents_docs_search_route_search( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", body=DocsVectorDocSearchRequest( + limit=1, confidence=1.1, - alpha=1.1, - mmr=True, vector=[1.1], ), ) @@ -4319,20 +4585,13 @@ async def main() -> None: _response = await self._client_wrapper.httpx_client.request( f"agents/{jsonable_encoder(id)}/search", method="POST", - params={ - "limit": limit, - "offset": offset, - "sort_by": sort_by, - "direction": direction, - "metadata_filter": metadata_filter, - }, json={"body": body}, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(AgentsDocsSearchRouteSearchResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DocsDocSearchResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -4470,7 +4729,7 @@ async def tasks_route_create( Returns ------- CommonResourceCreatedResponse - The request has succeeded and a new resource has been created as a result. + The request has succeeded. Examples -------- @@ -4874,9 +5133,7 @@ async def agent_tools_route_create( model: str, instructions: AgentsCreateAgentRequestInstructions, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - default_settings: typing.Optional[ - AgentsCreateAgentRequestDefaultSettings - ] = OMIT, + default_settings: typing.Optional[ChatDefaultChatSettings] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> CommonResourceCreatedResponse: """ @@ -4901,7 +5158,7 @@ async def agent_tools_route_create( metadata : typing.Optional[typing.Dict[str, typing.Any]] - default_settings : typing.Optional[AgentsCreateAgentRequestDefaultSettings] + default_settings : typing.Optional[ChatDefaultChatSettings] Default settings for all sessions created by this agent request_options : typing.Optional[RequestOptions] @@ -5116,7 +5373,7 @@ async def agent_tools_route_patch( *, type: typing.Optional[ToolsToolType] = OMIT, name: typing.Optional[CommonValidPythonIdentifier] = OMIT, - function: typing.Optional[ToolsFunctionDefUpdate] = OMIT, + function: typing.Optional[ToolsFunctionDef] = OMIT, integration: typing.Optional[typing.Any] = OMIT, system: typing.Optional[typing.Any] = OMIT, api_call: typing.Optional[typing.Any] = OMIT, @@ -5139,7 +5396,7 @@ async def agent_tools_route_patch( name : typing.Optional[CommonValidPythonIdentifier] Name of the tool (must be unique for this agent and a valid python identifier string ) - function : typing.Optional[ToolsFunctionDefUpdate] + function : typing.Optional[ToolsFunctionDef] integration : typing.Optional[typing.Any] @@ -5211,14 +5468,14 @@ async def tasks_create_or_update_route_create_or_update( input_schema: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> CommonResourceUpdatedResponse: + ) -> CommonResourceCreatedResponse: """ Create or update a task Parameters ---------- parent_id : CommonUuid - ID of parent resource + ID of the agent id : CommonUuid @@ -5245,8 +5502,8 @@ async def tasks_create_or_update_route_create_or_update( Returns ------- - CommonResourceUpdatedResponse - The request has succeeded. + CommonResourceCreatedResponse + The request has succeeded and a new resource has been created as a result. Examples -------- @@ -5297,7 +5554,7 @@ async def main() -> None: ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(CommonResourceCreatedResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -5355,29 +5612,32 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def individual_docs_route_delete( - self, id: CommonUuid, *, request_options: typing.Optional[RequestOptions] = None - ) -> CommonResourceDeletedResponse: + async def embed_route_embed( + self, + *, + body: DocsEmbedQueryRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> DocsEmbedQueryResponse: """ - Delete an existing Doc by id + Embed a query for search Parameters ---------- - id : CommonUuid - ID of the resource + body : DocsEmbedQueryRequest request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - CommonResourceDeletedResponse - The request has been accepted for processing, but processing has not yet completed. + DocsEmbedQueryResponse + The request has succeeded. Examples -------- import asyncio + from julep import DocsEmbedQueryRequest from julep.client import AsyncJulepApi client = AsyncJulepApi( @@ -5387,52 +5647,60 @@ async def individual_docs_route_delete( async def main() -> None: - await client.individual_docs_route_delete( - id="id", + await client.embed_route_embed( + body=DocsEmbedQueryRequest( + text="text", + ), ) asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - f"docs/{jsonable_encoder(id)}", - method="DELETE", + "embed", + method="POST", + json={"body": body}, request_options=request_options, + omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceDeletedResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DocsEmbedQueryResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def embed_route_embed( + async def executions_route_resume_with_task_token( self, *, - body: DocsEmbedQueryRequest, + task_token: str, + input: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> DocsEmbedQueryResponse: + ) -> CommonResourceUpdatedResponse: """ - Embed a query for search + Resume an execution with a task token Parameters ---------- - body : DocsEmbedQueryRequest + task_token : str + A Task Token is a unique identifier for a specific Task Execution. + + input : typing.Optional[typing.Dict[str, typing.Any]] + The input to resume the execution with request_options : typing.Optional[RequestOptions] Request-specific configuration. Returns ------- - DocsEmbedQueryResponse + CommonResourceUpdatedResponse The request has succeeded. Examples -------- import asyncio - from julep import DocsEmbedQueryRequest from julep.client import AsyncJulepApi client = AsyncJulepApi( @@ -5442,25 +5710,24 @@ async def embed_route_embed( async def main() -> None: - await client.embed_route_embed( - body=DocsEmbedQueryRequest( - text="text", - ), + await client.executions_route_resume_with_task_token( + task_token="task_token", ) asyncio.run(main()) """ _response = await self._client_wrapper.httpx_client.request( - "embed", + "executions", method="POST", - json={"body": body}, + params={"task_token": task_token}, + json={"input": input, "status": "running"}, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(DocsEmbedQueryResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) @@ -5518,6 +5785,70 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) + async def executions_route_update( + self, + id: CommonUuid, + *, + request: ExecutionsUpdateExecutionRequest, + request_options: typing.Optional[RequestOptions] = None, + ) -> CommonResourceUpdatedResponse: + """ + Update an existing Execution + + Parameters + ---------- + id : CommonUuid + ID of the resource + + request : ExecutionsUpdateExecutionRequest + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CommonResourceUpdatedResponse + The request has succeeded. + + Examples + -------- + import asyncio + + from julep import ExecutionsUpdateExecutionRequest_Cancelled + from julep.client import AsyncJulepApi + + client = AsyncJulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.executions_route_update( + id="string", + request=ExecutionsUpdateExecutionRequest_Cancelled( + reason="string", + ), + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"executions/{jsonable_encoder(id)}", + method="PUT", + json=request, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + async def execution_transitions_route_list( self, id: CommonUuid, @@ -6211,7 +6542,27 @@ async def chat_route_generate( self, id: CommonUuid, *, - request: ChatRouteGenerateRequest, + remember: bool, + recall: bool, + save: bool, + stream: bool, + messages: typing.Sequence[EntriesInputChatMlMessage], + model: typing.Optional[CommonIdentifierSafeUnicode] = OMIT, + stop: typing.Optional[typing.Sequence[str]] = OMIT, + seed: typing.Optional[int] = OMIT, + max_tokens: typing.Optional[int] = OMIT, + logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = OMIT, + response_format: typing.Optional[ChatCompletionResponseFormat] = OMIT, + agent: typing.Optional[CommonUuid] = OMIT, + repetition_penalty: typing.Optional[float] = OMIT, + length_penalty: typing.Optional[float] = OMIT, + min_p: typing.Optional[float] = OMIT, + frequency_penalty: typing.Optional[float] = OMIT, + presence_penalty: typing.Optional[float] = OMIT, + temperature: typing.Optional[float] = OMIT, + top_p: typing.Optional[float] = OMIT, + tools: typing.Optional[typing.Sequence[ToolsFunctionTool]] = OMIT, + tool_choice: typing.Optional[ChatChatInputDataToolChoice] = OMIT, request_options: typing.Optional[RequestOptions] = None, ) -> ChatRouteGenerateResponse: """ @@ -6222,7 +6573,68 @@ async def chat_route_generate( id : CommonUuid The session ID - request : ChatRouteGenerateRequest + remember : bool + DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release) + + recall : bool + Whether previous memories and docs should be recalled or not + + save : bool + Whether this interaction should be stored in the session history or not + + stream : bool + Indicates if the server should stream the response as it's generated + + messages : typing.Sequence[EntriesInputChatMlMessage] + A list of new input messages comprising the conversation so far. + + model : typing.Optional[CommonIdentifierSafeUnicode] + Identifier of the model to be used + + stop : typing.Optional[typing.Sequence[str]] + Up to 4 sequences where the API will stop generating further tokens. + + seed : typing.Optional[int] + If specified, the system will make a best effort to sample deterministically for that particular seed value + + max_tokens : typing.Optional[int] + The maximum number of tokens to generate in the chat completion + + logit_bias : typing.Optional[typing.Dict[str, CommonLogitBias]] + Modify the likelihood of specified tokens appearing in the completion + + response_format : typing.Optional[ChatCompletionResponseFormat] + Response format (set to `json_object` to restrict output to JSON) + + agent : typing.Optional[CommonUuid] + Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + + repetition_penalty : typing.Optional[float] + Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + + length_penalty : typing.Optional[float] + Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. + + min_p : typing.Optional[float] + Minimum probability compared to leading token to be considered + + frequency_penalty : typing.Optional[float] + Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + + presence_penalty : typing.Optional[float] + Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + + temperature : typing.Optional[float] + What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. + + top_p : typing.Optional[float] + Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. + + tools : typing.Optional[typing.Sequence[ToolsFunctionTool]] + (Advanced) List of tools that are provided in addition to agent's default set of tools. + + tool_choice : typing.Optional[ChatChatInputDataToolChoice] + Can be one of existing tools given to the agent earlier or the ones provided in this request. request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -6236,7 +6648,7 @@ async def chat_route_generate( -------- import asyncio - from julep import ChatRouteGenerateRequestPreset, EntriesInputChatMlMessage + from julep import EntriesInputChatMlMessage from julep.client import AsyncJulepApi client = AsyncJulepApi( @@ -6248,18 +6660,16 @@ async def chat_route_generate( async def main() -> None: await client.chat_route_generate( id="id", - request=ChatRouteGenerateRequestPreset( - messages=[ - EntriesInputChatMlMessage( - role="user", - content="content", - ) - ], - recall=True, - remember=True, - save=True, - stream=True, - ), + messages=[ + EntriesInputChatMlMessage( + role="user", + content="content", + ) + ], + remember=True, + recall=True, + save=True, + stream=True, ) @@ -6268,7 +6678,29 @@ async def main() -> None: _response = await self._client_wrapper.httpx_client.request( f"sessions/{jsonable_encoder(id)}/chat", method="POST", - json=request, + json={ + "remember": remember, + "recall": recall, + "save": save, + "model": model, + "stream": stream, + "stop": stop, + "seed": seed, + "max_tokens": max_tokens, + "logit_bias": logit_bias, + "response_format": response_format, + "agent": agent, + "repetition_penalty": repetition_penalty, + "length_penalty": length_penalty, + "min_p": min_p, + "frequency_penalty": frequency_penalty, + "presence_penalty": presence_penalty, + "temperature": temperature, + "top_p": top_p, + "messages": messages, + "tools": tools, + "tool_choice": tool_choice, + }, request_options=request_options, omit=OMIT, ) @@ -6281,11 +6713,7 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response_json) async def history_route_history( - self, - id: CommonUuid, - *, - limit: CommonLimit, - request_options: typing.Optional[RequestOptions] = None, + self, id: CommonUuid, *, request_options: typing.Optional[RequestOptions] = None ) -> EntriesHistory: """ Get history of a Session @@ -6295,9 +6723,6 @@ async def history_route_history( id : CommonUuid ID of parent - limit : CommonLimit - Limit the number of items returned - request_options : typing.Optional[RequestOptions] Request-specific configuration. @@ -6321,7 +6746,6 @@ async def history_route_history( async def main() -> None: await client.history_route_history( id="id", - limit=1, ) @@ -6330,7 +6754,6 @@ async def main() -> None: _response = await self._client_wrapper.httpx_client.request( f"sessions/{jsonable_encoder(id)}/history", method="GET", - params={"limit": limit}, request_options=request_options, ) try: @@ -6545,141 +6968,6 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def task_executions_route_resume_with_task_token( - self, - id: CommonUuid, - *, - task_token: str, - input: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - request_options: typing.Optional[RequestOptions] = None, - ) -> CommonResourceUpdatedResponse: - """ - Resume an execution with a task token - - Parameters - ---------- - id : CommonUuid - ID of parent Task - - task_token : str - A Task Token is a unique identifier for a specific Task Execution. - - input : typing.Optional[typing.Dict[str, typing.Any]] - The input to resume the execution with - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - CommonResourceUpdatedResponse - The request has succeeded. - - Examples - -------- - import asyncio - - from julep.client import AsyncJulepApi - - client = AsyncJulepApi( - auth_key="YOUR_AUTH_KEY", - api_key="YOUR_API_KEY", - ) - - - async def main() -> None: - await client.task_executions_route_resume_with_task_token( - id="id", - task_token="task_token", - ) - - - asyncio.run(main()) - """ - _response = await self._client_wrapper.httpx_client.request( - f"tasks/{jsonable_encoder(id)}/executions", - method="PUT", - json={"task_token": task_token, "input": input, "status": "running"}, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def task_executions_route_update( - self, - id: CommonUuid, - child_id: CommonUuid, - *, - request: ExecutionsUpdateExecutionRequest, - request_options: typing.Optional[RequestOptions] = None, - ) -> CommonResourceUpdatedResponse: - """ - Update an existing Execution - - Parameters - ---------- - id : CommonUuid - ID of parent resource - - child_id : CommonUuid - ID of the resource to be updated - - request : ExecutionsUpdateExecutionRequest - - request_options : typing.Optional[RequestOptions] - Request-specific configuration. - - Returns - ------- - CommonResourceUpdatedResponse - The request has succeeded. - - Examples - -------- - import asyncio - - from julep import ExecutionsUpdateExecutionRequest_Cancelled - from julep.client import AsyncJulepApi - - client = AsyncJulepApi( - auth_key="YOUR_AUTH_KEY", - api_key="YOUR_API_KEY", - ) - - - async def main() -> None: - await client.task_executions_route_update( - id="string", - child_id="string", - request=ExecutionsUpdateExecutionRequest_Cancelled( - reason="string", - ), - ) - - - asyncio.run(main()) - """ - _response = await self._client_wrapper.httpx_client.request( - f"tasks/{jsonable_encoder(id)}/executions/{jsonable_encoder(child_id)}", - method="PUT", - json=request, - request_options=request_options, - omit=OMIT, - ) - try: - if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(CommonResourceUpdatedResponse, _response.json()) # type: ignore - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - async def users_route_list( self, *, @@ -7225,40 +7513,150 @@ async def main() -> None: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def user_docs_search_route_search( + async def user_docs_route_create( self, id: CommonUuid, *, - limit: CommonLimit, - offset: CommonOffset, - sort_by: UserDocsSearchRouteSearchRequestSortBy, - direction: UserDocsSearchRouteSearchRequestDirection, - metadata_filter: str, - body: UserDocsSearchRouteSearchRequestBody, + title: CommonIdentifierSafeUnicode, + content: DocsCreateDocRequestContent, + metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, request_options: typing.Optional[RequestOptions] = None, - ) -> UserDocsSearchRouteSearchResponse: + ) -> CommonResourceCreatedResponse: """ - Search Docs owned by a User + Create a Doc for this User Parameters ---------- id : CommonUuid - ID of the parent + ID of parent resource - limit : CommonLimit - Limit the number of items returned + title : CommonIdentifierSafeUnicode + Title describing what this document contains - offset : CommonOffset - Offset the items returned + content : DocsCreateDocRequestContent + Contents of the document - sort_by : UserDocsSearchRouteSearchRequestSortBy - Sort by a field + metadata : typing.Optional[typing.Dict[str, typing.Any]] - direction : UserDocsSearchRouteSearchRequestDirection - Sort direction + request_options : typing.Optional[RequestOptions] + Request-specific configuration. - metadata_filter : str - JSON string of object that should be used to filter objects by metadata + Returns + ------- + CommonResourceCreatedResponse + The request has succeeded and a new resource has been created as a result. + + Examples + -------- + import asyncio + + from julep.client import AsyncJulepApi + + client = AsyncJulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.user_docs_route_create( + id="id", + title="title", + content="content", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"users/{jsonable_encoder(id)}/docs", + method="POST", + json={"metadata": metadata, "title": title, "content": content}, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceCreatedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def user_docs_route_delete( + self, + id: CommonUuid, + child_id: CommonUuid, + *, + request_options: typing.Optional[RequestOptions] = None, + ) -> CommonResourceDeletedResponse: + """ + Delete a Doc for this User + + Parameters + ---------- + id : CommonUuid + ID of parent resource + + child_id : CommonUuid + ID of the resource to be deleted + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + CommonResourceDeletedResponse + The request has been accepted for processing, but processing has not yet completed. + + Examples + -------- + import asyncio + + from julep.client import AsyncJulepApi + + client = AsyncJulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", + ) + + + async def main() -> None: + await client.user_docs_route_delete( + id="id", + child_id="child_id", + ) + + + asyncio.run(main()) + """ + _response = await self._client_wrapper.httpx_client.request( + f"users/{jsonable_encoder(id)}/docs/{jsonable_encoder(child_id)}", + method="DELETE", + request_options=request_options, + ) + try: + if 200 <= _response.status_code < 300: + return pydantic_v1.parse_obj_as(CommonResourceDeletedResponse, _response.json()) # type: ignore + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def user_docs_search_route_search( + self, + id: CommonUuid, + *, + body: UserDocsSearchRouteSearchRequestBody, + request_options: typing.Optional[RequestOptions] = None, + ) -> DocsDocSearchResponse: + """ + Search Docs owned by a User + + Parameters + ---------- + id : CommonUuid + ID of the parent body : UserDocsSearchRouteSearchRequestBody @@ -7267,7 +7665,7 @@ async def user_docs_search_route_search( Returns ------- - UserDocsSearchRouteSearchResponse + DocsDocSearchResponse The request has succeeded. Examples @@ -7286,15 +7684,9 @@ async def user_docs_search_route_search( async def main() -> None: await client.user_docs_search_route_search( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", body=DocsVectorDocSearchRequest( + limit=1, confidence=1.1, - alpha=1.1, - mmr=True, vector=[1.1], ), ) @@ -7305,20 +7697,13 @@ async def main() -> None: _response = await self._client_wrapper.httpx_client.request( f"users/{jsonable_encoder(id)}/search", method="POST", - params={ - "limit": limit, - "offset": offset, - "sort_by": sort_by, - "direction": direction, - "metadata_filter": metadata_filter, - }, json={"body": body}, request_options=request_options, omit=OMIT, ) try: if 200 <= _response.status_code < 300: - return pydantic_v1.parse_obj_as(UserDocsSearchRouteSearchResponse, _response.json()) # type: ignore + return pydantic_v1.parse_obj_as(DocsDocSearchResponse, _response.json()) # type: ignore _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) diff --git a/sdks/python/julep/api/reference.md b/sdks/python/julep/api/reference.md index be1c91b28..9ab2e4f12 100644 --- a/sdks/python/julep/api/reference.md +++ b/sdks/python/julep/api/reference.md @@ -200,7 +200,7 @@ client.agents_route_create(
-**default_settings:** `typing.Optional[AgentsCreateAgentRequestDefaultSettings]` — Default settings for all sessions created by this agent +**default_settings:** `typing.Optional[ChatDefaultChatSettings]` — Default settings for all sessions created by this agent
@@ -394,7 +394,7 @@ client.agents_route_create_or_update(
-**default_settings:** `typing.Optional[AgentsUpdateAgentRequestDefaultSettings]` — Default settings for all sessions created by this agent +**default_settings:** `typing.Optional[ChatDefaultChatSettings]` — Default settings for all sessions created by this agent
@@ -517,7 +517,7 @@ client.agents_route_update(
-**default_settings:** `typing.Optional[AgentsUpdateAgentRequestDefaultSettings]` — Default settings for all sessions created by this agent +**default_settings:** `typing.Optional[ChatDefaultChatSettings]` — Default settings for all sessions created by this agent
@@ -707,7 +707,7 @@ client.agents_route_patch(
-**default_settings:** `typing.Optional[AgentsPatchAgentRequestDefaultSettings]` — Default settings for all sessions created by this agent +**default_settings:** `typing.Optional[ChatDefaultChatSettings]` — Default settings for all sessions created by this agent
@@ -843,7 +843,7 @@ client.agent_docs_route_list( -
client.agents_docs_search_route_search(...) +
client.agent_docs_route_create(...)
@@ -855,7 +855,7 @@ client.agent_docs_route_list(
-Search Docs owned by an Agent +Create a Doc for this Agent
@@ -870,26 +870,16 @@ Search Docs owned by an Agent
```python -from julep import DocsVectorDocSearchRequest from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.agents_docs_search_route_search( +client.agent_docs_route_create( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", - body=DocsVectorDocSearchRequest( - confidence=1.1, - alpha=1.1, - mmr=True, - vector=[1.1], - ), + title="title", + content="content", ) ``` @@ -906,7 +896,7 @@ client.agents_docs_search_route_search(
-**id:** `CommonUuid` — ID of the parent +**id:** `CommonUuid` — ID of parent resource
@@ -914,7 +904,7 @@ client.agents_docs_search_route_search(
-**limit:** `CommonLimit` — Limit the number of items returned +**title:** `CommonIdentifierSafeUnicode` — Title describing what this document contains
@@ -922,7 +912,7 @@ client.agents_docs_search_route_search(
-**offset:** `CommonOffset` — Offset the items returned +**content:** `DocsCreateDocRequestContent` — Contents of the document
@@ -930,7 +920,7 @@ client.agents_docs_search_route_search(
-**sort_by:** `AgentsDocsSearchRouteSearchRequestSortBy` — Sort by a field +**metadata:** `typing.Optional[typing.Dict[str, typing.Any]]`
@@ -938,15 +928,156 @@ client.agents_docs_search_route_search(
-**direction:** `AgentsDocsSearchRouteSearchRequestDirection` — Sort direction +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+ + + + +
+
client.agent_docs_route_delete(...)
-**metadata_filter:** `str` — JSON string of object that should be used to filter objects by metadata +#### 📝 Description + +
+
+ +
+
+ +Delete a Doc for this Agent +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from julep.client import JulepApi + +client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", +) +client.agent_docs_route_delete( + id="id", + child_id="child_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `CommonUuid` — ID of parent resource + +
+
+ +
+
+ +**child_id:** `CommonUuid` — ID of the resource to be deleted + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.agents_docs_search_route_search(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Search Docs owned by an Agent +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from julep import DocsVectorDocSearchRequest +from julep.client import JulepApi + +client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", +) +client.agents_docs_search_route_search( + id="id", + body=DocsVectorDocSearchRequest( + limit=1, + confidence=1.1, + vector=[1.1], + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `CommonUuid` — ID of the parent
@@ -1793,7 +1924,7 @@ client.agent_tools_route_create(
-**default_settings:** `typing.Optional[AgentsCreateAgentRequestDefaultSettings]` — Default settings for all sessions created by this agent +**default_settings:** `typing.Optional[ChatDefaultChatSettings]` — Default settings for all sessions created by this agent
@@ -2107,7 +2238,7 @@ client.agent_tools_route_patch(
-**function:** `typing.Optional[ToolsFunctionDefUpdate]` +**function:** `typing.Optional[ToolsFunctionDef]`
@@ -2214,7 +2345,7 @@ client.tasks_create_or_update_route_create_or_update(
-**parent_id:** `CommonUuid` — ID of parent resource +**parent_id:** `CommonUuid` — ID of the agent
@@ -2369,7 +2500,7 @@ client.individual_docs_route_get(
-
client.individual_docs_route_delete(...) +
client.embed_route_embed(...)
@@ -2381,7 +2512,7 @@ client.individual_docs_route_get(
-Delete an existing Doc by id +Embed a query for search
@@ -2396,14 +2527,17 @@ Delete an existing Doc by id
```python +from julep import DocsEmbedQueryRequest from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.individual_docs_route_delete( - id="id", +client.embed_route_embed( + body=DocsEmbedQueryRequest( + text="text", + ), ) ``` @@ -2420,7 +2554,7 @@ client.individual_docs_route_delete(
-**id:** `CommonUuid` — ID of the resource +**body:** `DocsEmbedQueryRequest`
@@ -2440,7 +2574,7 @@ client.individual_docs_route_delete(
-
client.embed_route_embed(...) +
client.executions_route_resume_with_task_token(...)
@@ -2452,7 +2586,7 @@ client.individual_docs_route_delete(
-Embed a query for search +Resume an execution with a task token
@@ -2467,17 +2601,14 @@ Embed a query for search
```python -from julep import DocsEmbedQueryRequest from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.embed_route_embed( - body=DocsEmbedQueryRequest( - text="text", - ), +client.executions_route_resume_with_task_token( + task_token="task_token", ) ``` @@ -2494,7 +2625,15 @@ client.embed_route_embed(
-**body:** `DocsEmbedQueryRequest` +**task_token:** `str` — A Task Token is a unique identifier for a specific Task Execution. + +
+
+ +
+
+ +**input:** `typing.Optional[typing.Dict[str, typing.Any]]` — The input to resume the execution with
@@ -2585,7 +2724,7 @@ client.executions_route_get(
-
client.execution_transitions_route_list(...) +
client.executions_route_update(...)
@@ -2597,7 +2736,7 @@ client.executions_route_get(
-List the Transitions of an Execution by id +Update an existing Execution
@@ -2612,19 +2751,18 @@ List the Transitions of an Execution by id
```python +from julep import ExecutionsUpdateExecutionRequest_Cancelled from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.execution_transitions_route_list( - id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", +client.executions_route_update( + id="string", + request=ExecutionsUpdateExecutionRequest_Cancelled( + reason="string", + ), ) ``` @@ -2641,39 +2779,7 @@ client.execution_transitions_route_list(
-**id:** `CommonUuid` — ID of parent - -
-
- -
-
- -**limit:** `CommonLimit` — Limit the number of items returned - -
-
- -
-
- -**offset:** `CommonOffset` — Offset the items returned - -
-
- -
-
- -**sort_by:** `ExecutionTransitionsRouteListRequestSortBy` — Sort by a field - -
-
- -
-
- -**direction:** `ExecutionTransitionsRouteListRequestDirection` — Sort direction +**id:** `CommonUuid` — ID of the resource
@@ -2681,7 +2787,7 @@ client.execution_transitions_route_list(
-**metadata_filter:** `str` — JSON string of object that should be used to filter objects by metadata +**request:** `ExecutionsUpdateExecutionRequest`
@@ -2701,7 +2807,7 @@ client.execution_transitions_route_list(
-
client.job_route_get(...) +
client.execution_transitions_route_list(...)
@@ -2713,7 +2819,7 @@ client.execution_transitions_route_list(
-Get the status of an existing Job by its id +List the Transitions of an Execution by id
@@ -2734,8 +2840,13 @@ client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.job_route_get( +client.execution_transitions_route_list( id="id", + limit=1, + offset=1, + sort_by="created_at", + direction="asc", + metadata_filter="metadata_filter", ) ``` @@ -2752,7 +2863,7 @@ client.job_route_get(
-**id:** `CommonUuid` — ID of the resource +**id:** `CommonUuid` — ID of parent
@@ -2760,12 +2871,123 @@ client.job_route_get(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**limit:** `CommonLimit` — Limit the number of items returned
- -
+ +
+
+ +**offset:** `CommonOffset` — Offset the items returned + +
+
+ +
+
+ +**sort_by:** `ExecutionTransitionsRouteListRequestSortBy` — Sort by a field + +
+
+ +
+
+ +**direction:** `ExecutionTransitionsRouteListRequestDirection` — Sort direction + +
+
+ +
+
+ +**metadata_filter:** `str` — JSON string of object that should be used to filter objects by metadata + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+ + + + + + +
+ +
client.job_route_get(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Get the status of an existing Job by its id +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from julep.client import JulepApi + +client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", +) +client.job_route_get( + id="id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `CommonUuid` — ID of the resource + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
@@ -3553,7 +3775,7 @@ Generate a response from the model
```python -from julep import ChatRouteGenerateRequestPreset, EntriesInputChatMlMessage +from julep import EntriesInputChatMlMessage from julep.client import JulepApi client = JulepApi( @@ -3562,18 +3784,16 @@ client = JulepApi( ) client.chat_route_generate( id="id", - request=ChatRouteGenerateRequestPreset( - messages=[ - EntriesInputChatMlMessage( - role="user", - content="content", - ) - ], - recall=True, - remember=True, - save=True, - stream=True, - ), + messages=[ + EntriesInputChatMlMessage( + role="user", + content="content", + ) + ], + remember=True, + recall=True, + save=True, + stream=True, ) ``` @@ -3598,7 +3818,7 @@ client.chat_route_generate(
-**request:** `ChatRouteGenerateRequest` +**remember:** `bool` — DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release)
@@ -3606,71 +3826,71 @@ client.chat_route_generate(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**recall:** `bool` — Whether previous memories and docs should be recalled or not
-
-
+
+
+**save:** `bool` — Whether this interaction should be stored in the session history or not +
-
-
client.history_route_history(...)
-#### 📝 Description +**stream:** `bool` — Indicates if the server should stream the response as it's generated + +
+
+**messages:** `typing.Sequence[EntriesInputChatMlMessage]` — A list of new input messages comprising the conversation so far. + +
+
+
-Get history of a Session -
-
+**model:** `typing.Optional[CommonIdentifierSafeUnicode]` — Identifier of the model to be used + -#### 🔌 Usage -
+**stop:** `typing.Optional[typing.Sequence[str]]` — Up to 4 sequences where the API will stop generating further tokens. + +
+
+
-```python -from julep.client import JulepApi - -client = JulepApi( - auth_key="YOUR_AUTH_KEY", - api_key="YOUR_API_KEY", -) -client.history_route_history( - id="id", - limit=1, -) - -``` -
-
+**seed:** `typing.Optional[int]` — If specified, the system will make a best effort to sample deterministically for that particular seed value + -#### ⚙️ Parameters -
+**max_tokens:** `typing.Optional[int]` — The maximum number of tokens to generate in the chat completion + +
+
+
-**id:** `CommonUuid` — ID of parent +**logit_bias:** `typing.Optional[typing.Dict[str, CommonLogitBias]]` — Modify the likelihood of specified tokens appearing in the completion
@@ -3678,7 +3898,7 @@ client.history_route_history(
-**limit:** `CommonLimit` — Limit the number of items returned +**response_format:** `typing.Optional[ChatCompletionResponseFormat]` — Response format (set to `json_object` to restrict output to JSON)
@@ -3686,70 +3906,79 @@ client.history_route_history(
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. +**agent:** `typing.Optional[CommonUuid]` — Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions)
- - +
+
+**repetition_penalty:** `typing.Optional[float]` — Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. +
-
-
client.history_route_delete(...)
-#### 📝 Description +**length_penalty:** `typing.Optional[float]` — Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. + +
+
+**min_p:** `typing.Optional[float]` — Minimum probability compared to leading token to be considered + +
+
+
-Clear the history of a Session (resets the Session) -
-
+**frequency_penalty:** `typing.Optional[float]` — Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + -#### 🔌 Usage -
+**presence_penalty:** `typing.Optional[float]` — Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + +
+
+
-```python -from julep.client import JulepApi - -client = JulepApi( - auth_key="YOUR_AUTH_KEY", - api_key="YOUR_API_KEY", -) -client.history_route_delete( - id="id", -) - -``` +**temperature:** `typing.Optional[float]` — What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. +
+ +
+
+ +**top_p:** `typing.Optional[float]` — Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. +
-#### ⚙️ Parameters -
+**tools:** `typing.Optional[typing.Sequence[ToolsFunctionTool]]` — (Advanced) List of tools that are provided in addition to agent's default set of tools. + +
+
+
-**id:** `CommonUuid` — ID of the resource +**tool_choice:** `typing.Optional[ChatChatInputDataToolChoice]` — Can be one of existing tools given to the agent earlier or the ones provided in this request.
@@ -3769,7 +3998,7 @@ client.history_route_delete(
-
client.task_executions_route_list(...) +
client.history_route_history(...)
@@ -3781,7 +4010,7 @@ client.history_route_delete(
-List executions of the given task +Get history of a Session
@@ -3802,13 +4031,8 @@ client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.task_executions_route_list( +client.history_route_history( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", ) ``` @@ -3833,46 +4057,6 @@ client.task_executions_route_list(
-**limit:** `CommonLimit` — Limit the number of items returned - -
-
- -
-
- -**offset:** `CommonOffset` — Offset the items returned - -
-
- -
-
- -**sort_by:** `TaskExecutionsRouteListRequestSortBy` — Sort by a field - -
-
- -
-
- -**direction:** `TaskExecutionsRouteListRequestDirection` — Sort direction - -
-
- -
-
- -**metadata_filter:** `str` — JSON string of object that should be used to filter objects by metadata - -
-
- -
-
- **request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
@@ -3885,7 +4069,7 @@ client.task_executions_route_list(
-
client.task_executions_route_create(...) +
client.history_route_delete(...)
@@ -3897,7 +4081,7 @@ client.task_executions_route_list(
-Create an execution for the given task +Clear the history of a Session (resets the Session)
@@ -3918,9 +4102,8 @@ client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.task_executions_route_create( +client.history_route_delete( id="id", - input={"key": "value"}, ) ``` @@ -3937,23 +4120,7 @@ client.task_executions_route_create(
-**id:** `CommonUuid` — ID of parent resource - -
-
- -
-
- -**input:** `typing.Dict[str, typing.Any]` — The input to the execution - -
-
- -
-
- -**metadata:** `typing.Optional[typing.Dict[str, typing.Any]]` +**id:** `CommonUuid` — ID of the resource
@@ -3973,7 +4140,7 @@ client.task_executions_route_create(
-
client.task_executions_route_resume_with_task_token(...) +
client.task_executions_route_list(...)
@@ -3985,7 +4152,7 @@ client.task_executions_route_create(
-Resume an execution with a task token +List executions of the given task
@@ -4006,9 +4173,13 @@ client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.task_executions_route_resume_with_task_token( +client.task_executions_route_list( id="id", - task_token="task_token", + limit=1, + offset=1, + sort_by="created_at", + direction="asc", + metadata_filter="metadata_filter", ) ``` @@ -4025,7 +4196,7 @@ client.task_executions_route_resume_with_task_token(
-**id:** `CommonUuid` — ID of parent Task +**id:** `CommonUuid` — ID of parent
@@ -4033,7 +4204,7 @@ client.task_executions_route_resume_with_task_token(
-**task_token:** `str` — A Task Token is a unique identifier for a specific Task Execution. +**limit:** `CommonLimit` — Limit the number of items returned
@@ -4041,7 +4212,31 @@ client.task_executions_route_resume_with_task_token(
-**input:** `typing.Optional[typing.Dict[str, typing.Any]]` — The input to resume the execution with +**offset:** `CommonOffset` — Offset the items returned + +
+
+ +
+
+ +**sort_by:** `TaskExecutionsRouteListRequestSortBy` — Sort by a field + +
+
+ +
+
+ +**direction:** `TaskExecutionsRouteListRequestDirection` — Sort direction + +
+
+ +
+
+ +**metadata_filter:** `str` — JSON string of object that should be used to filter objects by metadata
@@ -4061,7 +4256,7 @@ client.task_executions_route_resume_with_task_token(
-
client.task_executions_route_update(...) +
client.task_executions_route_create(...)
@@ -4073,7 +4268,7 @@ client.task_executions_route_resume_with_task_token(
-Update an existing Execution +Create an execution for the given task
@@ -4088,19 +4283,15 @@ Update an existing Execution
```python -from julep import ExecutionsUpdateExecutionRequest_Cancelled from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.task_executions_route_update( - id="string", - child_id="string", - request=ExecutionsUpdateExecutionRequest_Cancelled( - reason="string", - ), +client.task_executions_route_create( + id="id", + input={"key": "value"}, ) ``` @@ -4125,7 +4316,7 @@ client.task_executions_route_update(
-**child_id:** `CommonUuid` — ID of the resource to be updated +**input:** `typing.Dict[str, typing.Any]` — The input to the execution
@@ -4133,7 +4324,7 @@ client.task_executions_route_update(
-**request:** `ExecutionsUpdateExecutionRequest` +**metadata:** `typing.Optional[typing.Dict[str, typing.Any]]`
@@ -4895,7 +5086,7 @@ client.user_docs_route_list(
-
client.user_docs_search_route_search(...) +
client.user_docs_route_create(...)
@@ -4907,7 +5098,7 @@ client.user_docs_route_list(
-Search Docs owned by a User +Create a Doc for this User
@@ -4922,26 +5113,16 @@ Search Docs owned by a User
```python -from julep import DocsVectorDocSearchRequest from julep.client import JulepApi client = JulepApi( auth_key="YOUR_AUTH_KEY", api_key="YOUR_API_KEY", ) -client.user_docs_search_route_search( +client.user_docs_route_create( id="id", - limit=1, - offset=1, - sort_by="created_at", - direction="asc", - metadata_filter="metadata_filter", - body=DocsVectorDocSearchRequest( - confidence=1.1, - alpha=1.1, - mmr=True, - vector=[1.1], - ), + title="title", + content="content", ) ``` @@ -4958,7 +5139,7 @@ client.user_docs_search_route_search(
-**id:** `CommonUuid` — ID of the parent +**id:** `CommonUuid` — ID of parent resource
@@ -4966,7 +5147,7 @@ client.user_docs_search_route_search(
-**limit:** `CommonLimit` — Limit the number of items returned +**title:** `CommonIdentifierSafeUnicode` — Title describing what this document contains
@@ -4974,7 +5155,7 @@ client.user_docs_search_route_search(
-**offset:** `CommonOffset` — Offset the items returned +**content:** `DocsCreateDocRequestContent` — Contents of the document
@@ -4982,7 +5163,7 @@ client.user_docs_search_route_search(
-**sort_by:** `UserDocsSearchRouteSearchRequestSortBy` — Sort by a field +**metadata:** `typing.Optional[typing.Dict[str, typing.Any]]`
@@ -4990,15 +5171,156 @@ client.user_docs_search_route_search(
-**direction:** `UserDocsSearchRouteSearchRequestDirection` — Sort direction +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
+
+ + + +
+ +
client.user_docs_route_delete(...)
-**metadata_filter:** `str` — JSON string of object that should be used to filter objects by metadata +#### 📝 Description + +
+
+ +
+
+ +Delete a Doc for this User +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from julep.client import JulepApi + +client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", +) +client.user_docs_route_delete( + id="id", + child_id="child_id", +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `CommonUuid` — ID of parent resource + +
+
+ +
+
+ +**child_id:** `CommonUuid` — ID of the resource to be deleted + +
+
+ +
+
+ +**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. + +
+
+
+
+ + +
+
+
+ +
client.user_docs_search_route_search(...) +
+
+ +#### 📝 Description + +
+
+ +
+
+ +Search Docs owned by a User +
+
+
+
+ +#### 🔌 Usage + +
+
+ +
+
+ +```python +from julep import DocsVectorDocSearchRequest +from julep.client import JulepApi + +client = JulepApi( + auth_key="YOUR_AUTH_KEY", + api_key="YOUR_API_KEY", +) +client.user_docs_search_route_search( + id="id", + body=DocsVectorDocSearchRequest( + limit=1, + confidence=1.1, + vector=[1.1], + ), +) + +``` +
+
+
+
+ +#### ⚙️ Parameters + +
+
+ +
+
+ +**id:** `CommonUuid` — ID of the parent
diff --git a/sdks/python/julep/api/types/__init__.py b/sdks/python/julep/api/types/__init__.py index d4d576485..b1aead7fe 100644 --- a/sdks/python/julep/api/types/__init__.py +++ b/sdks/python/julep/api/types/__init__.py @@ -9,75 +9,44 @@ from .agent_tools_route_list_request_sort_by import AgentToolsRouteListRequestSortBy from .agent_tools_route_list_response import AgentToolsRouteListResponse from .agents_agent import AgentsAgent -from .agents_agent_default_settings import AgentsAgentDefaultSettings from .agents_agent_instructions import AgentsAgentInstructions from .agents_create_agent_request import AgentsCreateAgentRequest -from .agents_create_agent_request_default_settings import ( - AgentsCreateAgentRequestDefaultSettings, -) from .agents_create_agent_request_instructions import ( AgentsCreateAgentRequestInstructions, ) +from .agents_create_or_update_agent_request import AgentsCreateOrUpdateAgentRequest from .agents_docs_search_route_search_request_body import ( AgentsDocsSearchRouteSearchRequestBody, ) -from .agents_docs_search_route_search_request_direction import ( - AgentsDocsSearchRouteSearchRequestDirection, -) -from .agents_docs_search_route_search_request_sort_by import ( - AgentsDocsSearchRouteSearchRequestSortBy, -) -from .agents_docs_search_route_search_response import ( - AgentsDocsSearchRouteSearchResponse, -) -from .agents_patch_agent_request_default_settings import ( - AgentsPatchAgentRequestDefaultSettings, -) from .agents_patch_agent_request_instructions import AgentsPatchAgentRequestInstructions from .agents_route_list_request_direction import AgentsRouteListRequestDirection from .agents_route_list_request_sort_by import AgentsRouteListRequestSortBy from .agents_route_list_response import AgentsRouteListResponse from .agents_update_agent_request import AgentsUpdateAgentRequest -from .agents_update_agent_request_default_settings import ( - AgentsUpdateAgentRequestDefaultSettings, -) from .agents_update_agent_request_instructions import ( AgentsUpdateAgentRequestInstructions, ) from .chat_base_chat_output import ChatBaseChatOutput from .chat_base_chat_response import ChatBaseChatResponse from .chat_base_token_log_prob import ChatBaseTokenLogProb +from .chat_chat_input_data import ChatChatInputData +from .chat_chat_input_data_tool_choice import ChatChatInputDataToolChoice from .chat_chat_output_chunk import ChatChatOutputChunk +from .chat_chat_settings import ChatChatSettings from .chat_chunk_chat_response import ChatChunkChatResponse from .chat_competion_usage import ChatCompetionUsage from .chat_completion_response_format import ChatCompletionResponseFormat from .chat_completion_response_format_type import ChatCompletionResponseFormatType +from .chat_default_chat_settings import ChatDefaultChatSettings from .chat_finish_reason import ChatFinishReason -from .chat_generation_preset import ChatGenerationPreset -from .chat_generation_preset_settings import ChatGenerationPresetSettings from .chat_log_prob_response import ChatLogProbResponse from .chat_message_chat_response import ChatMessageChatResponse +from .chat_message_chat_response_choices_item import ChatMessageChatResponseChoicesItem from .chat_multiple_chat_output import ChatMultipleChatOutput from .chat_open_ai_settings import ChatOpenAiSettings -from .chat_route_generate_request import ChatRouteGenerateRequest -from .chat_route_generate_request_agent import ChatRouteGenerateRequestAgent -from .chat_route_generate_request_agent_tool_choice import ( - ChatRouteGenerateRequestAgentToolChoice, -) -from .chat_route_generate_request_frequency_penalty import ( - ChatRouteGenerateRequestFrequencyPenalty, -) -from .chat_route_generate_request_frequency_penalty_tool_choice import ( - ChatRouteGenerateRequestFrequencyPenaltyToolChoice, -) -from .chat_route_generate_request_preset import ChatRouteGenerateRequestPreset -from .chat_route_generate_request_preset_tool_choice import ( - ChatRouteGenerateRequestPresetToolChoice, -) from .chat_route_generate_response import ChatRouteGenerateResponse from .chat_single_chat_output import ChatSingleChatOutput from .chat_token_log_prob import ChatTokenLogProb -from .chat_v_llm_settings import ChatVLlmSettings from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode from .common_limit import CommonLimit from .common_logit_bias import CommonLogitBias @@ -90,21 +59,21 @@ from .common_uuid import CommonUuid from .common_valid_python_identifier import CommonValidPythonIdentifier from .docs_base_doc_search_request import DocsBaseDocSearchRequest +from .docs_create_doc_request import DocsCreateDocRequest +from .docs_create_doc_request_content import DocsCreateDocRequestContent from .docs_doc import DocsDoc from .docs_doc_content import DocsDocContent from .docs_doc_owner import DocsDocOwner from .docs_doc_owner_role import DocsDocOwnerRole from .docs_doc_reference import DocsDocReference +from .docs_doc_search_response import DocsDocSearchResponse from .docs_embed_query_request import DocsEmbedQueryRequest from .docs_embed_query_request_text import DocsEmbedQueryRequestText from .docs_embed_query_response import DocsEmbedQueryResponse from .docs_hybrid_doc_search_request import DocsHybridDocSearchRequest -from .docs_hybrid_doc_search_request_text import DocsHybridDocSearchRequestText -from .docs_hybrid_doc_search_request_vector import DocsHybridDocSearchRequestVector +from .docs_snippet import DocsSnippet from .docs_text_only_doc_search_request import DocsTextOnlyDocSearchRequest -from .docs_text_only_doc_search_request_text import DocsTextOnlyDocSearchRequestText from .docs_vector_doc_search_request import DocsVectorDocSearchRequest -from .docs_vector_doc_search_request_vector import DocsVectorDocSearchRequestVector from .entries_base_entry import EntriesBaseEntry from .entries_base_entry_content import EntriesBaseEntryContent from .entries_base_entry_content_item import EntriesBaseEntryContentItem @@ -115,13 +84,6 @@ ) from .entries_base_entry_source import EntriesBaseEntrySource from .entries_chat_ml_image_content_part import EntriesChatMlImageContentPart -from .entries_chat_ml_message import EntriesChatMlMessage -from .entries_chat_ml_message_content import EntriesChatMlMessageContent -from .entries_chat_ml_message_content_item import ( - EntriesChatMlMessageContentItem, - EntriesChatMlMessageContentItem_ImageUrl, - EntriesChatMlMessageContentItem_Text, -) from .entries_chat_ml_role import EntriesChatMlRole from .entries_chat_ml_text_content_part import EntriesChatMlTextContentPart from .entries_entry import EntriesEntry @@ -153,6 +115,7 @@ from .executions_resume_execution_request import ExecutionsResumeExecutionRequest from .executions_stop_execution_request import ExecutionsStopExecutionRequest from .executions_transition import ExecutionsTransition +from .executions_transition_target import ExecutionsTransitionTarget from .executions_transition_type import ExecutionsTransitionType from .executions_update_execution_request import ( ExecutionsUpdateExecutionRequest, @@ -162,6 +125,9 @@ from .jobs_job_state import JobsJobState from .jobs_job_status import JobsJobStatus from .sessions_context_overflow_type import SessionsContextOverflowType +from .sessions_create_or_update_session_request import ( + SessionsCreateOrUpdateSessionRequest, +) from .sessions_create_session_request import SessionsCreateSessionRequest from .sessions_multi_agent_multi_user_session import SessionsMultiAgentMultiUserSession from .sessions_multi_agent_no_user_session import SessionsMultiAgentNoUserSession @@ -194,51 +160,193 @@ TaskExecutionsRouteListRequestSortBy, ) from .task_executions_route_list_response import TaskExecutionsRouteListResponse -from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .tasks_base_workflow_step import ( + TasksBaseWorkflowStep, + TasksBaseWorkflowStep_Embed, + TasksBaseWorkflowStep_Error, + TasksBaseWorkflowStep_Foreach, + TasksBaseWorkflowStep_Get, + TasksBaseWorkflowStep_IfElse, + TasksBaseWorkflowStep_Log, + TasksBaseWorkflowStep_MapReduce, + TasksBaseWorkflowStep_Parallel, + TasksBaseWorkflowStep_Prompt, + TasksBaseWorkflowStep_Return, + TasksBaseWorkflowStep_Search, + TasksBaseWorkflowStep_Set, + TasksBaseWorkflowStep_Sleep, + TasksBaseWorkflowStep_Switch, + TasksBaseWorkflowStep_ToolCall, + TasksBaseWorkflowStep_WaitForInput, + TasksBaseWorkflowStep_Yield, +) +from .tasks_case_then import TasksCaseThen +from .tasks_case_then_then import ( + TasksCaseThenThen, + TasksCaseThenThen_Embed, + TasksCaseThenThen_Error, + TasksCaseThenThen_Evaluate, + TasksCaseThenThen_Get, + TasksCaseThenThen_Log, + TasksCaseThenThen_Prompt, + TasksCaseThenThen_Return, + TasksCaseThenThen_Search, + TasksCaseThenThen_Set, + TasksCaseThenThen_Sleep, + TasksCaseThenThen_ToolCall, + TasksCaseThenThen_WaitForInput, + TasksCaseThenThen_Yield, +) from .tasks_create_task_request import TasksCreateTaskRequest from .tasks_create_task_request_main_item import ( TasksCreateTaskRequestMainItem, + TasksCreateTaskRequestMainItem_Embed, TasksCreateTaskRequestMainItem_Error, TasksCreateTaskRequestMainItem_Evaluate, + TasksCreateTaskRequestMainItem_Foreach, + TasksCreateTaskRequestMainItem_Get, TasksCreateTaskRequestMainItem_IfElse, + TasksCreateTaskRequestMainItem_Log, + TasksCreateTaskRequestMainItem_MapReduce, + TasksCreateTaskRequestMainItem_Parallel, TasksCreateTaskRequestMainItem_Prompt, + TasksCreateTaskRequestMainItem_Return, + TasksCreateTaskRequestMainItem_Search, + TasksCreateTaskRequestMainItem_Set, + TasksCreateTaskRequestMainItem_Sleep, + TasksCreateTaskRequestMainItem_Switch, TasksCreateTaskRequestMainItem_ToolCall, TasksCreateTaskRequestMainItem_WaitForInput, TasksCreateTaskRequestMainItem_Yield, ) +from .tasks_embed_step import TasksEmbedStep from .tasks_error_workflow_step import TasksErrorWorkflowStep from .tasks_evaluate_step import TasksEvaluateStep +from .tasks_foreach_do import TasksForeachDo +from .tasks_foreach_do_do import ( + TasksForeachDoDo, + TasksForeachDoDo_Embed, + TasksForeachDoDo_Error, + TasksForeachDoDo_Evaluate, + TasksForeachDoDo_Get, + TasksForeachDoDo_Log, + TasksForeachDoDo_Prompt, + TasksForeachDoDo_Return, + TasksForeachDoDo_Search, + TasksForeachDoDo_Set, + TasksForeachDoDo_Sleep, + TasksForeachDoDo_ToolCall, + TasksForeachDoDo_WaitForInput, + TasksForeachDoDo_Yield, +) +from .tasks_foreach_step import TasksForeachStep +from .tasks_get_step import TasksGetStep from .tasks_if_else_workflow_step import TasksIfElseWorkflowStep -from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse -from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_if_else_workflow_step_else import ( + TasksIfElseWorkflowStepElse, + TasksIfElseWorkflowStepElse_Embed, + TasksIfElseWorkflowStepElse_Error, + TasksIfElseWorkflowStepElse_Evaluate, + TasksIfElseWorkflowStepElse_Get, + TasksIfElseWorkflowStepElse_Log, + TasksIfElseWorkflowStepElse_Prompt, + TasksIfElseWorkflowStepElse_Return, + TasksIfElseWorkflowStepElse_Search, + TasksIfElseWorkflowStepElse_Set, + TasksIfElseWorkflowStepElse_Sleep, + TasksIfElseWorkflowStepElse_ToolCall, + TasksIfElseWorkflowStepElse_WaitForInput, + TasksIfElseWorkflowStepElse_Yield, +) +from .tasks_if_else_workflow_step_then import ( + TasksIfElseWorkflowStepThen, + TasksIfElseWorkflowStepThen_Embed, + TasksIfElseWorkflowStepThen_Error, + TasksIfElseWorkflowStepThen_Evaluate, + TasksIfElseWorkflowStepThen_Get, + TasksIfElseWorkflowStepThen_Log, + TasksIfElseWorkflowStepThen_Prompt, + TasksIfElseWorkflowStepThen_Return, + TasksIfElseWorkflowStepThen_Search, + TasksIfElseWorkflowStepThen_Set, + TasksIfElseWorkflowStepThen_Sleep, + TasksIfElseWorkflowStepThen_ToolCall, + TasksIfElseWorkflowStepThen_WaitForInput, + TasksIfElseWorkflowStepThen_Yield, +) +from .tasks_log_step import TasksLogStep +from .tasks_map_over import TasksMapOver +from .tasks_map_reduce_step import TasksMapReduceStep +from .tasks_parallel_step import TasksParallelStep +from .tasks_parallel_step_parallel_item import ( + TasksParallelStepParallelItem, + TasksParallelStepParallelItem_Embed, + TasksParallelStepParallelItem_Error, + TasksParallelStepParallelItem_Evaluate, + TasksParallelStepParallelItem_Get, + TasksParallelStepParallelItem_Log, + TasksParallelStepParallelItem_Prompt, + TasksParallelStepParallelItem_Return, + TasksParallelStepParallelItem_Search, + TasksParallelStepParallelItem_Set, + TasksParallelStepParallelItem_Sleep, + TasksParallelStepParallelItem_ToolCall, + TasksParallelStepParallelItem_WaitForInput, + TasksParallelStepParallelItem_Yield, +) from .tasks_patch_task_request_main_item import ( TasksPatchTaskRequestMainItem, + TasksPatchTaskRequestMainItem_Embed, TasksPatchTaskRequestMainItem_Error, TasksPatchTaskRequestMainItem_Evaluate, + TasksPatchTaskRequestMainItem_Foreach, + TasksPatchTaskRequestMainItem_Get, TasksPatchTaskRequestMainItem_IfElse, + TasksPatchTaskRequestMainItem_Log, + TasksPatchTaskRequestMainItem_MapReduce, + TasksPatchTaskRequestMainItem_Parallel, TasksPatchTaskRequestMainItem_Prompt, + TasksPatchTaskRequestMainItem_Return, + TasksPatchTaskRequestMainItem_Search, + TasksPatchTaskRequestMainItem_Set, + TasksPatchTaskRequestMainItem_Sleep, + TasksPatchTaskRequestMainItem_Switch, TasksPatchTaskRequestMainItem_ToolCall, TasksPatchTaskRequestMainItem_WaitForInput, TasksPatchTaskRequestMainItem_Yield, ) from .tasks_prompt_step import TasksPromptStep from .tasks_prompt_step_prompt import TasksPromptStepPrompt -from .tasks_prompt_step_settings import TasksPromptStepSettings -from .tasks_prompt_step_settings_agent import TasksPromptStepSettingsAgent -from .tasks_prompt_step_settings_frequency_penalty import ( - TasksPromptStepSettingsFrequencyPenalty, -) -from .tasks_prompt_step_settings_preset import TasksPromptStepSettingsPreset +from .tasks_return_step import TasksReturnStep from .tasks_route_list_request_direction import TasksRouteListRequestDirection from .tasks_route_list_request_sort_by import TasksRouteListRequestSortBy from .tasks_route_list_response import TasksRouteListResponse +from .tasks_search_step import TasksSearchStep +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_key import TasksSetKey +from .tasks_set_step import TasksSetStep +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor +from .tasks_sleep_step import TasksSleepStep +from .tasks_switch_step import TasksSwitchStep from .tasks_task import TasksTask from .tasks_task_main_item import ( TasksTaskMainItem, + TasksTaskMainItem_Embed, TasksTaskMainItem_Error, TasksTaskMainItem_Evaluate, + TasksTaskMainItem_Foreach, + TasksTaskMainItem_Get, TasksTaskMainItem_IfElse, + TasksTaskMainItem_Log, + TasksTaskMainItem_MapReduce, + TasksTaskMainItem_Parallel, TasksTaskMainItem_Prompt, + TasksTaskMainItem_Return, + TasksTaskMainItem_Search, + TasksTaskMainItem_Set, + TasksTaskMainItem_Sleep, + TasksTaskMainItem_Switch, TasksTaskMainItem_ToolCall, TasksTaskMainItem_WaitForInput, TasksTaskMainItem_Yield, @@ -247,22 +355,32 @@ from .tasks_tool_call_step import TasksToolCallStep from .tasks_update_task_request_main_item import ( TasksUpdateTaskRequestMainItem, + TasksUpdateTaskRequestMainItem_Embed, TasksUpdateTaskRequestMainItem_Error, TasksUpdateTaskRequestMainItem_Evaluate, + TasksUpdateTaskRequestMainItem_Foreach, + TasksUpdateTaskRequestMainItem_Get, TasksUpdateTaskRequestMainItem_IfElse, + TasksUpdateTaskRequestMainItem_Log, + TasksUpdateTaskRequestMainItem_MapReduce, + TasksUpdateTaskRequestMainItem_Parallel, TasksUpdateTaskRequestMainItem_Prompt, + TasksUpdateTaskRequestMainItem_Return, + TasksUpdateTaskRequestMainItem_Search, + TasksUpdateTaskRequestMainItem_Set, + TasksUpdateTaskRequestMainItem_Sleep, + TasksUpdateTaskRequestMainItem_Switch, TasksUpdateTaskRequestMainItem_ToolCall, TasksUpdateTaskRequestMainItem_WaitForInput, TasksUpdateTaskRequestMainItem_Yield, ) from .tasks_wait_for_input_step import TasksWaitForInputStep -from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo from .tasks_yield_step import TasksYieldStep from .tools_chosen_function_call import ToolsChosenFunctionCall from .tools_chosen_tool_call import ToolsChosenToolCall, ToolsChosenToolCall_Function +from .tools_create_tool_request import ToolsCreateToolRequest from .tools_function_call_option import ToolsFunctionCallOption from .tools_function_def import ToolsFunctionDef -from .tools_function_def_update import ToolsFunctionDefUpdate from .tools_function_tool import ToolsFunctionTool from .tools_named_function_choice import ToolsNamedFunctionChoice from .tools_named_tool_choice import ToolsNamedToolChoice, ToolsNamedToolChoice_Function @@ -275,17 +393,11 @@ from .user_docs_search_route_search_request_body import ( UserDocsSearchRouteSearchRequestBody, ) -from .user_docs_search_route_search_request_direction import ( - UserDocsSearchRouteSearchRequestDirection, -) -from .user_docs_search_route_search_request_sort_by import ( - UserDocsSearchRouteSearchRequestSortBy, -) -from .user_docs_search_route_search_response import UserDocsSearchRouteSearchResponse +from .users_create_or_update_user_request import UsersCreateOrUpdateUserRequest +from .users_create_user_request import UsersCreateUserRequest from .users_route_list_request_direction import UsersRouteListRequestDirection from .users_route_list_request_sort_by import UsersRouteListRequestSortBy from .users_route_list_response import UsersRouteListResponse -from .users_update_user_request import UsersUpdateUserRequest from .users_user import UsersUser __all__ = [ @@ -296,49 +408,38 @@ "AgentToolsRouteListRequestSortBy", "AgentToolsRouteListResponse", "AgentsAgent", - "AgentsAgentDefaultSettings", "AgentsAgentInstructions", "AgentsCreateAgentRequest", - "AgentsCreateAgentRequestDefaultSettings", "AgentsCreateAgentRequestInstructions", + "AgentsCreateOrUpdateAgentRequest", "AgentsDocsSearchRouteSearchRequestBody", - "AgentsDocsSearchRouteSearchRequestDirection", - "AgentsDocsSearchRouteSearchRequestSortBy", - "AgentsDocsSearchRouteSearchResponse", - "AgentsPatchAgentRequestDefaultSettings", "AgentsPatchAgentRequestInstructions", "AgentsRouteListRequestDirection", "AgentsRouteListRequestSortBy", "AgentsRouteListResponse", "AgentsUpdateAgentRequest", - "AgentsUpdateAgentRequestDefaultSettings", "AgentsUpdateAgentRequestInstructions", "ChatBaseChatOutput", "ChatBaseChatResponse", "ChatBaseTokenLogProb", + "ChatChatInputData", + "ChatChatInputDataToolChoice", "ChatChatOutputChunk", + "ChatChatSettings", "ChatChunkChatResponse", "ChatCompetionUsage", "ChatCompletionResponseFormat", "ChatCompletionResponseFormatType", + "ChatDefaultChatSettings", "ChatFinishReason", - "ChatGenerationPreset", - "ChatGenerationPresetSettings", "ChatLogProbResponse", "ChatMessageChatResponse", + "ChatMessageChatResponseChoicesItem", "ChatMultipleChatOutput", "ChatOpenAiSettings", - "ChatRouteGenerateRequest", - "ChatRouteGenerateRequestAgent", - "ChatRouteGenerateRequestAgentToolChoice", - "ChatRouteGenerateRequestFrequencyPenalty", - "ChatRouteGenerateRequestFrequencyPenaltyToolChoice", - "ChatRouteGenerateRequestPreset", - "ChatRouteGenerateRequestPresetToolChoice", "ChatRouteGenerateResponse", "ChatSingleChatOutput", "ChatTokenLogProb", - "ChatVLlmSettings", "CommonIdentifierSafeUnicode", "CommonLimit", "CommonLogitBias", @@ -351,21 +452,21 @@ "CommonUuid", "CommonValidPythonIdentifier", "DocsBaseDocSearchRequest", + "DocsCreateDocRequest", + "DocsCreateDocRequestContent", "DocsDoc", "DocsDocContent", "DocsDocOwner", "DocsDocOwnerRole", "DocsDocReference", + "DocsDocSearchResponse", "DocsEmbedQueryRequest", "DocsEmbedQueryRequestText", "DocsEmbedQueryResponse", "DocsHybridDocSearchRequest", - "DocsHybridDocSearchRequestText", - "DocsHybridDocSearchRequestVector", + "DocsSnippet", "DocsTextOnlyDocSearchRequest", - "DocsTextOnlyDocSearchRequestText", "DocsVectorDocSearchRequest", - "DocsVectorDocSearchRequestVector", "EntriesBaseEntry", "EntriesBaseEntryContent", "EntriesBaseEntryContentItem", @@ -374,11 +475,6 @@ "EntriesBaseEntryContentItemItem_Text", "EntriesBaseEntrySource", "EntriesChatMlImageContentPart", - "EntriesChatMlMessage", - "EntriesChatMlMessageContent", - "EntriesChatMlMessageContentItem", - "EntriesChatMlMessageContentItem_ImageUrl", - "EntriesChatMlMessageContentItem_Text", "EntriesChatMlRole", "EntriesChatMlTextContentPart", "EntriesEntry", @@ -400,6 +496,7 @@ "ExecutionsResumeExecutionRequest", "ExecutionsStopExecutionRequest", "ExecutionsTransition", + "ExecutionsTransitionTarget", "ExecutionsTransitionType", "ExecutionsUpdateExecutionRequest", "ExecutionsUpdateExecutionRequest_Cancelled", @@ -407,6 +504,7 @@ "JobsJobState", "JobsJobStatus", "SessionsContextOverflowType", + "SessionsCreateOrUpdateSessionRequest", "SessionsCreateSessionRequest", "SessionsMultiAgentMultiUserSession", "SessionsMultiAgentNoUserSession", @@ -428,65 +526,207 @@ "TaskExecutionsRouteListRequestSortBy", "TaskExecutionsRouteListResponse", "TasksBaseWorkflowStep", + "TasksBaseWorkflowStep_Embed", + "TasksBaseWorkflowStep_Error", + "TasksBaseWorkflowStep_Foreach", + "TasksBaseWorkflowStep_Get", + "TasksBaseWorkflowStep_IfElse", + "TasksBaseWorkflowStep_Log", + "TasksBaseWorkflowStep_MapReduce", + "TasksBaseWorkflowStep_Parallel", + "TasksBaseWorkflowStep_Prompt", + "TasksBaseWorkflowStep_Return", + "TasksBaseWorkflowStep_Search", + "TasksBaseWorkflowStep_Set", + "TasksBaseWorkflowStep_Sleep", + "TasksBaseWorkflowStep_Switch", + "TasksBaseWorkflowStep_ToolCall", + "TasksBaseWorkflowStep_WaitForInput", + "TasksBaseWorkflowStep_Yield", + "TasksCaseThen", + "TasksCaseThenThen", + "TasksCaseThenThen_Embed", + "TasksCaseThenThen_Error", + "TasksCaseThenThen_Evaluate", + "TasksCaseThenThen_Get", + "TasksCaseThenThen_Log", + "TasksCaseThenThen_Prompt", + "TasksCaseThenThen_Return", + "TasksCaseThenThen_Search", + "TasksCaseThenThen_Set", + "TasksCaseThenThen_Sleep", + "TasksCaseThenThen_ToolCall", + "TasksCaseThenThen_WaitForInput", + "TasksCaseThenThen_Yield", "TasksCreateTaskRequest", "TasksCreateTaskRequestMainItem", + "TasksCreateTaskRequestMainItem_Embed", "TasksCreateTaskRequestMainItem_Error", "TasksCreateTaskRequestMainItem_Evaluate", + "TasksCreateTaskRequestMainItem_Foreach", + "TasksCreateTaskRequestMainItem_Get", "TasksCreateTaskRequestMainItem_IfElse", + "TasksCreateTaskRequestMainItem_Log", + "TasksCreateTaskRequestMainItem_MapReduce", + "TasksCreateTaskRequestMainItem_Parallel", "TasksCreateTaskRequestMainItem_Prompt", + "TasksCreateTaskRequestMainItem_Return", + "TasksCreateTaskRequestMainItem_Search", + "TasksCreateTaskRequestMainItem_Set", + "TasksCreateTaskRequestMainItem_Sleep", + "TasksCreateTaskRequestMainItem_Switch", "TasksCreateTaskRequestMainItem_ToolCall", "TasksCreateTaskRequestMainItem_WaitForInput", "TasksCreateTaskRequestMainItem_Yield", + "TasksEmbedStep", "TasksErrorWorkflowStep", "TasksEvaluateStep", + "TasksForeachDo", + "TasksForeachDoDo", + "TasksForeachDoDo_Embed", + "TasksForeachDoDo_Error", + "TasksForeachDoDo_Evaluate", + "TasksForeachDoDo_Get", + "TasksForeachDoDo_Log", + "TasksForeachDoDo_Prompt", + "TasksForeachDoDo_Return", + "TasksForeachDoDo_Search", + "TasksForeachDoDo_Set", + "TasksForeachDoDo_Sleep", + "TasksForeachDoDo_ToolCall", + "TasksForeachDoDo_WaitForInput", + "TasksForeachDoDo_Yield", + "TasksForeachStep", + "TasksGetStep", "TasksIfElseWorkflowStep", "TasksIfElseWorkflowStepElse", + "TasksIfElseWorkflowStepElse_Embed", + "TasksIfElseWorkflowStepElse_Error", + "TasksIfElseWorkflowStepElse_Evaluate", + "TasksIfElseWorkflowStepElse_Get", + "TasksIfElseWorkflowStepElse_Log", + "TasksIfElseWorkflowStepElse_Prompt", + "TasksIfElseWorkflowStepElse_Return", + "TasksIfElseWorkflowStepElse_Search", + "TasksIfElseWorkflowStepElse_Set", + "TasksIfElseWorkflowStepElse_Sleep", + "TasksIfElseWorkflowStepElse_ToolCall", + "TasksIfElseWorkflowStepElse_WaitForInput", + "TasksIfElseWorkflowStepElse_Yield", "TasksIfElseWorkflowStepThen", + "TasksIfElseWorkflowStepThen_Embed", + "TasksIfElseWorkflowStepThen_Error", + "TasksIfElseWorkflowStepThen_Evaluate", + "TasksIfElseWorkflowStepThen_Get", + "TasksIfElseWorkflowStepThen_Log", + "TasksIfElseWorkflowStepThen_Prompt", + "TasksIfElseWorkflowStepThen_Return", + "TasksIfElseWorkflowStepThen_Search", + "TasksIfElseWorkflowStepThen_Set", + "TasksIfElseWorkflowStepThen_Sleep", + "TasksIfElseWorkflowStepThen_ToolCall", + "TasksIfElseWorkflowStepThen_WaitForInput", + "TasksIfElseWorkflowStepThen_Yield", + "TasksLogStep", + "TasksMapOver", + "TasksMapReduceStep", + "TasksParallelStep", + "TasksParallelStepParallelItem", + "TasksParallelStepParallelItem_Embed", + "TasksParallelStepParallelItem_Error", + "TasksParallelStepParallelItem_Evaluate", + "TasksParallelStepParallelItem_Get", + "TasksParallelStepParallelItem_Log", + "TasksParallelStepParallelItem_Prompt", + "TasksParallelStepParallelItem_Return", + "TasksParallelStepParallelItem_Search", + "TasksParallelStepParallelItem_Set", + "TasksParallelStepParallelItem_Sleep", + "TasksParallelStepParallelItem_ToolCall", + "TasksParallelStepParallelItem_WaitForInput", + "TasksParallelStepParallelItem_Yield", "TasksPatchTaskRequestMainItem", + "TasksPatchTaskRequestMainItem_Embed", "TasksPatchTaskRequestMainItem_Error", "TasksPatchTaskRequestMainItem_Evaluate", + "TasksPatchTaskRequestMainItem_Foreach", + "TasksPatchTaskRequestMainItem_Get", "TasksPatchTaskRequestMainItem_IfElse", + "TasksPatchTaskRequestMainItem_Log", + "TasksPatchTaskRequestMainItem_MapReduce", + "TasksPatchTaskRequestMainItem_Parallel", "TasksPatchTaskRequestMainItem_Prompt", + "TasksPatchTaskRequestMainItem_Return", + "TasksPatchTaskRequestMainItem_Search", + "TasksPatchTaskRequestMainItem_Set", + "TasksPatchTaskRequestMainItem_Sleep", + "TasksPatchTaskRequestMainItem_Switch", "TasksPatchTaskRequestMainItem_ToolCall", "TasksPatchTaskRequestMainItem_WaitForInput", "TasksPatchTaskRequestMainItem_Yield", "TasksPromptStep", "TasksPromptStepPrompt", - "TasksPromptStepSettings", - "TasksPromptStepSettingsAgent", - "TasksPromptStepSettingsFrequencyPenalty", - "TasksPromptStepSettingsPreset", + "TasksReturnStep", "TasksRouteListRequestDirection", "TasksRouteListRequestSortBy", "TasksRouteListResponse", + "TasksSearchStep", + "TasksSearchStepSearch", + "TasksSetKey", + "TasksSetStep", + "TasksSetStepSet", + "TasksSleepFor", + "TasksSleepStep", + "TasksSwitchStep", "TasksTask", "TasksTaskMainItem", + "TasksTaskMainItem_Embed", "TasksTaskMainItem_Error", "TasksTaskMainItem_Evaluate", + "TasksTaskMainItem_Foreach", + "TasksTaskMainItem_Get", "TasksTaskMainItem_IfElse", + "TasksTaskMainItem_Log", + "TasksTaskMainItem_MapReduce", + "TasksTaskMainItem_Parallel", "TasksTaskMainItem_Prompt", + "TasksTaskMainItem_Return", + "TasksTaskMainItem_Search", + "TasksTaskMainItem_Set", + "TasksTaskMainItem_Sleep", + "TasksTaskMainItem_Switch", "TasksTaskMainItem_ToolCall", "TasksTaskMainItem_WaitForInput", "TasksTaskMainItem_Yield", "TasksTaskTool", "TasksToolCallStep", "TasksUpdateTaskRequestMainItem", + "TasksUpdateTaskRequestMainItem_Embed", "TasksUpdateTaskRequestMainItem_Error", "TasksUpdateTaskRequestMainItem_Evaluate", + "TasksUpdateTaskRequestMainItem_Foreach", + "TasksUpdateTaskRequestMainItem_Get", "TasksUpdateTaskRequestMainItem_IfElse", + "TasksUpdateTaskRequestMainItem_Log", + "TasksUpdateTaskRequestMainItem_MapReduce", + "TasksUpdateTaskRequestMainItem_Parallel", "TasksUpdateTaskRequestMainItem_Prompt", + "TasksUpdateTaskRequestMainItem_Return", + "TasksUpdateTaskRequestMainItem_Search", + "TasksUpdateTaskRequestMainItem_Set", + "TasksUpdateTaskRequestMainItem_Sleep", + "TasksUpdateTaskRequestMainItem_Switch", "TasksUpdateTaskRequestMainItem_ToolCall", "TasksUpdateTaskRequestMainItem_WaitForInput", "TasksUpdateTaskRequestMainItem_Yield", "TasksWaitForInputStep", - "TasksWaitForInputStepInfo", "TasksYieldStep", "ToolsChosenFunctionCall", "ToolsChosenToolCall", "ToolsChosenToolCall_Function", + "ToolsCreateToolRequest", "ToolsFunctionCallOption", "ToolsFunctionDef", - "ToolsFunctionDefUpdate", "ToolsFunctionTool", "ToolsNamedFunctionChoice", "ToolsNamedToolChoice", @@ -499,12 +739,10 @@ "UserDocsRouteListRequestSortBy", "UserDocsRouteListResponse", "UserDocsSearchRouteSearchRequestBody", - "UserDocsSearchRouteSearchRequestDirection", - "UserDocsSearchRouteSearchRequestSortBy", - "UserDocsSearchRouteSearchResponse", + "UsersCreateOrUpdateUserRequest", + "UsersCreateUserRequest", "UsersRouteListRequestDirection", "UsersRouteListRequestSortBy", "UsersRouteListResponse", - "UsersUpdateUserRequest", "UsersUser", ] diff --git a/sdks/python/julep/api/types/agents_agent.py b/sdks/python/julep/api/types/agents_agent.py index 04827f31f..01fce6908 100644 --- a/sdks/python/julep/api/types/agents_agent.py +++ b/sdks/python/julep/api/types/agents_agent.py @@ -5,8 +5,8 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .agents_agent_default_settings import AgentsAgentDefaultSettings from .agents_agent_instructions import AgentsAgentInstructions +from .chat_default_chat_settings import ChatDefaultChatSettings from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode from .common_uuid import CommonUuid @@ -44,7 +44,7 @@ class AgentsAgent(pydantic_v1.BaseModel): Instructions for the agent """ - default_settings: typing.Optional[AgentsAgentDefaultSettings] = pydantic_v1.Field( + default_settings: typing.Optional[ChatDefaultChatSettings] = pydantic_v1.Field( default=None ) """ diff --git a/sdks/python/julep/api/types/agents_agent_default_settings.py b/sdks/python/julep/api/types/agents_agent_default_settings.py deleted file mode 100644 index 71b82cfc2..000000000 --- a/sdks/python/julep/api/types/agents_agent_default_settings.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .chat_generation_preset_settings import ChatGenerationPresetSettings -from .chat_open_ai_settings import ChatOpenAiSettings -from .chat_v_llm_settings import ChatVLlmSettings - -AgentsAgentDefaultSettings = typing.Union[ - ChatGenerationPresetSettings, ChatOpenAiSettings, ChatVLlmSettings -] diff --git a/sdks/python/julep/api/types/agents_create_agent_request.py b/sdks/python/julep/api/types/agents_create_agent_request.py index 2a545c6dc..d5a78d583 100644 --- a/sdks/python/julep/api/types/agents_create_agent_request.py +++ b/sdks/python/julep/api/types/agents_create_agent_request.py @@ -5,12 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .agents_create_agent_request_default_settings import ( - AgentsCreateAgentRequestDefaultSettings, -) from .agents_create_agent_request_instructions import ( AgentsCreateAgentRequestInstructions, ) +from .chat_default_chat_settings import ChatDefaultChatSettings from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode @@ -40,8 +38,8 @@ class AgentsCreateAgentRequest(pydantic_v1.BaseModel): Instructions for the agent """ - default_settings: typing.Optional[AgentsCreateAgentRequestDefaultSettings] = ( - pydantic_v1.Field(default=None) + default_settings: typing.Optional[ChatDefaultChatSettings] = pydantic_v1.Field( + default=None ) """ Default settings for all sessions created by this agent diff --git a/sdks/python/julep/api/types/agents_create_agent_request_default_settings.py b/sdks/python/julep/api/types/agents_create_agent_request_default_settings.py deleted file mode 100644 index da2d239bf..000000000 --- a/sdks/python/julep/api/types/agents_create_agent_request_default_settings.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .chat_generation_preset_settings import ChatGenerationPresetSettings -from .chat_open_ai_settings import ChatOpenAiSettings -from .chat_v_llm_settings import ChatVLlmSettings - -AgentsCreateAgentRequestDefaultSettings = typing.Union[ - ChatGenerationPresetSettings, ChatOpenAiSettings, ChatVLlmSettings -] diff --git a/sdks/python/julep/api/types/agents_create_or_update_agent_request.py b/sdks/python/julep/api/types/agents_create_or_update_agent_request.py new file mode 100644 index 000000000..fa92a7a9b --- /dev/null +++ b/sdks/python/julep/api/types/agents_create_or_update_agent_request.py @@ -0,0 +1,75 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .agents_create_agent_request_instructions import ( + AgentsCreateAgentRequestInstructions, +) +from .chat_default_chat_settings import ChatDefaultChatSettings +from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode +from .common_uuid import CommonUuid + + +class AgentsCreateOrUpdateAgentRequest(pydantic_v1.BaseModel): + id: CommonUuid + metadata: typing.Optional[typing.Dict[str, typing.Any]] = None + default_settings: typing.Optional[ChatDefaultChatSettings] = pydantic_v1.Field( + default=None + ) + """ + Default settings for all sessions created by this agent + """ + + name: CommonIdentifierSafeUnicode = pydantic_v1.Field() + """ + Name of the agent + """ + + about: str = pydantic_v1.Field() + """ + About the agent + """ + + model: str = pydantic_v1.Field() + """ + Model name to use (gpt-4-turbo, gemini-nano etc) + """ + + instructions: AgentsCreateAgentRequestInstructions = pydantic_v1.Field() + """ + Instructions for the agent + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/agents_docs_search_route_search_request_direction.py b/sdks/python/julep/api/types/agents_docs_search_route_search_request_direction.py deleted file mode 100644 index 07c53fe78..000000000 --- a/sdks/python/julep/api/types/agents_docs_search_route_search_request_direction.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -AgentsDocsSearchRouteSearchRequestDirection = typing.Union[ - typing.Literal["asc", "desc"], typing.Any -] diff --git a/sdks/python/julep/api/types/agents_docs_search_route_search_request_sort_by.py b/sdks/python/julep/api/types/agents_docs_search_route_search_request_sort_by.py deleted file mode 100644 index a85bdee6c..000000000 --- a/sdks/python/julep/api/types/agents_docs_search_route_search_request_sort_by.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -AgentsDocsSearchRouteSearchRequestSortBy = typing.Union[ - typing.Literal["created_at", "updated_at"], typing.Any -] diff --git a/sdks/python/julep/api/types/agents_patch_agent_request_default_settings.py b/sdks/python/julep/api/types/agents_patch_agent_request_default_settings.py deleted file mode 100644 index 2ce2b4fc3..000000000 --- a/sdks/python/julep/api/types/agents_patch_agent_request_default_settings.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .chat_generation_preset_settings import ChatGenerationPresetSettings -from .chat_open_ai_settings import ChatOpenAiSettings -from .chat_v_llm_settings import ChatVLlmSettings - -AgentsPatchAgentRequestDefaultSettings = typing.Union[ - ChatGenerationPresetSettings, ChatOpenAiSettings, ChatVLlmSettings -] diff --git a/sdks/python/julep/api/types/agents_route_list_response.py b/sdks/python/julep/api/types/agents_route_list_response.py index 98362162e..8802e37f5 100644 --- a/sdks/python/julep/api/types/agents_route_list_response.py +++ b/sdks/python/julep/api/types/agents_route_list_response.py @@ -9,7 +9,7 @@ class AgentsRouteListResponse(pydantic_v1.BaseModel): - results: typing.List[AgentsAgent] + items: typing.List[AgentsAgent] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/agents_update_agent_request.py b/sdks/python/julep/api/types/agents_update_agent_request.py index 1b7a648a7..ccbcc48cf 100644 --- a/sdks/python/julep/api/types/agents_update_agent_request.py +++ b/sdks/python/julep/api/types/agents_update_agent_request.py @@ -5,12 +5,10 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .agents_update_agent_request_default_settings import ( - AgentsUpdateAgentRequestDefaultSettings, -) from .agents_update_agent_request_instructions import ( AgentsUpdateAgentRequestInstructions, ) +from .chat_default_chat_settings import ChatDefaultChatSettings from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode @@ -40,8 +38,8 @@ class AgentsUpdateAgentRequest(pydantic_v1.BaseModel): Instructions for the agent """ - default_settings: typing.Optional[AgentsUpdateAgentRequestDefaultSettings] = ( - pydantic_v1.Field(default=None) + default_settings: typing.Optional[ChatDefaultChatSettings] = pydantic_v1.Field( + default=None ) """ Default settings for all sessions created by this agent diff --git a/sdks/python/julep/api/types/agents_update_agent_request_default_settings.py b/sdks/python/julep/api/types/agents_update_agent_request_default_settings.py deleted file mode 100644 index dc6ff15ab..000000000 --- a/sdks/python/julep/api/types/agents_update_agent_request_default_settings.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .chat_generation_preset_settings import ChatGenerationPresetSettings -from .chat_open_ai_settings import ChatOpenAiSettings -from .chat_v_llm_settings import ChatVLlmSettings - -AgentsUpdateAgentRequestDefaultSettings = typing.Union[ - ChatGenerationPresetSettings, ChatOpenAiSettings, ChatVLlmSettings -] diff --git a/sdks/python/julep/api/types/entries_chat_ml_message.py b/sdks/python/julep/api/types/chat_chat_input_data.py similarity index 61% rename from sdks/python/julep/api/types/entries_chat_ml_message.py rename to sdks/python/julep/api/types/chat_chat_input_data.py index f3c087f6b..c7ab665b7 100644 --- a/sdks/python/julep/api/types/entries_chat_ml_message.py +++ b/sdks/python/julep/api/types/chat_chat_input_data.py @@ -5,40 +5,31 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .common_uuid import CommonUuid -from .entries_chat_ml_message_content import EntriesChatMlMessageContent -from .entries_chat_ml_role import EntriesChatMlRole -from .tools_chosen_tool_call import ToolsChosenToolCall +from .chat_chat_input_data_tool_choice import ChatChatInputDataToolChoice +from .entries_input_chat_ml_message import EntriesInputChatMlMessage +from .tools_function_tool import ToolsFunctionTool -class EntriesChatMlMessage(pydantic_v1.BaseModel): - role: EntriesChatMlRole = pydantic_v1.Field() +class ChatChatInputData(pydantic_v1.BaseModel): + messages: typing.List[EntriesInputChatMlMessage] = pydantic_v1.Field() """ - The role of the message + A list of new input messages comprising the conversation so far. """ - content: EntriesChatMlMessageContent = pydantic_v1.Field() + tools: typing.Optional[typing.List[ToolsFunctionTool]] = pydantic_v1.Field( + default=None + ) """ - The content parts of the message + (Advanced) List of tools that are provided in addition to agent's default set of tools. """ - name: typing.Optional[str] = pydantic_v1.Field(default=None) + tool_choice: typing.Optional[ChatChatInputDataToolChoice] = pydantic_v1.Field( + default=None + ) """ - Name + Can be one of existing tools given to the agent earlier or the ones provided in this request. """ - tool_calls: typing.List[ToolsChosenToolCall] = pydantic_v1.Field() - """ - Tool calls generated by the model. - """ - - created_at: dt.datetime = pydantic_v1.Field() - """ - When this resource was created as UTC date-time - """ - - id: CommonUuid - def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { "by_alias": True, diff --git a/sdks/python/julep/api/types/chat_route_generate_request_agent_tool_choice.py b/sdks/python/julep/api/types/chat_chat_input_data_tool_choice.py similarity index 79% rename from sdks/python/julep/api/types/chat_route_generate_request_agent_tool_choice.py rename to sdks/python/julep/api/types/chat_chat_input_data_tool_choice.py index 9e95f3e1d..4f0f065e0 100644 --- a/sdks/python/julep/api/types/chat_route_generate_request_agent_tool_choice.py +++ b/sdks/python/julep/api/types/chat_chat_input_data_tool_choice.py @@ -4,6 +4,6 @@ from .tools_named_tool_choice import ToolsNamedToolChoice -ChatRouteGenerateRequestAgentToolChoice = typing.Union[ +ChatChatInputDataToolChoice = typing.Union[ typing.Literal["auto"], typing.Literal["none"], ToolsNamedToolChoice ] diff --git a/sdks/python/julep/api/types/chat_chat_output_chunk.py b/sdks/python/julep/api/types/chat_chat_output_chunk.py index 6eb42c639..926787155 100644 --- a/sdks/python/julep/api/types/chat_chat_output_chunk.py +++ b/sdks/python/julep/api/types/chat_chat_output_chunk.py @@ -6,7 +6,7 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .chat_base_chat_output import ChatBaseChatOutput -from .entries_chat_ml_message import EntriesChatMlMessage +from .entries_input_chat_ml_message import EntriesInputChatMlMessage class ChatChatOutputChunk(ChatBaseChatOutput): @@ -14,7 +14,7 @@ class ChatChatOutputChunk(ChatBaseChatOutput): Streaming chat completion output """ - delta: EntriesChatMlMessage = pydantic_v1.Field() + delta: EntriesInputChatMlMessage = pydantic_v1.Field() """ The message generated by the model """ diff --git a/sdks/python/julep/api/types/tasks_prompt_step_settings_preset.py b/sdks/python/julep/api/types/chat_chat_settings.py similarity index 89% rename from sdks/python/julep/api/types/tasks_prompt_step_settings_preset.py rename to sdks/python/julep/api/types/chat_chat_settings.py index 699611201..01a10a9fc 100644 --- a/sdks/python/julep/api/types/tasks_prompt_step_settings_preset.py +++ b/sdks/python/julep/api/types/chat_chat_settings.py @@ -6,13 +6,13 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .chat_completion_response_format import ChatCompletionResponseFormat -from .chat_generation_preset import ChatGenerationPreset +from .chat_default_chat_settings import ChatDefaultChatSettings from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode from .common_logit_bias import CommonLogitBias from .common_uuid import CommonUuid -class TasksPromptStepSettingsPreset(pydantic_v1.BaseModel): +class ChatChatSettings(ChatDefaultChatSettings): model: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( default=None ) @@ -59,11 +59,6 @@ class TasksPromptStepSettingsPreset(pydantic_v1.BaseModel): Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) """ - preset: typing.Optional[ChatGenerationPreset] = pydantic_v1.Field(default=None) - """ - Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) - """ - def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { "by_alias": True, @@ -92,5 +87,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True + allow_population_by_field_name = True + populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/chat_competion_usage.py b/sdks/python/julep/api/types/chat_competion_usage.py index f6f798330..23d6b7b3c 100644 --- a/sdks/python/julep/api/types/chat_competion_usage.py +++ b/sdks/python/julep/api/types/chat_competion_usage.py @@ -12,17 +12,17 @@ class ChatCompetionUsage(pydantic_v1.BaseModel): Usage statistics for the completion request """ - completion_tokens: int = pydantic_v1.Field() + completion_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) """ Number of tokens in the generated completion """ - prompt_tokens: int = pydantic_v1.Field() + prompt_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) """ Number of tokens in the prompt """ - total_tokens: int = pydantic_v1.Field() + total_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) """ Total number of tokens used in the request (prompt + completion) """ diff --git a/sdks/python/julep/api/types/chat_v_llm_settings.py b/sdks/python/julep/api/types/chat_default_chat_settings.py similarity index 72% rename from sdks/python/julep/api/types/chat_v_llm_settings.py rename to sdks/python/julep/api/types/chat_default_chat_settings.py index 897c25b2f..77864e20b 100644 --- a/sdks/python/julep/api/types/chat_v_llm_settings.py +++ b/sdks/python/julep/api/types/chat_default_chat_settings.py @@ -5,9 +5,14 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_open_ai_settings import ChatOpenAiSettings -class ChatVLlmSettings(pydantic_v1.BaseModel): +class ChatDefaultChatSettings(ChatOpenAiSettings): + """ + Default settings for the chat session (also used by the agent) + """ + repetition_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) """ Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. @@ -18,16 +23,6 @@ class ChatVLlmSettings(pydantic_v1.BaseModel): Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. """ - temperature: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - """ - - top_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - """ - min_p: typing.Optional[float] = pydantic_v1.Field(default=None) """ Minimum probability compared to leading token to be considered @@ -61,5 +56,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True + allow_population_by_field_name = True + populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/chat_generation_preset.py b/sdks/python/julep/api/types/chat_generation_preset.py deleted file mode 100644 index fcf1a3a1b..000000000 --- a/sdks/python/julep/api/types/chat_generation_preset.py +++ /dev/null @@ -1,18 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -ChatGenerationPreset = typing.Union[ - typing.Literal[ - "problem_solving", - "conversational", - "fun", - "prose", - "creative", - "business", - "deterministic", - "code", - "multilingual", - ], - typing.Any, -] diff --git a/sdks/python/julep/api/types/chat_message_chat_response.py b/sdks/python/julep/api/types/chat_message_chat_response.py index fd8b40082..2a5a789d3 100644 --- a/sdks/python/julep/api/types/chat_message_chat_response.py +++ b/sdks/python/julep/api/types/chat_message_chat_response.py @@ -6,11 +6,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .chat_base_chat_response import ChatBaseChatResponse -from .chat_chat_output_chunk import ChatChatOutputChunk +from .chat_message_chat_response_choices_item import ChatMessageChatResponseChoicesItem class ChatMessageChatResponse(ChatBaseChatResponse): - choices: typing.List[ChatChatOutputChunk] = pydantic_v1.Field() + choices: typing.List[ChatMessageChatResponseChoicesItem] = pydantic_v1.Field() """ The deltas generated by the model """ diff --git a/sdks/python/julep/api/types/chat_message_chat_response_choices_item.py b/sdks/python/julep/api/types/chat_message_chat_response_choices_item.py new file mode 100644 index 000000000..c542c6335 --- /dev/null +++ b/sdks/python/julep/api/types/chat_message_chat_response_choices_item.py @@ -0,0 +1,10 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .chat_multiple_chat_output import ChatMultipleChatOutput +from .chat_single_chat_output import ChatSingleChatOutput + +ChatMessageChatResponseChoicesItem = typing.Union[ + ChatSingleChatOutput, ChatMultipleChatOutput +] diff --git a/sdks/python/julep/api/types/chat_multiple_chat_output.py b/sdks/python/julep/api/types/chat_multiple_chat_output.py index ab41027eb..2fe0a50df 100644 --- a/sdks/python/julep/api/types/chat_multiple_chat_output.py +++ b/sdks/python/julep/api/types/chat_multiple_chat_output.py @@ -6,7 +6,7 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .chat_base_chat_output import ChatBaseChatOutput -from .entries_chat_ml_message import EntriesChatMlMessage +from .entries_input_chat_ml_message import EntriesInputChatMlMessage class ChatMultipleChatOutput(ChatBaseChatOutput): @@ -14,7 +14,7 @@ class ChatMultipleChatOutput(ChatBaseChatOutput): The output returned by the model. Note that, depending on the model provider, they might return more than one message. """ - messages: typing.List[EntriesChatMlMessage] + messages: typing.List[EntriesInputChatMlMessage] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/chat_route_generate_request.py b/sdks/python/julep/api/types/chat_route_generate_request.py deleted file mode 100644 index 33c9ec62d..000000000 --- a/sdks/python/julep/api/types/chat_route_generate_request.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .chat_route_generate_request_agent import ChatRouteGenerateRequestAgent -from .chat_route_generate_request_frequency_penalty import ( - ChatRouteGenerateRequestFrequencyPenalty, -) -from .chat_route_generate_request_preset import ChatRouteGenerateRequestPreset - -ChatRouteGenerateRequest = typing.Union[ - ChatRouteGenerateRequestPreset, - ChatRouteGenerateRequestFrequencyPenalty, - ChatRouteGenerateRequestAgent, -] diff --git a/sdks/python/julep/api/types/chat_route_generate_request_agent.py b/sdks/python/julep/api/types/chat_route_generate_request_agent.py deleted file mode 100644 index 6ab50c264..000000000 --- a/sdks/python/julep/api/types/chat_route_generate_request_agent.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .chat_completion_response_format import ChatCompletionResponseFormat -from .chat_route_generate_request_agent_tool_choice import ( - ChatRouteGenerateRequestAgentToolChoice, -) -from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -from .common_logit_bias import CommonLogitBias -from .common_uuid import CommonUuid -from .entries_input_chat_ml_message import EntriesInputChatMlMessage -from .tools_function_tool import ToolsFunctionTool - - -class ChatRouteGenerateRequestAgent(pydantic_v1.BaseModel): - messages: typing.List[EntriesInputChatMlMessage] = pydantic_v1.Field() - """ - A list of new input messages comprising the conversation so far. - """ - - tools: typing.Optional[typing.List[ToolsFunctionTool]] = pydantic_v1.Field( - default=None - ) - """ - (Advanced) List of tools that are provided in addition to agent's default set of tools. - """ - - tool_choice: typing.Optional[ChatRouteGenerateRequestAgentToolChoice] = ( - pydantic_v1.Field(default=None) - ) - """ - Can be one of existing tools given to the agent earlier or the ones provided in this request. - """ - - recall: bool = pydantic_v1.Field() - """ - Whether previous memories should be recalled or not (will be enabled in a future release) - """ - - remember: bool = pydantic_v1.Field() - """ - Whether this interaction should form new memories or not (will be enabled in a future release) - """ - - save: bool = pydantic_v1.Field() - """ - Whether this interaction should be stored in the session history or not - """ - - model: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( - default=None - ) - """ - Identifier of the model to be used - """ - - stream: bool = pydantic_v1.Field() - """ - Indicates if the server should stream the response as it's generated - """ - - stop: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) - """ - Up to 4 sequences where the API will stop generating further tokens. - """ - - seed: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - If specified, the system will make a best effort to sample deterministically for that particular seed value - """ - - max_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - The maximum number of tokens to generate in the chat completion - """ - - logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = pydantic_v1.Field( - default=None - ) - """ - Modify the likelihood of specified tokens appearing in the completion - """ - - response_format: typing.Optional[ChatCompletionResponseFormat] = pydantic_v1.Field( - default=None - ) - """ - Response format (set to `json_object` to restrict output to JSON) - """ - - agent: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) - """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - """ - - repetition_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - length_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. - """ - - temperature: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - """ - - top_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - """ - - min_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Minimum probability compared to leading token to be considered - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/chat_route_generate_request_frequency_penalty.py b/sdks/python/julep/api/types/chat_route_generate_request_frequency_penalty.py deleted file mode 100644 index f62b323df..000000000 --- a/sdks/python/julep/api/types/chat_route_generate_request_frequency_penalty.py +++ /dev/null @@ -1,149 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .chat_completion_response_format import ChatCompletionResponseFormat -from .chat_route_generate_request_frequency_penalty_tool_choice import ( - ChatRouteGenerateRequestFrequencyPenaltyToolChoice, -) -from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -from .common_logit_bias import CommonLogitBias -from .common_uuid import CommonUuid -from .entries_input_chat_ml_message import EntriesInputChatMlMessage -from .tools_function_tool import ToolsFunctionTool - - -class ChatRouteGenerateRequestFrequencyPenalty(pydantic_v1.BaseModel): - messages: typing.List[EntriesInputChatMlMessage] = pydantic_v1.Field() - """ - A list of new input messages comprising the conversation so far. - """ - - tools: typing.Optional[typing.List[ToolsFunctionTool]] = pydantic_v1.Field( - default=None - ) - """ - (Advanced) List of tools that are provided in addition to agent's default set of tools. - """ - - tool_choice: typing.Optional[ChatRouteGenerateRequestFrequencyPenaltyToolChoice] = ( - pydantic_v1.Field(default=None) - ) - """ - Can be one of existing tools given to the agent earlier or the ones provided in this request. - """ - - recall: bool = pydantic_v1.Field() - """ - Whether previous memories should be recalled or not (will be enabled in a future release) - """ - - remember: bool = pydantic_v1.Field() - """ - Whether this interaction should form new memories or not (will be enabled in a future release) - """ - - save: bool = pydantic_v1.Field() - """ - Whether this interaction should be stored in the session history or not - """ - - model: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( - default=None - ) - """ - Identifier of the model to be used - """ - - stream: bool = pydantic_v1.Field() - """ - Indicates if the server should stream the response as it's generated - """ - - stop: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) - """ - Up to 4 sequences where the API will stop generating further tokens. - """ - - seed: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - If specified, the system will make a best effort to sample deterministically for that particular seed value - """ - - max_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - The maximum number of tokens to generate in the chat completion - """ - - logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = pydantic_v1.Field( - default=None - ) - """ - Modify the likelihood of specified tokens appearing in the completion - """ - - response_format: typing.Optional[ChatCompletionResponseFormat] = pydantic_v1.Field( - default=None - ) - """ - Response format (set to `json_object` to restrict output to JSON) - """ - - agent: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) - """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - """ - - frequency_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - presence_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - temperature: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - """ - - top_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/chat_route_generate_request_frequency_penalty_tool_choice.py b/sdks/python/julep/api/types/chat_route_generate_request_frequency_penalty_tool_choice.py deleted file mode 100644 index 73169ee30..000000000 --- a/sdks/python/julep/api/types/chat_route_generate_request_frequency_penalty_tool_choice.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .tools_named_tool_choice import ToolsNamedToolChoice - -ChatRouteGenerateRequestFrequencyPenaltyToolChoice = typing.Union[ - typing.Literal["auto"], typing.Literal["none"], ToolsNamedToolChoice -] diff --git a/sdks/python/julep/api/types/chat_route_generate_request_preset.py b/sdks/python/julep/api/types/chat_route_generate_request_preset.py deleted file mode 100644 index b98ccd98d..000000000 --- a/sdks/python/julep/api/types/chat_route_generate_request_preset.py +++ /dev/null @@ -1,135 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .chat_completion_response_format import ChatCompletionResponseFormat -from .chat_generation_preset import ChatGenerationPreset -from .chat_route_generate_request_preset_tool_choice import ( - ChatRouteGenerateRequestPresetToolChoice, -) -from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -from .common_logit_bias import CommonLogitBias -from .common_uuid import CommonUuid -from .entries_input_chat_ml_message import EntriesInputChatMlMessage -from .tools_function_tool import ToolsFunctionTool - - -class ChatRouteGenerateRequestPreset(pydantic_v1.BaseModel): - messages: typing.List[EntriesInputChatMlMessage] = pydantic_v1.Field() - """ - A list of new input messages comprising the conversation so far. - """ - - tools: typing.Optional[typing.List[ToolsFunctionTool]] = pydantic_v1.Field( - default=None - ) - """ - (Advanced) List of tools that are provided in addition to agent's default set of tools. - """ - - tool_choice: typing.Optional[ChatRouteGenerateRequestPresetToolChoice] = ( - pydantic_v1.Field(default=None) - ) - """ - Can be one of existing tools given to the agent earlier or the ones provided in this request. - """ - - recall: bool = pydantic_v1.Field() - """ - Whether previous memories should be recalled or not (will be enabled in a future release) - """ - - remember: bool = pydantic_v1.Field() - """ - Whether this interaction should form new memories or not (will be enabled in a future release) - """ - - save: bool = pydantic_v1.Field() - """ - Whether this interaction should be stored in the session history or not - """ - - model: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( - default=None - ) - """ - Identifier of the model to be used - """ - - stream: bool = pydantic_v1.Field() - """ - Indicates if the server should stream the response as it's generated - """ - - stop: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) - """ - Up to 4 sequences where the API will stop generating further tokens. - """ - - seed: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - If specified, the system will make a best effort to sample deterministically for that particular seed value - """ - - max_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - The maximum number of tokens to generate in the chat completion - """ - - logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = pydantic_v1.Field( - default=None - ) - """ - Modify the likelihood of specified tokens appearing in the completion - """ - - response_format: typing.Optional[ChatCompletionResponseFormat] = pydantic_v1.Field( - default=None - ) - """ - Response format (set to `json_object` to restrict output to JSON) - """ - - agent: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) - """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - """ - - preset: typing.Optional[ChatGenerationPreset] = pydantic_v1.Field(default=None) - """ - Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/chat_route_generate_request_preset_tool_choice.py b/sdks/python/julep/api/types/chat_route_generate_request_preset_tool_choice.py deleted file mode 100644 index 92d41ff7f..000000000 --- a/sdks/python/julep/api/types/chat_route_generate_request_preset_tool_choice.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .tools_named_tool_choice import ToolsNamedToolChoice - -ChatRouteGenerateRequestPresetToolChoice = typing.Union[ - typing.Literal["auto"], typing.Literal["none"], ToolsNamedToolChoice -] diff --git a/sdks/python/julep/api/types/chat_single_chat_output.py b/sdks/python/julep/api/types/chat_single_chat_output.py index 236d7d566..51b5d99fe 100644 --- a/sdks/python/julep/api/types/chat_single_chat_output.py +++ b/sdks/python/julep/api/types/chat_single_chat_output.py @@ -6,7 +6,7 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .chat_base_chat_output import ChatBaseChatOutput -from .entries_chat_ml_message import EntriesChatMlMessage +from .entries_input_chat_ml_message import EntriesInputChatMlMessage class ChatSingleChatOutput(ChatBaseChatOutput): @@ -14,7 +14,7 @@ class ChatSingleChatOutput(ChatBaseChatOutput): The output returned by the model. Note that, depending on the model provider, they might return more than one message. """ - message: EntriesChatMlMessage + message: EntriesInputChatMlMessage def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/docs_base_doc_search_request.py b/sdks/python/julep/api/types/docs_base_doc_search_request.py index f7c1ecfa2..1b9646593 100644 --- a/sdks/python/julep/api/types/docs_base_doc_search_request.py +++ b/sdks/python/julep/api/types/docs_base_doc_search_request.py @@ -8,21 +8,7 @@ class DocsBaseDocSearchRequest(pydantic_v1.BaseModel): - confidence: float = pydantic_v1.Field() - """ - The confidence cutoff level - """ - - alpha: float = pydantic_v1.Field() - """ - The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; - """ - - mmr: bool = pydantic_v1.Field() - """ - Whether to include the MMR algorithm in the search. Optimizes for diversity in search results. - """ - + limit: int lang: typing.Literal["en-US"] = pydantic_v1.Field(default="en-US") """ The language to be used for text-only search. Support for other languages coming soon. diff --git a/sdks/python/julep/api/types/docs_create_doc_request.py b/sdks/python/julep/api/types/docs_create_doc_request.py new file mode 100644 index 000000000..caf1497b0 --- /dev/null +++ b/sdks/python/julep/api/types/docs_create_doc_request.py @@ -0,0 +1,57 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode +from .docs_create_doc_request_content import DocsCreateDocRequestContent + + +class DocsCreateDocRequest(pydantic_v1.BaseModel): + """ + Payload for creating a doc + """ + + metadata: typing.Optional[typing.Dict[str, typing.Any]] = None + title: CommonIdentifierSafeUnicode = pydantic_v1.Field() + """ + Title describing what this document contains + """ + + content: DocsCreateDocRequestContent = pydantic_v1.Field() + """ + Contents of the document + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/docs_hybrid_doc_search_request_text.py b/sdks/python/julep/api/types/docs_create_doc_request_content.py similarity index 53% rename from sdks/python/julep/api/types/docs_hybrid_doc_search_request_text.py rename to sdks/python/julep/api/types/docs_create_doc_request_content.py index b3cfcf16a..5382bf647 100644 --- a/sdks/python/julep/api/types/docs_hybrid_doc_search_request_text.py +++ b/sdks/python/julep/api/types/docs_create_doc_request_content.py @@ -2,4 +2,4 @@ import typing -DocsHybridDocSearchRequestText = typing.Union[str, typing.List[str]] +DocsCreateDocRequestContent = typing.Union[str, typing.List[str]] diff --git a/sdks/python/julep/api/types/docs_doc_reference.py b/sdks/python/julep/api/types/docs_doc_reference.py index 6d7ddb9c7..94c1eaefc 100644 --- a/sdks/python/julep/api/types/docs_doc_reference.py +++ b/sdks/python/julep/api/types/docs_doc_reference.py @@ -7,6 +7,7 @@ from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_uuid import CommonUuid from .docs_doc_owner import DocsDocOwner +from .docs_snippet import DocsSnippet class DocsDocReference(pydantic_v1.BaseModel): @@ -20,13 +21,9 @@ class DocsDocReference(pydantic_v1.BaseModel): ID of the document """ - snippet_index: typing.List[int] = pydantic_v1.Field() - """ - Snippets referred to of the document - """ - title: typing.Optional[str] = None - snippet: typing.Optional[str] = None + snippets: typing.List[DocsSnippet] + distance: typing.Optional[float] = None def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/chat_generation_preset_settings.py b/sdks/python/julep/api/types/docs_doc_search_response.py similarity index 78% rename from sdks/python/julep/api/types/chat_generation_preset_settings.py rename to sdks/python/julep/api/types/docs_doc_search_response.py index 7ce09ceff..59d26bdb9 100644 --- a/sdks/python/julep/api/types/chat_generation_preset_settings.py +++ b/sdks/python/julep/api/types/docs_doc_search_response.py @@ -5,13 +5,18 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .chat_generation_preset import ChatGenerationPreset +from .docs_doc_reference import DocsDocReference -class ChatGenerationPresetSettings(pydantic_v1.BaseModel): - preset: typing.Optional[ChatGenerationPreset] = pydantic_v1.Field(default=None) +class DocsDocSearchResponse(pydantic_v1.BaseModel): + docs: typing.List[DocsDocReference] = pydantic_v1.Field() """ - Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) + The documents that were found + """ + + time: float = pydantic_v1.Field() + """ + The time taken to search in seconds """ def json(self, **kwargs: typing.Any) -> str: diff --git a/sdks/python/julep/api/types/docs_hybrid_doc_search_request.py b/sdks/python/julep/api/types/docs_hybrid_doc_search_request.py index faaccc53b..8e460c40f 100644 --- a/sdks/python/julep/api/types/docs_hybrid_doc_search_request.py +++ b/sdks/python/julep/api/types/docs_hybrid_doc_search_request.py @@ -6,19 +6,27 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .docs_base_doc_search_request import DocsBaseDocSearchRequest -from .docs_hybrid_doc_search_request_text import DocsHybridDocSearchRequestText -from .docs_hybrid_doc_search_request_vector import DocsHybridDocSearchRequestVector class DocsHybridDocSearchRequest(DocsBaseDocSearchRequest): - text: DocsHybridDocSearchRequestText = pydantic_v1.Field() + confidence: float = pydantic_v1.Field() """ - Text or texts to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. + The confidence cutoff level """ - vector: DocsHybridDocSearchRequestVector = pydantic_v1.Field() + alpha: float = pydantic_v1.Field() """ - Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; + """ + + text: str = pydantic_v1.Field() + """ + Text to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. + """ + + vector: typing.List[float] = pydantic_v1.Field() + """ + Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. """ def json(self, **kwargs: typing.Any) -> str: diff --git a/sdks/python/julep/api/types/docs_hybrid_doc_search_request_vector.py b/sdks/python/julep/api/types/docs_hybrid_doc_search_request_vector.py deleted file mode 100644 index 64624e57e..000000000 --- a/sdks/python/julep/api/types/docs_hybrid_doc_search_request_vector.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DocsHybridDocSearchRequestVector = typing.Union[ - typing.List[float], typing.List[typing.List[float]] -] diff --git a/sdks/python/julep/api/types/user_docs_search_route_search_response.py b/sdks/python/julep/api/types/docs_snippet.py similarity index 88% rename from sdks/python/julep/api/types/user_docs_search_route_search_response.py rename to sdks/python/julep/api/types/docs_snippet.py index 9206fc909..9d080028a 100644 --- a/sdks/python/julep/api/types/user_docs_search_route_search_response.py +++ b/sdks/python/julep/api/types/docs_snippet.py @@ -5,11 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .docs_doc_reference import DocsDocReference -class UserDocsSearchRouteSearchResponse(pydantic_v1.BaseModel): - results: typing.List[DocsDocReference] +class DocsSnippet(pydantic_v1.BaseModel): + index: int + content: str def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/docs_text_only_doc_search_request.py b/sdks/python/julep/api/types/docs_text_only_doc_search_request.py index 5f43e1b04..517b6a1ce 100644 --- a/sdks/python/julep/api/types/docs_text_only_doc_search_request.py +++ b/sdks/python/julep/api/types/docs_text_only_doc_search_request.py @@ -6,13 +6,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .docs_base_doc_search_request import DocsBaseDocSearchRequest -from .docs_text_only_doc_search_request_text import DocsTextOnlyDocSearchRequestText class DocsTextOnlyDocSearchRequest(DocsBaseDocSearchRequest): - text: DocsTextOnlyDocSearchRequestText = pydantic_v1.Field() + text: str = pydantic_v1.Field() """ - Text or texts to use in the search. + Text to use in the search. """ def json(self, **kwargs: typing.Any) -> str: diff --git a/sdks/python/julep/api/types/docs_text_only_doc_search_request_text.py b/sdks/python/julep/api/types/docs_text_only_doc_search_request_text.py deleted file mode 100644 index 5f957995e..000000000 --- a/sdks/python/julep/api/types/docs_text_only_doc_search_request_text.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DocsTextOnlyDocSearchRequestText = typing.Union[str, typing.List[str]] diff --git a/sdks/python/julep/api/types/docs_vector_doc_search_request.py b/sdks/python/julep/api/types/docs_vector_doc_search_request.py index 1401f7535..f9c103ec6 100644 --- a/sdks/python/julep/api/types/docs_vector_doc_search_request.py +++ b/sdks/python/julep/api/types/docs_vector_doc_search_request.py @@ -6,13 +6,17 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .docs_base_doc_search_request import DocsBaseDocSearchRequest -from .docs_vector_doc_search_request_vector import DocsVectorDocSearchRequestVector class DocsVectorDocSearchRequest(DocsBaseDocSearchRequest): - vector: DocsVectorDocSearchRequestVector = pydantic_v1.Field() + confidence: float = pydantic_v1.Field() """ - Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + The confidence cutoff level + """ + + vector: typing.List[float] = pydantic_v1.Field() + """ + Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. """ def json(self, **kwargs: typing.Any) -> str: diff --git a/sdks/python/julep/api/types/docs_vector_doc_search_request_vector.py b/sdks/python/julep/api/types/docs_vector_doc_search_request_vector.py deleted file mode 100644 index e64b2da82..000000000 --- a/sdks/python/julep/api/types/docs_vector_doc_search_request_vector.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -DocsVectorDocSearchRequestVector = typing.Union[ - typing.List[float], typing.List[typing.List[float]] -] diff --git a/sdks/python/julep/api/types/entries_base_entry.py b/sdks/python/julep/api/types/entries_base_entry.py index cd9a8158f..a12c99c1c 100644 --- a/sdks/python/julep/api/types/entries_base_entry.py +++ b/sdks/python/julep/api/types/entries_base_entry.py @@ -15,8 +15,8 @@ class EntriesBaseEntry(pydantic_v1.BaseModel): name: typing.Optional[str] = None content: EntriesBaseEntryContent source: EntriesBaseEntrySource - tokenizer: typing.Optional[str] = None - token_count: typing.Optional[int] = None + tokenizer: str + token_count: int timestamp: float = pydantic_v1.Field() """ This is the time that this event refers to. diff --git a/sdks/python/julep/api/types/entries_chat_ml_message_content.py b/sdks/python/julep/api/types/entries_chat_ml_message_content.py deleted file mode 100644 index 3b80f2242..000000000 --- a/sdks/python/julep/api/types/entries_chat_ml_message_content.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .entries_chat_ml_message_content_item import EntriesChatMlMessageContentItem - -EntriesChatMlMessageContent = typing.Union[ - str, typing.List[str], typing.List[EntriesChatMlMessageContentItem] -] diff --git a/sdks/python/julep/api/types/entries_chat_ml_message_content_item.py b/sdks/python/julep/api/types/entries_chat_ml_message_content_item.py deleted file mode 100644 index 1af6f3b06..000000000 --- a/sdks/python/julep/api/types/entries_chat_ml_message_content_item.py +++ /dev/null @@ -1,87 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -from __future__ import annotations - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .entries_image_url import EntriesImageUrl - - -class EntriesChatMlMessageContentItem_Text(pydantic_v1.BaseModel): - text: str - type: typing.Literal["text"] = "text" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -class EntriesChatMlMessageContentItem_ImageUrl(pydantic_v1.BaseModel): - image_url: EntriesImageUrl - type: typing.Literal["image_url"] = "image_url" - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} - - -EntriesChatMlMessageContentItem = typing.Union[ - EntriesChatMlMessageContentItem_Text, EntriesChatMlMessageContentItem_ImageUrl -] diff --git a/sdks/python/julep/api/types/entries_chat_ml_role.py b/sdks/python/julep/api/types/entries_chat_ml_role.py index 9c61bb75a..0cc9a5d46 100644 --- a/sdks/python/julep/api/types/entries_chat_ml_role.py +++ b/sdks/python/julep/api/types/entries_chat_ml_role.py @@ -5,7 +5,7 @@ EntriesChatMlRole = typing.Union[ typing.Literal[ "user", - "agent", + "assistant", "system", "function", "function_response", diff --git a/sdks/python/julep/api/types/entries_history.py b/sdks/python/julep/api/types/entries_history.py index 78c56e424..d3d124ffe 100644 --- a/sdks/python/julep/api/types/entries_history.py +++ b/sdks/python/julep/api/types/entries_history.py @@ -6,12 +6,12 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_uuid import CommonUuid -from .entries_base_entry import EntriesBaseEntry +from .entries_entry import EntriesEntry from .entries_relation import EntriesRelation class EntriesHistory(pydantic_v1.BaseModel): - entries: typing.List[EntriesBaseEntry] + entries: typing.List[EntriesEntry] relations: typing.List[EntriesRelation] session_id: CommonUuid created_at: dt.datetime = pydantic_v1.Field() diff --git a/sdks/python/julep/api/types/executions_transition.py b/sdks/python/julep/api/types/executions_transition.py index c0982cfcc..901591f5e 100644 --- a/sdks/python/julep/api/types/executions_transition.py +++ b/sdks/python/julep/api/types/executions_transition.py @@ -6,6 +6,7 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_uuid import CommonUuid +from .executions_transition_target import ExecutionsTransitionTarget from .executions_transition_type import ExecutionsTransitionType @@ -13,8 +14,8 @@ class ExecutionsTransition(pydantic_v1.BaseModel): type: ExecutionsTransitionType execution_id: CommonUuid output: typing.Dict[str, typing.Any] - current: typing.List[typing.Any] - next: typing.Optional[typing.List[typing.Any]] = None + current: ExecutionsTransitionTarget + next: typing.Optional[ExecutionsTransitionTarget] = None id: CommonUuid metadata: typing.Optional[typing.Dict[str, typing.Any]] = None created_at: dt.datetime = pydantic_v1.Field() diff --git a/sdks/python/julep/api/types/executions_transition_target.py b/sdks/python/julep/api/types/executions_transition_target.py new file mode 100644 index 000000000..5efc68e37 --- /dev/null +++ b/sdks/python/julep/api/types/executions_transition_target.py @@ -0,0 +1,44 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode + + +class ExecutionsTransitionTarget(pydantic_v1.BaseModel): + workflow: CommonIdentifierSafeUnicode + step: int + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/sessions_create_or_update_session_request.py b/sdks/python/julep/api/types/sessions_create_or_update_session_request.py new file mode 100644 index 000000000..b5dc6bc40 --- /dev/null +++ b/sdks/python/julep/api/types/sessions_create_or_update_session_request.py @@ -0,0 +1,78 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_uuid import CommonUuid +from .sessions_context_overflow_type import SessionsContextOverflowType + + +class SessionsCreateOrUpdateSessionRequest(pydantic_v1.BaseModel): + id: CommonUuid + user: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) + """ + User ID of user associated with this session + """ + + users: typing.Optional[typing.List[CommonUuid]] = None + agent: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) + """ + Agent ID of agent associated with this session + """ + + agents: typing.Optional[typing.List[CommonUuid]] = None + token_budget: typing.Optional[int] = pydantic_v1.Field(default=None) + """ + Threshold value for the adaptive context functionality + """ + + context_overflow: typing.Optional[SessionsContextOverflowType] = pydantic_v1.Field( + default=None + ) + """ + Action to start on context window overflow + """ + + metadata: typing.Optional[typing.Dict[str, typing.Any]] = None + situation: str = pydantic_v1.Field() + """ + A specific situation that sets the background for this session + """ + + render_templates: bool = pydantic_v1.Field() + """ + Render system and assistant message content as jinja templates + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/sessions_route_list_response.py b/sdks/python/julep/api/types/sessions_route_list_response.py index 874d1bd1e..ff1807961 100644 --- a/sdks/python/julep/api/types/sessions_route_list_response.py +++ b/sdks/python/julep/api/types/sessions_route_list_response.py @@ -9,7 +9,7 @@ class SessionsRouteListResponse(pydantic_v1.BaseModel): - results: typing.List[SessionsSession] + items: typing.List[SessionsSession] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/tasks_base_workflow_step.py b/sdks/python/julep/api/types/tasks_base_workflow_step.py index a5c68ac3d..f6798a97a 100644 --- a/sdks/python/julep/api/types/tasks_base_workflow_step.py +++ b/sdks/python/julep/api/types/tasks_base_workflow_step.py @@ -1,13 +1,303 @@ # This file was auto-generated by Fern from our API Definition. +from __future__ import annotations + import datetime as dt import typing from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings +from .common_py_expression import CommonPyExpression +from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo +from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse +from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem +from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor + + +class TasksBaseWorkflowStep_ToolCall(pydantic_v1.BaseModel): + tool: CommonToolRef + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["tool_call"] = pydantic_v1.Field( + alias="kind_", default="tool_call" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Yield(pydantic_v1.BaseModel): + workflow: str + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["yield"] = pydantic_v1.Field(alias="kind_", default="yield") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Prompt(pydantic_v1.BaseModel): + prompt: TasksPromptStepPrompt + settings: ChatChatSettings + kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Error(pydantic_v1.BaseModel): + error: str + kind: typing.Literal["error"] = pydantic_v1.Field(alias="kind_", default="error") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} -class TasksBaseWorkflowStep(pydantic_v1.BaseModel): +class TasksBaseWorkflowStep_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { "by_alias": True, @@ -36,5 +326,383 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True + allow_population_by_field_name = True + populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_WaitForInput(pydantic_v1.BaseModel): + wait_for_input: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( + alias="kind_", default="wait_for_input" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_IfElse(pydantic_v1.BaseModel): + if_: CommonPyExpression = pydantic_v1.Field(alias="if") + then: TasksIfElseWorkflowStepThen + else_: TasksIfElseWorkflowStepElse = pydantic_v1.Field(alias="else") + kind: typing.Literal["if_else"] = pydantic_v1.Field( + alias="kind_", default="if_else" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksBaseWorkflowStep_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: typing.Optional[CommonPyExpression] = None + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +TasksBaseWorkflowStep = typing.Union[ + TasksBaseWorkflowStep_ToolCall, + TasksBaseWorkflowStep_Yield, + TasksBaseWorkflowStep_Prompt, + TasksBaseWorkflowStep_Error, + TasksBaseWorkflowStep_Sleep, + TasksBaseWorkflowStep_Return, + TasksBaseWorkflowStep_Get, + TasksBaseWorkflowStep_Set, + TasksBaseWorkflowStep_Log, + TasksBaseWorkflowStep_Embed, + TasksBaseWorkflowStep_Search, + TasksBaseWorkflowStep_WaitForInput, + TasksBaseWorkflowStep_IfElse, + TasksBaseWorkflowStep_Switch, + TasksBaseWorkflowStep_Foreach, + TasksBaseWorkflowStep_Parallel, + TasksBaseWorkflowStep_MapReduce, +] diff --git a/sdks/python/julep/api/types/tasks_case_then.py b/sdks/python/julep/api/types/tasks_case_then.py new file mode 100644 index 000000000..1ab5ac484 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_case_then.py @@ -0,0 +1,52 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_case_then_then import TasksCaseThenThen + + +class TasksCaseThen(pydantic_v1.BaseModel): + case: CommonPyExpression = pydantic_v1.Field() + """ + The condition to evaluate + """ + + then: TasksCaseThenThen = pydantic_v1.Field() + """ + The steps to run if the condition is true + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_case_then_then.py b/sdks/python/julep/api/types/tasks_case_then_then.py new file mode 100644 index 000000000..58be8eaab --- /dev/null +++ b/sdks/python/julep/api/types/tasks_case_then_then.py @@ -0,0 +1,589 @@ +# This file was auto-generated by Fern from our API Definition. + +from __future__ import annotations + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings +from .common_py_expression import CommonPyExpression +from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor + + +class TasksCaseThenThen_Evaluate(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + evaluate: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["evaluate"] = pydantic_v1.Field( + alias="kind_", default="evaluate" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_ToolCall(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + tool: CommonToolRef + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["tool_call"] = pydantic_v1.Field( + alias="kind_", default="tool_call" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Yield(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + workflow: str + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["yield"] = pydantic_v1.Field(alias="kind_", default="yield") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Prompt(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + prompt: TasksPromptStepPrompt + settings: ChatChatSettings + kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Error(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + error: str + kind: typing.Literal["error"] = pydantic_v1.Field(alias="kind_", default="error") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Sleep(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Return(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Get(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Set(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Log(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Embed(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_Search(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCaseThenThen_WaitForInput(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + wait_for_input: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( + alias="kind_", default="wait_for_input" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +TasksCaseThenThen = typing.Union[ + TasksCaseThenThen_Evaluate, + TasksCaseThenThen_ToolCall, + TasksCaseThenThen_Yield, + TasksCaseThenThen_Prompt, + TasksCaseThenThen_Error, + TasksCaseThenThen_Sleep, + TasksCaseThenThen_Return, + TasksCaseThenThen_Get, + TasksCaseThenThen_Set, + TasksCaseThenThen_Log, + TasksCaseThenThen_Embed, + TasksCaseThenThen_Search, + TasksCaseThenThen_WaitForInput, +] diff --git a/sdks/python/julep/api/types/tasks_create_task_request_main_item.py b/sdks/python/julep/api/types/tasks_create_task_request_main_item.py index f241c6fc7..64a8f1a98 100644 --- a/sdks/python/julep/api/types/tasks_create_task_request_main_item.py +++ b/sdks/python/julep/api/types/tasks_create_task_request_main_item.py @@ -7,13 +7,20 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt -from .tasks_prompt_step_settings import TasksPromptStepSettings -from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor class TasksCreateTaskRequestMainItem_Evaluate(pydantic_v1.BaseModel): @@ -58,7 +65,7 @@ class Config: class TasksCreateTaskRequestMainItem_ToolCall(pydantic_v1.BaseModel): tool: CommonToolRef - arguments: typing.Dict[str, typing.Any] + arguments: typing.Dict[str, CommonPyExpression] kind: typing.Literal["tool_call"] = pydantic_v1.Field( alias="kind_", default="tool_call" ) @@ -138,7 +145,7 @@ class Config: class TasksCreateTaskRequestMainItem_Prompt(pydantic_v1.BaseModel): prompt: TasksPromptStepPrompt - settings: TasksPromptStepSettings + settings: ChatChatSettings kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") def json(self, **kwargs: typing.Any) -> str: @@ -213,8 +220,274 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksCreateTaskRequestMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksCreateTaskRequestMainItem_WaitForInput(pydantic_v1.BaseModel): - info: TasksWaitForInputStepInfo + wait_for_input: typing.Dict[str, CommonPyExpression] kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( alias="kind_", default="wait_for_input" ) @@ -295,12 +568,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksCreateTaskRequestMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksCreateTaskRequestMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: typing.Optional[CommonPyExpression] = None + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksCreateTaskRequestMainItem = typing.Union[ TasksCreateTaskRequestMainItem_Evaluate, TasksCreateTaskRequestMainItem_ToolCall, TasksCreateTaskRequestMainItem_Yield, TasksCreateTaskRequestMainItem_Prompt, TasksCreateTaskRequestMainItem_Error, + TasksCreateTaskRequestMainItem_Sleep, + TasksCreateTaskRequestMainItem_Return, + TasksCreateTaskRequestMainItem_Get, + TasksCreateTaskRequestMainItem_Set, + TasksCreateTaskRequestMainItem_Log, + TasksCreateTaskRequestMainItem_Embed, + TasksCreateTaskRequestMainItem_Search, TasksCreateTaskRequestMainItem_WaitForInput, TasksCreateTaskRequestMainItem_IfElse, + TasksCreateTaskRequestMainItem_Switch, + TasksCreateTaskRequestMainItem_Foreach, + TasksCreateTaskRequestMainItem_Parallel, + TasksCreateTaskRequestMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_embed_step.py b/sdks/python/julep/api/types/tasks_embed_step.py new file mode 100644 index 000000000..3e3a2661d --- /dev/null +++ b/sdks/python/julep/api/types/tasks_embed_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .docs_embed_query_request import DocsEmbedQueryRequest + + +class TasksEmbedStep(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest = pydantic_v1.Field() + """ + The text to embed + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_error_workflow_step.py b/sdks/python/julep/api/types/tasks_error_workflow_step.py index 37bc482ef..cc3a4e641 100644 --- a/sdks/python/julep/api/types/tasks_error_workflow_step.py +++ b/sdks/python/julep/api/types/tasks_error_workflow_step.py @@ -5,10 +5,9 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .tasks_base_workflow_step import TasksBaseWorkflowStep -class TasksErrorWorkflowStep(TasksBaseWorkflowStep): +class TasksErrorWorkflowStep(pydantic_v1.BaseModel): error: str = pydantic_v1.Field() """ The error message @@ -42,7 +41,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_evaluate_step.py b/sdks/python/julep/api/types/tasks_evaluate_step.py index ebb8a9781..b16e9d1f5 100644 --- a/sdks/python/julep/api/types/tasks_evaluate_step.py +++ b/sdks/python/julep/api/types/tasks_evaluate_step.py @@ -6,10 +6,9 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_py_expression import CommonPyExpression -from .tasks_base_workflow_step import TasksBaseWorkflowStep -class TasksEvaluateStep(TasksBaseWorkflowStep): +class TasksEvaluateStep(pydantic_v1.BaseModel): evaluate: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field() """ The expression to evaluate @@ -43,7 +42,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_foreach_do.py b/sdks/python/julep/api/types/tasks_foreach_do.py new file mode 100644 index 000000000..9261240fc --- /dev/null +++ b/sdks/python/julep/api/types/tasks_foreach_do.py @@ -0,0 +1,54 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_foreach_do_do import TasksForeachDoDo + + +class TasksForeachDo(pydantic_v1.BaseModel): + in_: CommonPyExpression = pydantic_v1.Field(alias="in") + """ + The variable to iterate over + """ + + do: TasksForeachDoDo = pydantic_v1.Field() + """ + The steps to run for each iteration + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_foreach_do_do.py b/sdks/python/julep/api/types/tasks_foreach_do_do.py new file mode 100644 index 000000000..90979bb54 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_foreach_do_do.py @@ -0,0 +1,589 @@ +# This file was auto-generated by Fern from our API Definition. + +from __future__ import annotations + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings +from .common_py_expression import CommonPyExpression +from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor + + +class TasksForeachDoDo_Evaluate(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + evaluate: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["evaluate"] = pydantic_v1.Field( + alias="kind_", default="evaluate" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_ToolCall(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + tool: CommonToolRef + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["tool_call"] = pydantic_v1.Field( + alias="kind_", default="tool_call" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Yield(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + workflow: str + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["yield"] = pydantic_v1.Field(alias="kind_", default="yield") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Prompt(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + prompt: TasksPromptStepPrompt + settings: ChatChatSettings + kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Error(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + error: str + kind: typing.Literal["error"] = pydantic_v1.Field(alias="kind_", default="error") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Sleep(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Return(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Get(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Set(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Log(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Embed(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_Search(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksForeachDoDo_WaitForInput(pydantic_v1.BaseModel): + """ + The steps to run for each iteration + """ + + wait_for_input: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( + alias="kind_", default="wait_for_input" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +TasksForeachDoDo = typing.Union[ + TasksForeachDoDo_Evaluate, + TasksForeachDoDo_ToolCall, + TasksForeachDoDo_Yield, + TasksForeachDoDo_Prompt, + TasksForeachDoDo_Error, + TasksForeachDoDo_Sleep, + TasksForeachDoDo_Return, + TasksForeachDoDo_Get, + TasksForeachDoDo_Set, + TasksForeachDoDo_Log, + TasksForeachDoDo_Embed, + TasksForeachDoDo_Search, + TasksForeachDoDo_WaitForInput, +] diff --git a/sdks/python/julep/api/types/tasks_foreach_step.py b/sdks/python/julep/api/types/tasks_foreach_step.py new file mode 100644 index 000000000..fc4be5c18 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_foreach_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_foreach_do import TasksForeachDo + + +class TasksForeachStep(pydantic_v1.BaseModel): + foreach: TasksForeachDo = pydantic_v1.Field() + """ + The steps to run for each iteration + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/agents_docs_search_route_search_response.py b/sdks/python/julep/api/types/tasks_get_step.py similarity index 88% rename from sdks/python/julep/api/types/agents_docs_search_route_search_response.py rename to sdks/python/julep/api/types/tasks_get_step.py index 400d1a51d..560a0c8f3 100644 --- a/sdks/python/julep/api/types/agents_docs_search_route_search_response.py +++ b/sdks/python/julep/api/types/tasks_get_step.py @@ -5,11 +5,13 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .docs_doc_reference import DocsDocReference -class AgentsDocsSearchRouteSearchResponse(pydantic_v1.BaseModel): - results: typing.List[DocsDocReference] +class TasksGetStep(pydantic_v1.BaseModel): + get: str = pydantic_v1.Field() + """ + The key to get + """ def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/tasks_if_else_workflow_step.py b/sdks/python/julep/api/types/tasks_if_else_workflow_step.py index 2370d66cf..cc3670431 100644 --- a/sdks/python/julep/api/types/tasks_if_else_workflow_step.py +++ b/sdks/python/julep/api/types/tasks_if_else_workflow_step.py @@ -6,12 +6,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_py_expression import CommonPyExpression -from .tasks_base_workflow_step import TasksBaseWorkflowStep from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen -class TasksIfElseWorkflowStep(TasksBaseWorkflowStep): +class TasksIfElseWorkflowStep(pydantic_v1.BaseModel): if_: CommonPyExpression = pydantic_v1.Field(alias="if") """ The condition to evaluate diff --git a/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py b/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py index 37d64c117..4f3cf5111 100644 --- a/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py +++ b/sdks/python/julep/api/types/tasks_if_else_workflow_step_else.py @@ -1,18 +1,589 @@ # This file was auto-generated by Fern from our API Definition. +from __future__ import annotations + +import datetime as dt import typing -from .tasks_error_workflow_step import TasksErrorWorkflowStep -from .tasks_prompt_step import TasksPromptStep -from .tasks_tool_call_step import TasksToolCallStep -from .tasks_wait_for_input_step import TasksWaitForInputStep -from .tasks_yield_step import TasksYieldStep +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings +from .common_py_expression import CommonPyExpression +from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor + + +class TasksIfElseWorkflowStepElse_Evaluate(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + evaluate: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["evaluate"] = pydantic_v1.Field( + alias="kind_", default="evaluate" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_ToolCall(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + tool: CommonToolRef + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["tool_call"] = pydantic_v1.Field( + alias="kind_", default="tool_call" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Yield(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + workflow: str + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["yield"] = pydantic_v1.Field(alias="kind_", default="yield") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Prompt(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + prompt: TasksPromptStepPrompt + settings: ChatChatSettings + kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Error(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + error: str + kind: typing.Literal["error"] = pydantic_v1.Field(alias="kind_", default="error") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Sleep(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Return(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Get(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Set(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Log(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Embed(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_Search(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepElse_WaitForInput(pydantic_v1.BaseModel): + """ + The steps to run if the condition is false + """ + + wait_for_input: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( + alias="kind_", default="wait_for_input" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + TasksIfElseWorkflowStepElse = typing.Union[ - typing.Any, - TasksToolCallStep, - TasksYieldStep, - TasksPromptStep, - TasksErrorWorkflowStep, - TasksWaitForInputStep, + TasksIfElseWorkflowStepElse_Evaluate, + TasksIfElseWorkflowStepElse_ToolCall, + TasksIfElseWorkflowStepElse_Yield, + TasksIfElseWorkflowStepElse_Prompt, + TasksIfElseWorkflowStepElse_Error, + TasksIfElseWorkflowStepElse_Sleep, + TasksIfElseWorkflowStepElse_Return, + TasksIfElseWorkflowStepElse_Get, + TasksIfElseWorkflowStepElse_Set, + TasksIfElseWorkflowStepElse_Log, + TasksIfElseWorkflowStepElse_Embed, + TasksIfElseWorkflowStepElse_Search, + TasksIfElseWorkflowStepElse_WaitForInput, ] diff --git a/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py b/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py index a706d4c86..4d760543a 100644 --- a/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py +++ b/sdks/python/julep/api/types/tasks_if_else_workflow_step_then.py @@ -1,18 +1,589 @@ # This file was auto-generated by Fern from our API Definition. +from __future__ import annotations + +import datetime as dt import typing -from .tasks_error_workflow_step import TasksErrorWorkflowStep -from .tasks_prompt_step import TasksPromptStep -from .tasks_tool_call_step import TasksToolCallStep -from .tasks_wait_for_input_step import TasksWaitForInputStep -from .tasks_yield_step import TasksYieldStep +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings +from .common_py_expression import CommonPyExpression +from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor + + +class TasksIfElseWorkflowStepThen_Evaluate(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + evaluate: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["evaluate"] = pydantic_v1.Field( + alias="kind_", default="evaluate" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_ToolCall(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + tool: CommonToolRef + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["tool_call"] = pydantic_v1.Field( + alias="kind_", default="tool_call" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Yield(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + workflow: str + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["yield"] = pydantic_v1.Field(alias="kind_", default="yield") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Prompt(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + prompt: TasksPromptStepPrompt + settings: ChatChatSettings + kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Error(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + error: str + kind: typing.Literal["error"] = pydantic_v1.Field(alias="kind_", default="error") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Sleep(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Return(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Get(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Set(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Log(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Embed(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_Search(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksIfElseWorkflowStepThen_WaitForInput(pydantic_v1.BaseModel): + """ + The steps to run if the condition is true + """ + + wait_for_input: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( + alias="kind_", default="wait_for_input" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + TasksIfElseWorkflowStepThen = typing.Union[ - typing.Any, - TasksToolCallStep, - TasksYieldStep, - TasksPromptStep, - TasksErrorWorkflowStep, - TasksWaitForInputStep, + TasksIfElseWorkflowStepThen_Evaluate, + TasksIfElseWorkflowStepThen_ToolCall, + TasksIfElseWorkflowStepThen_Yield, + TasksIfElseWorkflowStepThen_Prompt, + TasksIfElseWorkflowStepThen_Error, + TasksIfElseWorkflowStepThen_Sleep, + TasksIfElseWorkflowStepThen_Return, + TasksIfElseWorkflowStepThen_Get, + TasksIfElseWorkflowStepThen_Set, + TasksIfElseWorkflowStepThen_Log, + TasksIfElseWorkflowStepThen_Embed, + TasksIfElseWorkflowStepThen_Search, + TasksIfElseWorkflowStepThen_WaitForInput, ] diff --git a/sdks/python/julep/api/types/tasks_log_step.py b/sdks/python/julep/api/types/tasks_log_step.py new file mode 100644 index 000000000..990301464 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_log_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression + + +class TasksLogStep(pydantic_v1.BaseModel): + log: CommonPyExpression = pydantic_v1.Field() + """ + The value to log + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_map_over.py b/sdks/python/julep/api/types/tasks_map_over.py new file mode 100644 index 000000000..d3377ff00 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_map_over.py @@ -0,0 +1,51 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression + + +class TasksMapOver(pydantic_v1.BaseModel): + over: CommonPyExpression = pydantic_v1.Field() + """ + The variable to iterate over + """ + + workflow: str = pydantic_v1.Field() + """ + The subworkflow to run for each iteration + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_map_reduce_step.py b/sdks/python/julep/api/types/tasks_map_reduce_step.py new file mode 100644 index 000000000..18a1bb091 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_map_reduce_step.py @@ -0,0 +1,54 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression +from .tasks_map_over import TasksMapOver + + +class TasksMapReduceStep(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + """ + The steps to run for each iteration + """ + + reduce: typing.Optional[CommonPyExpression] = pydantic_v1.Field(default=None) + """ + The expression to reduce the results (`_` is a list of outputs). If not provided, the results are returned as a list. + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_parallel_step.py b/sdks/python/julep/api/types/tasks_parallel_step.py new file mode 100644 index 000000000..71979f12f --- /dev/null +++ b/sdks/python/julep/api/types/tasks_parallel_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem + + +class TasksParallelStep(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] = pydantic_v1.Field() + """ + The steps to run in parallel. Max concurrency will depend on the platform + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_parallel_step_parallel_item.py b/sdks/python/julep/api/types/tasks_parallel_step_parallel_item.py new file mode 100644 index 000000000..72c55b610 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_parallel_step_parallel_item.py @@ -0,0 +1,537 @@ +# This file was auto-generated by Fern from our API Definition. + +from __future__ import annotations + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings +from .common_py_expression import CommonPyExpression +from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_prompt_step_prompt import TasksPromptStepPrompt +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor + + +class TasksParallelStepParallelItem_Evaluate(pydantic_v1.BaseModel): + evaluate: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["evaluate"] = pydantic_v1.Field( + alias="kind_", default="evaluate" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_ToolCall(pydantic_v1.BaseModel): + tool: CommonToolRef + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["tool_call"] = pydantic_v1.Field( + alias="kind_", default="tool_call" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Yield(pydantic_v1.BaseModel): + workflow: str + arguments: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["yield"] = pydantic_v1.Field(alias="kind_", default="yield") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Prompt(pydantic_v1.BaseModel): + prompt: TasksPromptStepPrompt + settings: ChatChatSettings + kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Error(pydantic_v1.BaseModel): + error: str + kind: typing.Literal["error"] = pydantic_v1.Field(alias="kind_", default="error") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksParallelStepParallelItem_WaitForInput(pydantic_v1.BaseModel): + wait_for_input: typing.Dict[str, CommonPyExpression] + kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( + alias="kind_", default="wait_for_input" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +TasksParallelStepParallelItem = typing.Union[ + TasksParallelStepParallelItem_Evaluate, + TasksParallelStepParallelItem_ToolCall, + TasksParallelStepParallelItem_Yield, + TasksParallelStepParallelItem_Prompt, + TasksParallelStepParallelItem_Error, + TasksParallelStepParallelItem_Sleep, + TasksParallelStepParallelItem_Return, + TasksParallelStepParallelItem_Get, + TasksParallelStepParallelItem_Set, + TasksParallelStepParallelItem_Log, + TasksParallelStepParallelItem_Embed, + TasksParallelStepParallelItem_Search, + TasksParallelStepParallelItem_WaitForInput, +] diff --git a/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py b/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py index 999373c34..bcf9ba690 100644 --- a/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py +++ b/sdks/python/julep/api/types/tasks_patch_task_request_main_item.py @@ -7,13 +7,20 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt -from .tasks_prompt_step_settings import TasksPromptStepSettings -from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor class TasksPatchTaskRequestMainItem_Evaluate(pydantic_v1.BaseModel): @@ -58,7 +65,7 @@ class Config: class TasksPatchTaskRequestMainItem_ToolCall(pydantic_v1.BaseModel): tool: CommonToolRef - arguments: typing.Dict[str, typing.Any] + arguments: typing.Dict[str, CommonPyExpression] kind: typing.Literal["tool_call"] = pydantic_v1.Field( alias="kind_", default="tool_call" ) @@ -138,7 +145,7 @@ class Config: class TasksPatchTaskRequestMainItem_Prompt(pydantic_v1.BaseModel): prompt: TasksPromptStepPrompt - settings: TasksPromptStepSettings + settings: ChatChatSettings kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") def json(self, **kwargs: typing.Any) -> str: @@ -213,8 +220,274 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksPatchTaskRequestMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksPatchTaskRequestMainItem_WaitForInput(pydantic_v1.BaseModel): - info: TasksWaitForInputStepInfo + wait_for_input: typing.Dict[str, CommonPyExpression] kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( alias="kind_", default="wait_for_input" ) @@ -295,12 +568,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksPatchTaskRequestMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksPatchTaskRequestMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: typing.Optional[CommonPyExpression] = None + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksPatchTaskRequestMainItem = typing.Union[ TasksPatchTaskRequestMainItem_Evaluate, TasksPatchTaskRequestMainItem_ToolCall, TasksPatchTaskRequestMainItem_Yield, TasksPatchTaskRequestMainItem_Prompt, TasksPatchTaskRequestMainItem_Error, + TasksPatchTaskRequestMainItem_Sleep, + TasksPatchTaskRequestMainItem_Return, + TasksPatchTaskRequestMainItem_Get, + TasksPatchTaskRequestMainItem_Set, + TasksPatchTaskRequestMainItem_Log, + TasksPatchTaskRequestMainItem_Embed, + TasksPatchTaskRequestMainItem_Search, TasksPatchTaskRequestMainItem_WaitForInput, TasksPatchTaskRequestMainItem_IfElse, + TasksPatchTaskRequestMainItem_Switch, + TasksPatchTaskRequestMainItem_Foreach, + TasksPatchTaskRequestMainItem_Parallel, + TasksPatchTaskRequestMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_prompt_step.py b/sdks/python/julep/api/types/tasks_prompt_step.py index 17b79d10e..f99cb8192 100644 --- a/sdks/python/julep/api/types/tasks_prompt_step.py +++ b/sdks/python/julep/api/types/tasks_prompt_step.py @@ -5,18 +5,17 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .tasks_base_workflow_step import TasksBaseWorkflowStep +from .chat_chat_settings import ChatChatSettings from .tasks_prompt_step_prompt import TasksPromptStepPrompt -from .tasks_prompt_step_settings import TasksPromptStepSettings -class TasksPromptStep(TasksBaseWorkflowStep): +class TasksPromptStep(pydantic_v1.BaseModel): prompt: TasksPromptStepPrompt = pydantic_v1.Field() """ The prompt to run """ - settings: TasksPromptStepSettings = pydantic_v1.Field() + settings: ChatChatSettings = pydantic_v1.Field() """ Settings for the prompt """ @@ -49,7 +48,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_prompt_step_settings.py b/sdks/python/julep/api/types/tasks_prompt_step_settings.py deleted file mode 100644 index da86b2ad2..000000000 --- a/sdks/python/julep/api/types/tasks_prompt_step_settings.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -from .tasks_prompt_step_settings_agent import TasksPromptStepSettingsAgent -from .tasks_prompt_step_settings_frequency_penalty import ( - TasksPromptStepSettingsFrequencyPenalty, -) -from .tasks_prompt_step_settings_preset import TasksPromptStepSettingsPreset - -TasksPromptStepSettings = typing.Union[ - TasksPromptStepSettingsPreset, - TasksPromptStepSettingsFrequencyPenalty, - TasksPromptStepSettingsAgent, -] diff --git a/sdks/python/julep/api/types/tasks_prompt_step_settings_agent.py b/sdks/python/julep/api/types/tasks_prompt_step_settings_agent.py deleted file mode 100644 index 4f05effd6..000000000 --- a/sdks/python/julep/api/types/tasks_prompt_step_settings_agent.py +++ /dev/null @@ -1,115 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .chat_completion_response_format import ChatCompletionResponseFormat -from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -from .common_logit_bias import CommonLogitBias -from .common_uuid import CommonUuid - - -class TasksPromptStepSettingsAgent(pydantic_v1.BaseModel): - model: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( - default=None - ) - """ - Identifier of the model to be used - """ - - stream: bool = pydantic_v1.Field() - """ - Indicates if the server should stream the response as it's generated - """ - - stop: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) - """ - Up to 4 sequences where the API will stop generating further tokens. - """ - - seed: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - If specified, the system will make a best effort to sample deterministically for that particular seed value - """ - - max_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - The maximum number of tokens to generate in the chat completion - """ - - logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = pydantic_v1.Field( - default=None - ) - """ - Modify the likelihood of specified tokens appearing in the completion - """ - - response_format: typing.Optional[ChatCompletionResponseFormat] = pydantic_v1.Field( - default=None - ) - """ - Response format (set to `json_object` to restrict output to JSON) - """ - - agent: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) - """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - """ - - repetition_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - length_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. - """ - - temperature: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - """ - - top_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - """ - - min_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Minimum probability compared to leading token to be considered - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_prompt_step_settings_frequency_penalty.py b/sdks/python/julep/api/types/tasks_prompt_step_settings_frequency_penalty.py deleted file mode 100644 index aaefaff25..000000000 --- a/sdks/python/julep/api/types/tasks_prompt_step_settings_frequency_penalty.py +++ /dev/null @@ -1,110 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -from ..core.datetime_utils import serialize_datetime -from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .chat_completion_response_format import ChatCompletionResponseFormat -from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -from .common_logit_bias import CommonLogitBias -from .common_uuid import CommonUuid - - -class TasksPromptStepSettingsFrequencyPenalty(pydantic_v1.BaseModel): - model: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( - default=None - ) - """ - Identifier of the model to be used - """ - - stream: bool = pydantic_v1.Field() - """ - Indicates if the server should stream the response as it's generated - """ - - stop: typing.Optional[typing.List[str]] = pydantic_v1.Field(default=None) - """ - Up to 4 sequences where the API will stop generating further tokens. - """ - - seed: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - If specified, the system will make a best effort to sample deterministically for that particular seed value - """ - - max_tokens: typing.Optional[int] = pydantic_v1.Field(default=None) - """ - The maximum number of tokens to generate in the chat completion - """ - - logit_bias: typing.Optional[typing.Dict[str, CommonLogitBias]] = pydantic_v1.Field( - default=None - ) - """ - Modify the likelihood of specified tokens appearing in the completion - """ - - response_format: typing.Optional[ChatCompletionResponseFormat] = pydantic_v1.Field( - default=None - ) - """ - Response format (set to `json_object` to restrict output to JSON) - """ - - agent: typing.Optional[CommonUuid] = pydantic_v1.Field(default=None) - """ - Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - """ - - frequency_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - presence_penalty: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - temperature: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - """ - - top_p: typing.Optional[float] = pydantic_v1.Field(default=None) - """ - Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - """ - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults_exclude_unset: typing.Any = { - "by_alias": True, - "exclude_unset": True, - **kwargs, - } - kwargs_with_defaults_exclude_none: typing.Any = { - "by_alias": True, - "exclude_none": True, - **kwargs, - } - - return deep_union_pydantic_dicts( - super().dict(**kwargs_with_defaults_exclude_unset), - super().dict(**kwargs_with_defaults_exclude_none), - ) - - class Config: - frozen = True - smart_union = True - extra = pydantic_v1.Extra.allow - json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_return_step.py b/sdks/python/julep/api/types/tasks_return_step.py new file mode 100644 index 000000000..c5e5f835c --- /dev/null +++ b/sdks/python/julep/api/types/tasks_return_step.py @@ -0,0 +1,48 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression + + +class TasksReturnStep(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + """ + The value to return + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_search_step.py b/sdks/python/julep/api/types/tasks_search_step.py new file mode 100644 index 000000000..01d1cce2b --- /dev/null +++ b/sdks/python/julep/api/types/tasks_search_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_search_step_search import TasksSearchStepSearch + + +class TasksSearchStep(pydantic_v1.BaseModel): + search: TasksSearchStepSearch = pydantic_v1.Field() + """ + The search query + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_search_step_search.py b/sdks/python/julep/api/types/tasks_search_step_search.py new file mode 100644 index 000000000..678e79d5f --- /dev/null +++ b/sdks/python/julep/api/types/tasks_search_step_search.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .docs_hybrid_doc_search_request import DocsHybridDocSearchRequest +from .docs_text_only_doc_search_request import DocsTextOnlyDocSearchRequest +from .docs_vector_doc_search_request import DocsVectorDocSearchRequest + +TasksSearchStepSearch = typing.Union[ + DocsVectorDocSearchRequest, DocsTextOnlyDocSearchRequest, DocsHybridDocSearchRequest +] diff --git a/sdks/python/julep/api/types/tasks_set_key.py b/sdks/python/julep/api/types/tasks_set_key.py new file mode 100644 index 000000000..c925599a2 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_set_key.py @@ -0,0 +1,51 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression + + +class TasksSetKey(pydantic_v1.BaseModel): + key: str = pydantic_v1.Field() + """ + The key to set + """ + + value: CommonPyExpression = pydantic_v1.Field() + """ + The value to set + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_set_step.py b/sdks/python/julep/api/types/tasks_set_step.py new file mode 100644 index 000000000..5195812a8 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_set_step.py @@ -0,0 +1,48 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_set_step_set import TasksSetStepSet + + +class TasksSetStep(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + """ + The value to set + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_set_step_set.py b/sdks/python/julep/api/types/tasks_set_step_set.py new file mode 100644 index 000000000..0b5c955c1 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_set_step_set.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .tasks_set_key import TasksSetKey + +TasksSetStepSet = typing.Union[TasksSetKey, typing.List[TasksSetKey]] diff --git a/sdks/python/julep/api/types/tasks_sleep_for.py b/sdks/python/julep/api/types/tasks_sleep_for.py new file mode 100644 index 000000000..44a3acd32 --- /dev/null +++ b/sdks/python/julep/api/types/tasks_sleep_for.py @@ -0,0 +1,60 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 + + +class TasksSleepFor(pydantic_v1.BaseModel): + seconds: int = pydantic_v1.Field() + """ + The number of seconds to sleep for + """ + + minutes: int = pydantic_v1.Field() + """ + The number of minutes to sleep for + """ + + hours: int = pydantic_v1.Field() + """ + The number of hours to sleep for + """ + + days: int = pydantic_v1.Field() + """ + The number of days to sleep for + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_sleep_step.py b/sdks/python/julep/api/types/tasks_sleep_step.py new file mode 100644 index 000000000..73c75079e --- /dev/null +++ b/sdks/python/julep/api/types/tasks_sleep_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_sleep_for import TasksSleepFor + + +class TasksSleepStep(pydantic_v1.BaseModel): + sleep: TasksSleepFor = pydantic_v1.Field() + """ + The duration to sleep for + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_switch_step.py b/sdks/python/julep/api/types/tasks_switch_step.py new file mode 100644 index 000000000..b7623975f --- /dev/null +++ b/sdks/python/julep/api/types/tasks_switch_step.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .tasks_case_then import TasksCaseThen + + +class TasksSwitchStep(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] = pydantic_v1.Field() + """ + The cond tree + """ + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_task_main_item.py b/sdks/python/julep/api/types/tasks_task_main_item.py index 4294ceadb..6cba1d2e2 100644 --- a/sdks/python/julep/api/types/tasks_task_main_item.py +++ b/sdks/python/julep/api/types/tasks_task_main_item.py @@ -7,13 +7,20 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt -from .tasks_prompt_step_settings import TasksPromptStepSettings -from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor class TasksTaskMainItem_Evaluate(pydantic_v1.BaseModel): @@ -58,7 +65,7 @@ class Config: class TasksTaskMainItem_ToolCall(pydantic_v1.BaseModel): tool: CommonToolRef - arguments: typing.Dict[str, typing.Any] + arguments: typing.Dict[str, CommonPyExpression] kind: typing.Literal["tool_call"] = pydantic_v1.Field( alias="kind_", default="tool_call" ) @@ -138,7 +145,7 @@ class Config: class TasksTaskMainItem_Prompt(pydantic_v1.BaseModel): prompt: TasksPromptStepPrompt - settings: TasksPromptStepSettings + settings: ChatChatSettings kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") def json(self, **kwargs: typing.Any) -> str: @@ -213,8 +220,274 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksTaskMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksTaskMainItem_WaitForInput(pydantic_v1.BaseModel): - info: TasksWaitForInputStepInfo + wait_for_input: typing.Dict[str, CommonPyExpression] kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( alias="kind_", default="wait_for_input" ) @@ -295,12 +568,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksTaskMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksTaskMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: typing.Optional[CommonPyExpression] = None + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksTaskMainItem = typing.Union[ TasksTaskMainItem_Evaluate, TasksTaskMainItem_ToolCall, TasksTaskMainItem_Yield, TasksTaskMainItem_Prompt, TasksTaskMainItem_Error, + TasksTaskMainItem_Sleep, + TasksTaskMainItem_Return, + TasksTaskMainItem_Get, + TasksTaskMainItem_Set, + TasksTaskMainItem_Log, + TasksTaskMainItem_Embed, + TasksTaskMainItem_Search, TasksTaskMainItem_WaitForInput, TasksTaskMainItem_IfElse, + TasksTaskMainItem_Switch, + TasksTaskMainItem_Foreach, + TasksTaskMainItem_Parallel, + TasksTaskMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_task_tool.py b/sdks/python/julep/api/types/tasks_task_tool.py index 56a891988..c5638b4ea 100644 --- a/sdks/python/julep/api/types/tasks_task_tool.py +++ b/sdks/python/julep/api/types/tasks_task_tool.py @@ -5,32 +5,15 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .common_valid_python_identifier import CommonValidPythonIdentifier -from .tools_function_def import ToolsFunctionDef -from .tools_tool_type import ToolsToolType +from .tools_create_tool_request import ToolsCreateToolRequest -class TasksTaskTool(pydantic_v1.BaseModel): +class TasksTaskTool(ToolsCreateToolRequest): inherited: typing.Optional[bool] = pydantic_v1.Field(default=None) """ Read-only: Whether the tool was inherited or not. Only applies within tasks. """ - type: ToolsToolType = pydantic_v1.Field() - """ - Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) - """ - - name: CommonValidPythonIdentifier = pydantic_v1.Field() - """ - Name of the tool (must be unique for this agent and a valid python identifier string ) - """ - - function: typing.Optional[ToolsFunctionDef] = None - integration: typing.Optional[typing.Any] = None - system: typing.Optional[typing.Any] = None - api_call: typing.Optional[typing.Any] = None - def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { "by_alias": True, @@ -59,5 +42,7 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True + allow_population_by_field_name = True + populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_tool_call_step.py b/sdks/python/julep/api/types/tasks_tool_call_step.py index 08e616a94..8923e9140 100644 --- a/sdks/python/julep/api/types/tasks_tool_call_step.py +++ b/sdks/python/julep/api/types/tasks_tool_call_step.py @@ -5,17 +5,17 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef -from .tasks_base_workflow_step import TasksBaseWorkflowStep -class TasksToolCallStep(TasksBaseWorkflowStep): +class TasksToolCallStep(pydantic_v1.BaseModel): tool: CommonToolRef = pydantic_v1.Field() """ The tool to run """ - arguments: typing.Dict[str, typing.Any] = pydantic_v1.Field() + arguments: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field() """ The input parameters for the tool """ @@ -48,7 +48,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_update_task_request_main_item.py b/sdks/python/julep/api/types/tasks_update_task_request_main_item.py index 6eb5a72b5..7eb7d9a75 100644 --- a/sdks/python/julep/api/types/tasks_update_task_request_main_item.py +++ b/sdks/python/julep/api/types/tasks_update_task_request_main_item.py @@ -7,13 +7,20 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .chat_chat_settings import ChatChatSettings from .common_py_expression import CommonPyExpression from .common_tool_ref import CommonToolRef +from .docs_embed_query_request import DocsEmbedQueryRequest +from .tasks_case_then import TasksCaseThen +from .tasks_foreach_do import TasksForeachDo from .tasks_if_else_workflow_step_else import TasksIfElseWorkflowStepElse from .tasks_if_else_workflow_step_then import TasksIfElseWorkflowStepThen +from .tasks_map_over import TasksMapOver +from .tasks_parallel_step_parallel_item import TasksParallelStepParallelItem from .tasks_prompt_step_prompt import TasksPromptStepPrompt -from .tasks_prompt_step_settings import TasksPromptStepSettings -from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo +from .tasks_search_step_search import TasksSearchStepSearch +from .tasks_set_step_set import TasksSetStepSet +from .tasks_sleep_for import TasksSleepFor class TasksUpdateTaskRequestMainItem_Evaluate(pydantic_v1.BaseModel): @@ -58,7 +65,7 @@ class Config: class TasksUpdateTaskRequestMainItem_ToolCall(pydantic_v1.BaseModel): tool: CommonToolRef - arguments: typing.Dict[str, typing.Any] + arguments: typing.Dict[str, CommonPyExpression] kind: typing.Literal["tool_call"] = pydantic_v1.Field( alias="kind_", default="tool_call" ) @@ -138,7 +145,7 @@ class Config: class TasksUpdateTaskRequestMainItem_Prompt(pydantic_v1.BaseModel): prompt: TasksPromptStepPrompt - settings: TasksPromptStepSettings + settings: ChatChatSettings kind: typing.Literal["prompt"] = pydantic_v1.Field(alias="kind_", default="prompt") def json(self, **kwargs: typing.Any) -> str: @@ -213,8 +220,274 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksUpdateTaskRequestMainItem_Sleep(pydantic_v1.BaseModel): + sleep: TasksSleepFor + kind: typing.Literal["sleep"] = pydantic_v1.Field(alias="kind_", default="sleep") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Return(pydantic_v1.BaseModel): + return_: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field(alias="return") + kind: typing.Literal["return"] = pydantic_v1.Field(alias="kind_", default="return") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Get(pydantic_v1.BaseModel): + get: str + kind: typing.Literal["get"] = pydantic_v1.Field(alias="kind_", default="get") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Set(pydantic_v1.BaseModel): + set_: TasksSetStepSet = pydantic_v1.Field(alias="set") + kind: typing.Literal["set"] = pydantic_v1.Field(alias="kind_", default="set") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Log(pydantic_v1.BaseModel): + log: CommonPyExpression + kind: typing.Literal["log"] = pydantic_v1.Field(alias="kind_", default="log") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Embed(pydantic_v1.BaseModel): + embed: DocsEmbedQueryRequest + kind: typing.Literal["embed"] = pydantic_v1.Field(alias="kind_", default="embed") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Search(pydantic_v1.BaseModel): + search: TasksSearchStepSearch + kind: typing.Literal["search"] = pydantic_v1.Field(alias="kind_", default="search") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + class TasksUpdateTaskRequestMainItem_WaitForInput(pydantic_v1.BaseModel): - info: TasksWaitForInputStepInfo + wait_for_input: typing.Dict[str, CommonPyExpression] kind: typing.Literal["wait_for_input"] = pydantic_v1.Field( alias="kind_", default="wait_for_input" ) @@ -295,12 +568,182 @@ class Config: json_encoders = {dt.datetime: serialize_datetime} +class TasksUpdateTaskRequestMainItem_Switch(pydantic_v1.BaseModel): + switch: typing.List[TasksCaseThen] + kind: typing.Literal["switch"] = pydantic_v1.Field(alias="kind_", default="switch") + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Foreach(pydantic_v1.BaseModel): + foreach: TasksForeachDo + kind: typing.Literal["foreach"] = pydantic_v1.Field( + alias="kind_", default="foreach" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_Parallel(pydantic_v1.BaseModel): + parallel: typing.List[TasksParallelStepParallelItem] + kind: typing.Literal["parallel"] = pydantic_v1.Field( + alias="kind_", default="parallel" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + +class TasksUpdateTaskRequestMainItem_MapReduce(pydantic_v1.BaseModel): + map_: TasksMapOver = pydantic_v1.Field(alias="map") + reduce: typing.Optional[CommonPyExpression] = None + kind: typing.Literal["map_reduce"] = pydantic_v1.Field( + alias="kind_", default="map_reduce" + ) + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} + + TasksUpdateTaskRequestMainItem = typing.Union[ TasksUpdateTaskRequestMainItem_Evaluate, TasksUpdateTaskRequestMainItem_ToolCall, TasksUpdateTaskRequestMainItem_Yield, TasksUpdateTaskRequestMainItem_Prompt, TasksUpdateTaskRequestMainItem_Error, + TasksUpdateTaskRequestMainItem_Sleep, + TasksUpdateTaskRequestMainItem_Return, + TasksUpdateTaskRequestMainItem_Get, + TasksUpdateTaskRequestMainItem_Set, + TasksUpdateTaskRequestMainItem_Log, + TasksUpdateTaskRequestMainItem_Embed, + TasksUpdateTaskRequestMainItem_Search, TasksUpdateTaskRequestMainItem_WaitForInput, TasksUpdateTaskRequestMainItem_IfElse, + TasksUpdateTaskRequestMainItem_Switch, + TasksUpdateTaskRequestMainItem_Foreach, + TasksUpdateTaskRequestMainItem_Parallel, + TasksUpdateTaskRequestMainItem_MapReduce, ] diff --git a/sdks/python/julep/api/types/tasks_wait_for_input_step.py b/sdks/python/julep/api/types/tasks_wait_for_input_step.py index 4ef962fb9..3d6aafda2 100644 --- a/sdks/python/julep/api/types/tasks_wait_for_input_step.py +++ b/sdks/python/julep/api/types/tasks_wait_for_input_step.py @@ -5,12 +5,11 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .tasks_base_workflow_step import TasksBaseWorkflowStep -from .tasks_wait_for_input_step_info import TasksWaitForInputStepInfo +from .common_py_expression import CommonPyExpression -class TasksWaitForInputStep(TasksBaseWorkflowStep): - info: TasksWaitForInputStepInfo = pydantic_v1.Field() +class TasksWaitForInputStep(pydantic_v1.BaseModel): + wait_for_input: typing.Dict[str, CommonPyExpression] = pydantic_v1.Field() """ Any additional info or data """ @@ -43,7 +42,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tasks_wait_for_input_step_info.py b/sdks/python/julep/api/types/tasks_wait_for_input_step_info.py deleted file mode 100644 index 275a35d28..000000000 --- a/sdks/python/julep/api/types/tasks_wait_for_input_step_info.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -TasksWaitForInputStepInfo = typing.Union[str, typing.Dict[str, typing.Any]] diff --git a/sdks/python/julep/api/types/tasks_yield_step.py b/sdks/python/julep/api/types/tasks_yield_step.py index 2c8eb48d3..e2632183f 100644 --- a/sdks/python/julep/api/types/tasks_yield_step.py +++ b/sdks/python/julep/api/types/tasks_yield_step.py @@ -6,10 +6,9 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_py_expression import CommonPyExpression -from .tasks_base_workflow_step import TasksBaseWorkflowStep -class TasksYieldStep(TasksBaseWorkflowStep): +class TasksYieldStep(pydantic_v1.BaseModel): workflow: str = pydantic_v1.Field() """ The subworkflow to run @@ -48,7 +47,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: class Config: frozen = True smart_union = True - allow_population_by_field_name = True - populate_by_name = True extra = pydantic_v1.Extra.allow json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/tools_function_def_update.py b/sdks/python/julep/api/types/tools_create_tool_request.py similarity index 66% rename from sdks/python/julep/api/types/tools_function_def_update.py rename to sdks/python/julep/api/types/tools_create_tool_request.py index 9cfc3bd81..cb54b8434 100644 --- a/sdks/python/julep/api/types/tools_function_def_update.py +++ b/sdks/python/julep/api/types/tools_create_tool_request.py @@ -5,33 +5,30 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 -from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode from .common_valid_python_identifier import CommonValidPythonIdentifier +from .tools_function_def import ToolsFunctionDef +from .tools_tool_type import ToolsToolType -class ToolsFunctionDefUpdate(pydantic_v1.BaseModel): +class ToolsCreateToolRequest(pydantic_v1.BaseModel): """ - Function definition + Payload for creating a tool """ - name: typing.Optional[CommonValidPythonIdentifier] = pydantic_v1.Field(default=None) + type: ToolsToolType = pydantic_v1.Field() """ - DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. + Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) """ - description: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( - default=None - ) + name: CommonValidPythonIdentifier = pydantic_v1.Field() """ - Description of the function + Name of the tool (must be unique for this agent and a valid python identifier string ) """ - parameters: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field( - default=None - ) - """ - The parameters the function accepts - """ + function: typing.Optional[ToolsFunctionDef] = None + integration: typing.Optional[typing.Any] = None + system: typing.Optional[typing.Any] = None + api_call: typing.Optional[typing.Any] = None def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/julep/api/types/tools_function_def.py b/sdks/python/julep/api/types/tools_function_def.py index bc446685b..0c7e71ee3 100644 --- a/sdks/python/julep/api/types/tools_function_def.py +++ b/sdks/python/julep/api/types/tools_function_def.py @@ -6,7 +6,6 @@ from ..core.datetime_utils import serialize_datetime from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -from .common_valid_python_identifier import CommonValidPythonIdentifier class ToolsFunctionDef(pydantic_v1.BaseModel): @@ -14,11 +13,7 @@ class ToolsFunctionDef(pydantic_v1.BaseModel): Function definition """ - name: typing.Optional[CommonValidPythonIdentifier] = pydantic_v1.Field(default=None) - """ - DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. - """ - + name: typing.Optional[typing.Any] = None description: typing.Optional[CommonIdentifierSafeUnicode] = pydantic_v1.Field( default=None ) @@ -26,7 +21,9 @@ class ToolsFunctionDef(pydantic_v1.BaseModel): Description of the function """ - parameters: typing.Dict[str, typing.Any] = pydantic_v1.Field() + parameters: typing.Optional[typing.Dict[str, typing.Any]] = pydantic_v1.Field( + default=None + ) """ The parameters the function accepts """ diff --git a/sdks/python/julep/api/types/user_docs_search_route_search_request_direction.py b/sdks/python/julep/api/types/user_docs_search_route_search_request_direction.py deleted file mode 100644 index 3a2ef70a8..000000000 --- a/sdks/python/julep/api/types/user_docs_search_route_search_request_direction.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -UserDocsSearchRouteSearchRequestDirection = typing.Union[ - typing.Literal["asc", "desc"], typing.Any -] diff --git a/sdks/python/julep/api/types/user_docs_search_route_search_request_sort_by.py b/sdks/python/julep/api/types/user_docs_search_route_search_request_sort_by.py deleted file mode 100644 index 8cf9538a6..000000000 --- a/sdks/python/julep/api/types/user_docs_search_route_search_request_sort_by.py +++ /dev/null @@ -1,7 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -UserDocsSearchRouteSearchRequestSortBy = typing.Union[ - typing.Literal["created_at", "updated_at"], typing.Any -] diff --git a/sdks/python/julep/api/types/users_create_or_update_user_request.py b/sdks/python/julep/api/types/users_create_or_update_user_request.py new file mode 100644 index 000000000..18e694bd6 --- /dev/null +++ b/sdks/python/julep/api/types/users_create_or_update_user_request.py @@ -0,0 +1,46 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +from ..core.datetime_utils import serialize_datetime +from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1 +from .common_uuid import CommonUuid +from .users_create_user_request import UsersCreateUserRequest + + +class UsersCreateOrUpdateUserRequest(UsersCreateUserRequest): + id: CommonUuid + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults_exclude_unset: typing.Any = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + kwargs_with_defaults_exclude_none: typing.Any = { + "by_alias": True, + "exclude_none": True, + **kwargs, + } + + return deep_union_pydantic_dicts( + super().dict(**kwargs_with_defaults_exclude_unset), + super().dict(**kwargs_with_defaults_exclude_none), + ) + + class Config: + frozen = True + smart_union = True + allow_population_by_field_name = True + populate_by_name = True + extra = pydantic_v1.Extra.allow + json_encoders = {dt.datetime: serialize_datetime} diff --git a/sdks/python/julep/api/types/users_update_user_request.py b/sdks/python/julep/api/types/users_create_user_request.py similarity index 93% rename from sdks/python/julep/api/types/users_update_user_request.py rename to sdks/python/julep/api/types/users_create_user_request.py index f747a2093..a9abbb461 100644 --- a/sdks/python/julep/api/types/users_update_user_request.py +++ b/sdks/python/julep/api/types/users_create_user_request.py @@ -8,9 +8,9 @@ from .common_identifier_safe_unicode import CommonIdentifierSafeUnicode -class UsersUpdateUserRequest(pydantic_v1.BaseModel): +class UsersCreateUserRequest(pydantic_v1.BaseModel): """ - Payload for updating a user + Payload for creating a user (and associated documents) """ metadata: typing.Optional[typing.Dict[str, typing.Any]] = None diff --git a/sdks/python/julep/api/types/users_route_list_response.py b/sdks/python/julep/api/types/users_route_list_response.py index 7cb75ac18..77b2b9fb5 100644 --- a/sdks/python/julep/api/types/users_route_list_response.py +++ b/sdks/python/julep/api/types/users_route_list_response.py @@ -9,7 +9,7 @@ class UsersRouteListResponse(pydantic_v1.BaseModel): - results: typing.List[UsersUser] + items: typing.List[UsersUser] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = { diff --git a/sdks/python/poetry.lock b/sdks/python/poetry.lock index 59d71e24a..c9b836639 100644 --- a/sdks/python/poetry.lock +++ b/sdks/python/poetry.lock @@ -154,32 +154,32 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.extras] @@ -312,63 +312,78 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -577,33 +592,33 @@ develop = ["coverage", "invoke", "path.py", "pylint", "pytest (>=3.2)", "pytest- [[package]] name = "debugpy" -version = "1.8.2" +version = "1.8.5" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"}, - {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"}, - {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"}, - {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"}, - {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"}, - {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"}, - {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"}, - {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"}, - {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"}, - {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"}, - {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"}, - {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"}, - {file = "debugpy-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d"}, - {file = "debugpy-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02"}, - {file = "debugpy-1.8.2-cp38-cp38-win32.whl", hash = "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031"}, - {file = "debugpy-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210"}, - {file = "debugpy-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9"}, - {file = "debugpy-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1"}, - {file = "debugpy-1.8.2-cp39-cp39-win32.whl", hash = "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326"}, - {file = "debugpy-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755"}, - {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"}, - {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"}, + {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, + {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, + {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, + {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, + {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, + {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, + {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, + {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, + {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, + {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, + {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, + {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, + {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, + {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, + {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, + {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, + {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, + {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, + {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, + {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, + {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, + {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, ] [[package]] @@ -789,13 +804,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -814,13 +829,13 @@ networkx = ">=2" [[package]] name = "importlib-metadata" -version = "8.2.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] @@ -833,21 +848,25 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p [[package]] name = "importlib-resources" -version = "6.4.0" +version = "6.4.4" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, - {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, + {file = "importlib_resources-6.4.4-py3-none-any.whl", hash = "sha256:dda242603d1c9cd836c3368b1174ed74cb4049ecd209e7a1a0104620c18c5c11"}, + {file = "importlib_resources-6.4.4.tar.gz", hash = "sha256:20600c8b7361938dc0bb2d5ec0297802e575df486f5a544fa414da65e13721f7"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] [[package]] name = "ipykernel" @@ -971,6 +990,76 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jiter" +version = "0.5.0" +description = "Fast iterable JSON parser." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, + {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, + {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, + {file = "jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, + {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9f664e7351604f91dcdd557603c57fc0d551bc65cc0a732fdacbf73ad335049a"}, + {file = "jiter-0.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:044f2f1148b5248ad2c8c3afb43430dccf676c5a5834d2f5089a4e6c5bbd64df"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:702e3520384c88b6e270c55c772d4bd6d7b150608dcc94dea87ceba1b6391248"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:528d742dcde73fad9d63e8242c036ab4a84389a56e04efd854062b660f559544"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf80e5fe6ab582c82f0c3331df27a7e1565e2dcf06265afd5173d809cdbf9ba"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44dfc9ddfb9b51a5626568ef4e55ada462b7328996294fe4d36de02fce42721f"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c451f7922992751a936b96c5f5b9bb9312243d9b754c34b33d0cb72c84669f4e"}, + {file = "jiter-0.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:308fce789a2f093dca1ff91ac391f11a9f99c35369117ad5a5c6c4903e1b3e3a"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7f5ad4a7c6b0d90776fdefa294f662e8a86871e601309643de30bf94bb93a64e"}, + {file = "jiter-0.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ea189db75f8eca08807d02ae27929e890c7d47599ce3d0a6a5d41f2419ecf338"}, + {file = "jiter-0.5.0-cp312-none-win32.whl", hash = "sha256:e3bbe3910c724b877846186c25fe3c802e105a2c1fc2b57d6688b9f8772026e4"}, + {file = "jiter-0.5.0-cp312-none-win_amd64.whl", hash = "sha256:a586832f70c3f1481732919215f36d41c59ca080fa27a65cf23d9490e75b2ef5"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f04bc2fc50dc77be9d10f73fcc4e39346402ffe21726ff41028f36e179b587e6"}, + {file = "jiter-0.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f433a4169ad22fcb550b11179bb2b4fd405de9b982601914ef448390b2954f3"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad4a6398c85d3a20067e6c69890ca01f68659da94d74c800298581724e426c7e"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6baa88334e7af3f4d7a5c66c3a63808e5efbc3698a1c57626541ddd22f8e4fbf"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ece0a115c05efca597c6d938f88c9357c843f8c245dbbb53361a1c01afd7148"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:335942557162ad372cc367ffaf93217117401bf930483b4b3ebdb1223dbddfa7"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649b0ee97a6e6da174bffcb3c8c051a5935d7d4f2f52ea1583b5b3e7822fbf14"}, + {file = "jiter-0.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4be354c5de82157886ca7f5925dbda369b77344b4b4adf2723079715f823989"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5206144578831a6de278a38896864ded4ed96af66e1e63ec5dd7f4a1fce38a3a"}, + {file = "jiter-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8120c60f8121ac3d6f072b97ef0e71770cc72b3c23084c72c4189428b1b1d3b6"}, + {file = "jiter-0.5.0-cp38-none-win32.whl", hash = "sha256:6f1223f88b6d76b519cb033a4d3687ca157c272ec5d6015c322fc5b3074d8a5e"}, + {file = "jiter-0.5.0-cp38-none-win_amd64.whl", hash = "sha256:c59614b225d9f434ea8fc0d0bec51ef5fa8c83679afedc0433905994fb36d631"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, + {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, + {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, + {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, +] + [[package]] name = "json5" version = "0.9.25" @@ -1381,13 +1470,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.21.3" +version = "3.22.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.8" files = [ - {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, - {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, + {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, + {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, ] [package.dependencies] @@ -1395,7 +1484,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -1664,25 +1753,87 @@ files = [ {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, ] +[[package]] +name = "numpy" +version = "2.1.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6326ab99b52fafdcdeccf602d6286191a79fe2fda0ae90573c5814cd2b0bc1b8"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0937e54c09f7a9a68da6889362ddd2ff584c02d015ec92672c099b61555f8911"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:30014b234f07b5fec20f4146f69e13cfb1e33ee9a18a1879a0142fbb00d47673"}, + {file = "numpy-2.1.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:899da829b362ade41e1e7eccad2cf274035e1cb36ba73034946fccd4afd8606b"}, + {file = "numpy-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08801848a40aea24ce16c2ecde3b756f9ad756586fb2d13210939eb69b023f5b"}, + {file = "numpy-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:398049e237d1aae53d82a416dade04defed1a47f87d18d5bd615b6e7d7e41d1f"}, + {file = "numpy-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0abb3916a35d9090088a748636b2c06dc9a6542f99cd476979fb156a18192b84"}, + {file = "numpy-2.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10e2350aea18d04832319aac0f887d5fcec1b36abd485d14f173e3e900b83e33"}, + {file = "numpy-2.1.0-cp310-cp310-win32.whl", hash = "sha256:f6b26e6c3b98adb648243670fddc8cab6ae17473f9dc58c51574af3e64d61211"}, + {file = "numpy-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:f505264735ee074250a9c78247ee8618292091d9d1fcc023290e9ac67e8f1afa"}, + {file = "numpy-2.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:76368c788ccb4f4782cf9c842b316140142b4cbf22ff8db82724e82fe1205dce"}, + {file = "numpy-2.1.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f8e93a01a35be08d31ae33021e5268f157a2d60ebd643cfc15de6ab8e4722eb1"}, + {file = "numpy-2.1.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9523f8b46485db6939bd069b28b642fec86c30909cea90ef550373787f79530e"}, + {file = "numpy-2.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54139e0eb219f52f60656d163cbe67c31ede51d13236c950145473504fa208cb"}, + {file = "numpy-2.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebbf9fbdabed208d4ecd2e1dfd2c0741af2f876e7ae522c2537d404ca895c3"}, + {file = "numpy-2.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:378cb4f24c7d93066ee4103204f73ed046eb88f9ad5bb2275bb9fa0f6a02bd36"}, + {file = "numpy-2.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8f699a709120b220dfe173f79c73cb2a2cab2c0b88dd59d7b49407d032b8ebd"}, + {file = "numpy-2.1.0-cp311-cp311-win32.whl", hash = "sha256:ffbd6faeb190aaf2b5e9024bac9622d2ee549b7ec89ef3a9373fa35313d44e0e"}, + {file = "numpy-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0af3a5987f59d9c529c022c8c2a64805b339b7ef506509fba7d0556649b9714b"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe76d75b345dc045acdbc006adcb197cc680754afd6c259de60d358d60c93736"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f358ea9e47eb3c2d6eba121ab512dfff38a88db719c38d1e67349af210bc7529"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:dd94ce596bda40a9618324547cfaaf6650b1a24f5390350142499aa4e34e53d1"}, + {file = "numpy-2.1.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b47c551c6724960479cefd7353656498b86e7232429e3a41ab83be4da1b109e8"}, + {file = "numpy-2.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0756a179afa766ad7cb6f036de622e8a8f16ffdd55aa31f296c870b5679d745"}, + {file = "numpy-2.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24003ba8ff22ea29a8c306e61d316ac74111cebf942afbf692df65509a05f111"}, + {file = "numpy-2.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b34fa5e3b5d6dc7e0a4243fa0f81367027cb6f4a7215a17852979634b5544ee0"}, + {file = "numpy-2.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4f982715e65036c34897eb598d64aef15150c447be2cfc6643ec7a11af06574"}, + {file = "numpy-2.1.0-cp312-cp312-win32.whl", hash = "sha256:c4cd94dfefbefec3f8b544f61286584292d740e6e9d4677769bc76b8f41deb02"}, + {file = "numpy-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0cdef204199278f5c461a0bed6ed2e052998276e6d8ab2963d5b5c39a0500bc"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8ab81ccd753859ab89e67199b9da62c543850f819993761c1e94a75a814ed667"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:442596f01913656d579309edcd179a2a2f9977d9a14ff41d042475280fc7f34e"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:848c6b5cad9898e4b9ef251b6f934fa34630371f2e916261070a4eb9092ffd33"}, + {file = "numpy-2.1.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:54c6a63e9d81efe64bfb7bcb0ec64332a87d0b87575f6009c8ba67ea6374770b"}, + {file = "numpy-2.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:652e92fc409e278abdd61e9505649e3938f6d04ce7ef1953f2ec598a50e7c195"}, + {file = "numpy-2.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab32eb9170bf8ffcbb14f11613f4a0b108d3ffee0832457c5d4808233ba8977"}, + {file = "numpy-2.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8fb49a0ba4d8f41198ae2d52118b050fd34dace4b8f3fb0ee34e23eb4ae775b1"}, + {file = "numpy-2.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44e44973262dc3ae79e9063a1284a73e09d01b894b534a769732ccd46c28cc62"}, + {file = "numpy-2.1.0-cp313-cp313-win32.whl", hash = "sha256:ab83adc099ec62e044b1fbb3a05499fa1e99f6d53a1dde102b2d85eff66ed324"}, + {file = "numpy-2.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:de844aaa4815b78f6023832590d77da0e3b6805c644c33ce94a1e449f16d6ab5"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:343e3e152bf5a087511cd325e3b7ecfd5b92d369e80e74c12cd87826e263ec06"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f07fa2f15dabe91259828ce7d71b5ca9e2eb7c8c26baa822c825ce43552f4883"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5474dad8c86ee9ba9bb776f4b99ef2d41b3b8f4e0d199d4f7304728ed34d0300"}, + {file = "numpy-2.1.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1f817c71683fd1bb5cff1529a1d085a57f02ccd2ebc5cd2c566f9a01118e3b7d"}, + {file = "numpy-2.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a3336fbfa0d38d3deacd3fe7f3d07e13597f29c13abf4d15c3b6dc2291cbbdd"}, + {file = "numpy-2.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a894c51fd8c4e834f00ac742abad73fc485df1062f1b875661a3c1e1fb1c2f6"}, + {file = "numpy-2.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:9156ca1f79fc4acc226696e95bfcc2b486f165a6a59ebe22b2c1f82ab190384a"}, + {file = "numpy-2.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:624884b572dff8ca8f60fab591413f077471de64e376b17d291b19f56504b2bb"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:15ef8b2177eeb7e37dd5ef4016f30b7659c57c2c0b57a779f1d537ff33a72c7b"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e5f0642cdf4636198a4990de7a71b693d824c56a757862230454629cf62e323d"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15976718c004466406342789f31b6673776360f3b1e3c575f25302d7e789575"}, + {file = "numpy-2.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6c1de77ded79fef664d5098a66810d4d27ca0224e9051906e634b3f7ead134c2"}, + {file = "numpy-2.1.0.tar.gz", hash = "sha256:7dc90da0081f7e1da49ec4e398ede6a8e9cc4f5ebe5f9e06b443ed889ee9aaa2"}, +] + [[package]] name = "openai" -version = "1.38.0" +version = "1.42.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.38.0-py3-none-any.whl", hash = "sha256:a19ef052f1676320f52183ae6f9775da6d888fbe3aec57886117163c095d9f7c"}, - {file = "openai-1.38.0.tar.gz", hash = "sha256:30fb324bf452ecb1194ca7dbc64566a4d7aa054c6a5da857937ede7d517a220b"}, + {file = "openai-1.42.0-py3-none-any.whl", hash = "sha256:dc91e0307033a4f94931e5d03cc3b29b9717014ad5e73f9f2051b6cb5eda4d80"}, + {file = "openai-1.42.0.tar.gz", hash = "sha256:c9d31853b4e0bc2dc8bd08003b462a006035655a701471695d0bfdc08529cde3"}, ] [package.dependencies] anyio = ">=3.5.0,<5" distro = ">=1.7.0,<2" httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tqdm = ">4" -typing-extensions = ">=4.7,<5" +typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] @@ -2223,13 +2374,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyright" -version = "1.1.374" +version = "1.1.377" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.374-py3-none-any.whl", hash = "sha256:55752bcf7a3646d293cd76710a983b71e16f6128aab2d42468e6eb7e46c0a70d"}, - {file = "pyright-1.1.374.tar.gz", hash = "sha256:d01b2daf864ba5e0362e56b844984865970d7204158e61eb685e2dab7804cb82"}, + {file = "pyright-1.1.377-py3-none-any.whl", hash = "sha256:af0dd2b6b636c383a6569a083f8c5a8748ae4dcde5df7914b3f3f267e14dd162"}, + {file = "pyright-1.1.377.tar.gz", hash = "sha256:aabc30fedce0ded34baa0c49b24f10e68f4bfc8f68ae7f3d175c4b0f256b4fcf"}, ] [package.dependencies] @@ -2363,158 +2514,182 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyzmq" -version = "26.0.3" +version = "26.2.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, - {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, - {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, - {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, - {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, - {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, - {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, - {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, - {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, - {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, - {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, - {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, - {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, - {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, ] [package.dependencies] @@ -2601,114 +2776,114 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.19.1" +version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aaf71f95b21f9dc708123335df22e5a2fef6307e3e6f9ed773b2e0938cc4d491"}, - {file = "rpds_py-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca0dda0c5715efe2ab35bb83f813f681ebcd2840d8b1b92bfc6fe3ab382fae4a"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81db2e7282cc0487f500d4db203edc57da81acde9e35f061d69ed983228ffe3b"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a8dfa125b60ec00c7c9baef945bb04abf8ac772d8ebefd79dae2a5f316d7850"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271accf41b02687cef26367c775ab220372ee0f4925591c6796e7c148c50cab5"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9bc4161bd3b970cd6a6fcda70583ad4afd10f2750609fb1f3ca9505050d4ef3"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0cf2a0dbb5987da4bd92a7ca727eadb225581dd9681365beba9accbe5308f7d"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5e28e56143750808c1c79c70a16519e9bc0a68b623197b96292b21b62d6055c"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c7af6f7b80f687b33a4cdb0a785a5d4de1fb027a44c9a049d8eb67d5bfe8a687"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e429fc517a1c5e2a70d576077231538a98d59a45dfc552d1ac45a132844e6dfb"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d2dbd8f4990d4788cb122f63bf000357533f34860d269c1a8e90ae362090ff3a"}, - {file = "rpds_py-0.19.1-cp310-none-win32.whl", hash = "sha256:e0f9d268b19e8f61bf42a1da48276bcd05f7ab5560311f541d22557f8227b866"}, - {file = "rpds_py-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:df7c841813f6265e636fe548a49664c77af31ddfa0085515326342a751a6ba51"}, - {file = "rpds_py-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:902cf4739458852fe917104365ec0efbea7d29a15e4276c96a8d33e6ed8ec137"}, - {file = "rpds_py-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3d73022990ab0c8b172cce57c69fd9a89c24fd473a5e79cbce92df87e3d9c48"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3837c63dd6918a24de6c526277910e3766d8c2b1627c500b155f3eecad8fad65"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cdb7eb3cf3deb3dd9e7b8749323b5d970052711f9e1e9f36364163627f96da58"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26ab43b6d65d25b1a333c8d1b1c2f8399385ff683a35ab5e274ba7b8bb7dc61c"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75130df05aae7a7ac171b3b5b24714cffeabd054ad2ebc18870b3aa4526eba23"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c34f751bf67cab69638564eee34023909380ba3e0d8ee7f6fe473079bf93f09b"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2671cb47e50a97f419a02cd1e0c339b31de017b033186358db92f4d8e2e17d8"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c73254c256081704dba0a333457e2fb815364018788f9b501efe7c5e0ada401"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4383beb4a29935b8fa28aca8fa84c956bf545cb0c46307b091b8d312a9150e6a"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dbceedcf4a9329cc665452db1aaf0845b85c666e4885b92ee0cddb1dbf7e052a"}, - {file = "rpds_py-0.19.1-cp311-none-win32.whl", hash = "sha256:f0a6d4a93d2a05daec7cb885157c97bbb0be4da739d6f9dfb02e101eb40921cd"}, - {file = "rpds_py-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:c149a652aeac4902ecff2dd93c3b2681c608bd5208c793c4a99404b3e1afc87c"}, - {file = "rpds_py-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:56313be667a837ff1ea3508cebb1ef6681d418fa2913a0635386cf29cff35165"}, - {file = "rpds_py-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d1d7539043b2b31307f2c6c72957a97c839a88b2629a348ebabe5aa8b626d6b"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1dc59a5e7bc7f44bd0c048681f5e05356e479c50be4f2c1a7089103f1621d5"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8f78398e67a7227aefa95f876481485403eb974b29e9dc38b307bb6eb2315ea"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef07a0a1d254eeb16455d839cef6e8c2ed127f47f014bbda64a58b5482b6c836"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8124101e92c56827bebef084ff106e8ea11c743256149a95b9fd860d3a4f331f"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08ce9c95a0b093b7aec75676b356a27879901488abc27e9d029273d280438505"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b02dd77a2de6e49078c8937aadabe933ceac04b41c5dde5eca13a69f3cf144e"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4dd02e29c8cbed21a1875330b07246b71121a1c08e29f0ee3db5b4cfe16980c4"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9c7042488165f7251dc7894cd533a875d2875af6d3b0e09eda9c4b334627ad1c"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f809a17cc78bd331e137caa25262b507225854073fd319e987bd216bed911b7c"}, - {file = "rpds_py-0.19.1-cp312-none-win32.whl", hash = "sha256:3ddab996807c6b4227967fe1587febade4e48ac47bb0e2d3e7858bc621b1cace"}, - {file = "rpds_py-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:32e0db3d6e4f45601b58e4ac75c6f24afbf99818c647cc2066f3e4b192dabb1f"}, - {file = "rpds_py-0.19.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:747251e428406b05fc86fee3904ee19550c4d2d19258cef274e2151f31ae9d38"}, - {file = "rpds_py-0.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dc733d35f861f8d78abfaf54035461e10423422999b360966bf1c443cbc42705"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbda75f245caecff8faa7e32ee94dfaa8312a3367397975527f29654cd17a6ed"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd04d8cab16cab5b0a9ffc7d10f0779cf1120ab16c3925404428f74a0a43205a"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2d66eb41ffca6cc3c91d8387509d27ba73ad28371ef90255c50cb51f8953301"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdf4890cda3b59170009d012fca3294c00140e7f2abe1910e6a730809d0f3f9b"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1fa67ef839bad3815124f5f57e48cd50ff392f4911a9f3cf449d66fa3df62a5"}, - {file = "rpds_py-0.19.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b82c9514c6d74b89a370c4060bdb80d2299bc6857e462e4a215b4ef7aa7b090e"}, - {file = "rpds_py-0.19.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c7b07959866a6afb019abb9564d8a55046feb7a84506c74a6f197cbcdf8a208e"}, - {file = "rpds_py-0.19.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4f580ae79d0b861dfd912494ab9d477bea535bfb4756a2269130b6607a21802e"}, - {file = "rpds_py-0.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c6d20c8896c00775e6f62d8373aba32956aa0b850d02b5ec493f486c88e12859"}, - {file = "rpds_py-0.19.1-cp313-none-win32.whl", hash = "sha256:afedc35fe4b9e30ab240b208bb9dc8938cb4afe9187589e8d8d085e1aacb8309"}, - {file = "rpds_py-0.19.1-cp313-none-win_amd64.whl", hash = "sha256:1d4af2eb520d759f48f1073ad3caef997d1bfd910dc34e41261a595d3f038a94"}, - {file = "rpds_py-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:34bca66e2e3eabc8a19e9afe0d3e77789733c702c7c43cd008e953d5d1463fde"}, - {file = "rpds_py-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:24f8ae92c7fae7c28d0fae9b52829235df83f34847aa8160a47eb229d9666c7b"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71157f9db7f6bc6599a852852f3389343bea34315b4e6f109e5cbc97c1fb2963"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d494887d40dc4dd0d5a71e9d07324e5c09c4383d93942d391727e7a40ff810b"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3661e6d4ba63a094138032c1356d557de5b3ea6fd3cca62a195f623e381c76"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97fbb77eaeb97591efdc654b8b5f3ccc066406ccfb3175b41382f221ecc216e8"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cc4bc73e53af8e7a42c8fd7923bbe35babacfa7394ae9240b3430b5dcf16b2a"}, - {file = "rpds_py-0.19.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:35af5e4d5448fa179fd7fff0bba0fba51f876cd55212f96c8bbcecc5c684ae5c"}, - {file = "rpds_py-0.19.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3511f6baf8438326e351097cecd137eb45c5f019944fe0fd0ae2fea2fd26be39"}, - {file = "rpds_py-0.19.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:57863d16187995c10fe9cf911b897ed443ac68189179541734502353af33e693"}, - {file = "rpds_py-0.19.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9e318e6786b1e750a62f90c6f7fa8b542102bdcf97c7c4de2a48b50b61bd36ec"}, - {file = "rpds_py-0.19.1-cp38-none-win32.whl", hash = "sha256:53dbc35808c6faa2ce3e48571f8f74ef70802218554884787b86a30947842a14"}, - {file = "rpds_py-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:8df1c283e57c9cb4d271fdc1875f4a58a143a2d1698eb0d6b7c0d7d5f49c53a1"}, - {file = "rpds_py-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e76c902d229a3aa9d5ceb813e1cbcc69bf5bda44c80d574ff1ac1fa3136dea71"}, - {file = "rpds_py-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de1f7cd5b6b351e1afd7568bdab94934d656abe273d66cda0ceea43bbc02a0c2"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fc5a84777cb61692d17988989690d6f34f7f95968ac81398d67c0d0994a897"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:74129d5ffc4cde992d89d345f7f7d6758320e5d44a369d74d83493429dad2de5"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e360188b72f8080fefa3adfdcf3618604cc8173651c9754f189fece068d2a45"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13e6d4840897d4e4e6b2aa1443e3a8eca92b0402182aafc5f4ca1f5e24f9270a"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f09529d2332264a902688031a83c19de8fda5eb5881e44233286b9c9ec91856d"}, - {file = "rpds_py-0.19.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d4b52811dcbc1aba08fd88d475f75b4f6db0984ba12275d9bed1a04b2cae9b5"}, - {file = "rpds_py-0.19.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd635c2c4043222d80d80ca1ac4530a633102a9f2ad12252183bcf338c1b9474"}, - {file = "rpds_py-0.19.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f35b34a5184d5e0cc360b61664c1c06e866aab077b5a7c538a3e20c8fcdbf90b"}, - {file = "rpds_py-0.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d4ec0046facab83012d821b33cead742a35b54575c4edfb7ed7445f63441835f"}, - {file = "rpds_py-0.19.1-cp39-none-win32.whl", hash = "sha256:f5b8353ea1a4d7dfb59a7f45c04df66ecfd363bb5b35f33b11ea579111d4655f"}, - {file = "rpds_py-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:1fb93d3486f793d54a094e2bfd9cd97031f63fcb5bc18faeb3dd4b49a1c06523"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d5c7e32f3ee42f77d8ff1a10384b5cdcc2d37035e2e3320ded909aa192d32c3"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:89cc8921a4a5028d6dd388c399fcd2eef232e7040345af3d5b16c04b91cf3c7e"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca34e913d27401bda2a6f390d0614049f5a95b3b11cd8eff80fe4ec340a1208"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5953391af1405f968eb5701ebbb577ebc5ced8d0041406f9052638bafe52209d"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:840e18c38098221ea6201f091fc5d4de6128961d2930fbbc96806fb43f69aec1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d8b735c4d162dc7d86a9cf3d717f14b6c73637a1f9cd57fe7e61002d9cb1972"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce757c7c90d35719b38fa3d4ca55654a76a40716ee299b0865f2de21c146801c"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9421b23c85f361a133aa7c5e8ec757668f70343f4ed8fdb5a4a14abd5437244"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3b823be829407393d84ee56dc849dbe3b31b6a326f388e171555b262e8456cc1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:5e58b61dcbb483a442c6239c3836696b79f2cd8e7eec11e12155d3f6f2d886d1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39d67896f7235b2c886fb1ee77b1491b77049dcef6fbf0f401e7b4cbed86bbd4"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8b32cd4ab6db50c875001ba4f5a6b30c0f42151aa1fbf9c2e7e3674893fb1dc4"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c32e41de995f39b6b315d66c27dea3ef7f7c937c06caab4c6a79a5e09e2c415"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a129c02b42d46758c87faeea21a9f574e1c858b9f358b6dd0bbd71d17713175"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:346557f5b1d8fd9966059b7a748fd79ac59f5752cd0e9498d6a40e3ac1c1875f"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31e450840f2f27699d014cfc8865cc747184286b26d945bcea6042bb6aa4d26e"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01227f8b3e6c8961490d869aa65c99653df80d2f0a7fde8c64ebddab2b9b02fd"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69084fd29bfeff14816666c93a466e85414fe6b7d236cfc108a9c11afa6f7301"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d2b88efe65544a7d5121b0c3b003ebba92bfede2ea3577ce548b69c5235185"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ea961a674172ed2235d990d7edf85d15d8dfa23ab8575e48306371c070cda67"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5beffdbe766cfe4fb04f30644d822a1080b5359df7db3a63d30fa928375b2720"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:720f3108fb1bfa32e51db58b832898372eb5891e8472a8093008010911e324c5"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c2087dbb76a87ec2c619253e021e4fb20d1a72580feeaa6892b0b3d955175a71"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ddd50f18ebc05ec29a0d9271e9dbe93997536da3546677f8ca00b76d477680c"}, - {file = "rpds_py-0.19.1.tar.gz", hash = "sha256:31dd5794837f00b46f4096aa8ccaa5972f73a938982e32ed817bb520c465e520"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -2755,19 +2930,19 @@ win32 = ["pywin32"] [[package]] name = "setuptools" -version = "72.1.0" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "shellingham" @@ -2804,13 +2979,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] @@ -2929,13 +3104,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -3014,13 +3189,13 @@ files = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20240316" +version = "2.9.0.20240821" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, - {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, + {file = "types-python-dateutil-2.9.0.20240821.tar.gz", hash = "sha256:9649d1dcb6fef1046fb18bebe9ea2aa0028b160918518c34589a46045f6ebd98"}, + {file = "types_python_dateutil-2.9.0.20240821-py3-none-any.whl", hash = "sha256:f5889fcb4e63ed4aaa379b44f93c32593d50b9a94c9a60a0c854d8cc3511cd57"}, ] [[package]] @@ -3110,13 +3285,13 @@ files = [ [[package]] name = "webcolors" -version = "24.6.0" +version = "24.8.0" description = "A library for working with the color formats defined by HTML and CSS." optional = false python-versions = ">=3.8" files = [ - {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, - {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, + {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, + {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, ] [package.extras] @@ -3152,13 +3327,13 @@ test = ["websockets"] [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] diff --git a/sdks/ts/src/api/index.ts b/sdks/ts/src/api/index.ts index 61b146424..7b2e94710 100644 --- a/sdks/ts/src/api/index.ts +++ b/sdks/ts/src/api/index.ts @@ -12,27 +12,30 @@ export type { OpenAPIConfig } from "./core/OpenAPI"; export type { Agents_Agent } from "./models/Agents_Agent"; export type { Agents_CreateAgentRequest } from "./models/Agents_CreateAgentRequest"; +export type { Agents_CreateOrUpdateAgentRequest } from "./models/Agents_CreateOrUpdateAgentRequest"; export type { Agents_CreateOrUpdateAgentRequest_id } from "./models/Agents_CreateOrUpdateAgentRequest_id"; export type { Agents_PatchAgentRequest } from "./models/Agents_PatchAgentRequest"; export type { Agents_UpdateAgentRequest } from "./models/Agents_UpdateAgentRequest"; export type { Chat_BaseChatOutput } from "./models/Chat_BaseChatOutput"; export type { Chat_BaseChatResponse } from "./models/Chat_BaseChatResponse"; export type { Chat_BaseTokenLogProb } from "./models/Chat_BaseTokenLogProb"; +export type { Chat_ChatInput } from "./models/Chat_ChatInput"; +export type { Chat_ChatInputData } from "./models/Chat_ChatInputData"; export type { Chat_ChatOutputChunk } from "./models/Chat_ChatOutputChunk"; +export type { Chat_ChatSettings } from "./models/Chat_ChatSettings"; export type { Chat_ChunkChatResponse } from "./models/Chat_ChunkChatResponse"; export type { Chat_CompetionUsage } from "./models/Chat_CompetionUsage"; export type { Chat_CompletionResponseFormat } from "./models/Chat_CompletionResponseFormat"; +export type { Chat_DefaultChatSettings } from "./models/Chat_DefaultChatSettings"; export type { Chat_FinishReason } from "./models/Chat_FinishReason"; -export type { Chat_GenerationPreset } from "./models/Chat_GenerationPreset"; -export type { Chat_GenerationPresetSettings } from "./models/Chat_GenerationPresetSettings"; export type { Chat_LogProbResponse } from "./models/Chat_LogProbResponse"; export type { Chat_MessageChatResponse } from "./models/Chat_MessageChatResponse"; export type { Chat_MultipleChatOutput } from "./models/Chat_MultipleChatOutput"; export type { Chat_OpenAISettings } from "./models/Chat_OpenAISettings"; export type { Chat_SingleChatOutput } from "./models/Chat_SingleChatOutput"; export type { Chat_TokenLogProb } from "./models/Chat_TokenLogProb"; -export type { Chat_vLLMSettings } from "./models/Chat_vLLMSettings"; export type { Common_identifierSafeUnicode } from "./models/Common_identifierSafeUnicode"; +export type { Common_JinjaTemplate } from "./models/Common_JinjaTemplate"; export type { Common_limit } from "./models/Common_limit"; export type { Common_logit_bias } from "./models/Common_logit_bias"; export type { Common_offset } from "./models/Common_offset"; @@ -49,24 +52,22 @@ export type { Common_toolRef } from "./models/Common_toolRef"; export type { Common_uuid } from "./models/Common_uuid"; export type { Common_validPythonIdentifier } from "./models/Common_validPythonIdentifier"; export type { Docs_BaseDocSearchRequest } from "./models/Docs_BaseDocSearchRequest"; +export type { Docs_CreateDocRequest } from "./models/Docs_CreateDocRequest"; export type { Docs_Doc } from "./models/Docs_Doc"; export type { Docs_DocOwner } from "./models/Docs_DocOwner"; export type { Docs_DocReference } from "./models/Docs_DocReference"; +export type { Docs_DocSearchResponse } from "./models/Docs_DocSearchResponse"; export type { Docs_EmbedQueryRequest } from "./models/Docs_EmbedQueryRequest"; export type { Docs_EmbedQueryResponse } from "./models/Docs_EmbedQueryResponse"; export type { Docs_HybridDocSearchRequest } from "./models/Docs_HybridDocSearchRequest"; +export type { Docs_Snippet } from "./models/Docs_Snippet"; export type { Docs_TextOnlyDocSearchRequest } from "./models/Docs_TextOnlyDocSearchRequest"; export type { Docs_VectorDocSearchRequest } from "./models/Docs_VectorDocSearchRequest"; export type { Entries_BaseEntry } from "./models/Entries_BaseEntry"; -export type { Entries_ChatMLImageContentPart } from "./models/Entries_ChatMLImageContentPart"; -export type { Entries_ChatMLMessage } from "./models/Entries_ChatMLMessage"; export type { Entries_ChatMLRole } from "./models/Entries_ChatMLRole"; -export type { Entries_ChatMLTextContentPart } from "./models/Entries_ChatMLTextContentPart"; export type { Entries_Entry } from "./models/Entries_Entry"; export type { Entries_History } from "./models/Entries_History"; export type { Entries_ImageDetail } from "./models/Entries_ImageDetail"; -export type { Entries_ImageURL } from "./models/Entries_ImageURL"; -export type { Entries_InputChatMLMessage } from "./models/Entries_InputChatMLMessage"; export type { Entries_Relation } from "./models/Entries_Relation"; export type { Executions_CreateExecutionRequest } from "./models/Executions_CreateExecutionRequest"; export type { Executions_Execution } from "./models/Executions_Execution"; @@ -74,10 +75,12 @@ export type { Executions_ResumeExecutionRequest } from "./models/Executions_Resu export type { Executions_StopExecutionRequest } from "./models/Executions_StopExecutionRequest"; export type { Executions_TaskTokenResumeExecutionRequest } from "./models/Executions_TaskTokenResumeExecutionRequest"; export type { Executions_Transition } from "./models/Executions_Transition"; +export type { Executions_TransitionTarget } from "./models/Executions_TransitionTarget"; export type { Executions_UpdateExecutionRequest } from "./models/Executions_UpdateExecutionRequest"; export type { Jobs_JobState } from "./models/Jobs_JobState"; export type { Jobs_JobStatus } from "./models/Jobs_JobStatus"; export type { Sessions_ContextOverflowType } from "./models/Sessions_ContextOverflowType"; +export type { Sessions_CreateOrUpdateSessionRequest } from "./models/Sessions_CreateOrUpdateSessionRequest"; export type { Sessions_CreateOrUpdateSessionRequest_id } from "./models/Sessions_CreateOrUpdateSessionRequest_id"; export type { Sessions_CreateSessionRequest } from "./models/Sessions_CreateSessionRequest"; export type { Sessions_MultiAgentMultiUserSession } from "./models/Sessions_MultiAgentMultiUserSession"; @@ -89,25 +92,39 @@ export type { Sessions_SingleAgentMultiUserSession } from "./models/Sessions_Sin export type { Sessions_SingleAgentNoUserSession } from "./models/Sessions_SingleAgentNoUserSession"; export type { Sessions_SingleAgentSingleUserSession } from "./models/Sessions_SingleAgentSingleUserSession"; export type { Sessions_UpdateSessionRequest } from "./models/Sessions_UpdateSessionRequest"; -export type { Tasks_BaseWorkflowStep } from "./models/Tasks_BaseWorkflowStep"; +export type { Tasks_CaseThen } from "./models/Tasks_CaseThen"; export type { Tasks_CreateOrUpdateTaskRequest_id } from "./models/Tasks_CreateOrUpdateTaskRequest_id"; export type { Tasks_CreateTaskRequest } from "./models/Tasks_CreateTaskRequest"; +export type { Tasks_EmbedStep } from "./models/Tasks_EmbedStep"; export type { Tasks_ErrorWorkflowStep } from "./models/Tasks_ErrorWorkflowStep"; export type { Tasks_EvaluateStep } from "./models/Tasks_EvaluateStep"; +export type { Tasks_ForeachDo } from "./models/Tasks_ForeachDo"; +export type { Tasks_ForeachStep } from "./models/Tasks_ForeachStep"; +export type { Tasks_GetStep } from "./models/Tasks_GetStep"; export type { Tasks_IfElseWorkflowStep } from "./models/Tasks_IfElseWorkflowStep"; +export type { Tasks_LogStep } from "./models/Tasks_LogStep"; +export type { Tasks_ParallelStep } from "./models/Tasks_ParallelStep"; export type { Tasks_PatchTaskRequest } from "./models/Tasks_PatchTaskRequest"; export type { Tasks_PromptStep } from "./models/Tasks_PromptStep"; +export type { Tasks_ReturnStep } from "./models/Tasks_ReturnStep"; +export type { Tasks_SearchStep } from "./models/Tasks_SearchStep"; +export type { Tasks_SetKey } from "./models/Tasks_SetKey"; +export type { Tasks_SetStep } from "./models/Tasks_SetStep"; +export type { Tasks_SleepFor } from "./models/Tasks_SleepFor"; +export type { Tasks_SleepStep } from "./models/Tasks_SleepStep"; +export type { Tasks_SwitchStep } from "./models/Tasks_SwitchStep"; export type { Tasks_Task } from "./models/Tasks_Task"; export type { Tasks_TaskTool } from "./models/Tasks_TaskTool"; export type { Tasks_ToolCallStep } from "./models/Tasks_ToolCallStep"; export type { Tasks_UpdateTaskRequest } from "./models/Tasks_UpdateTaskRequest"; +export type { Tasks_WaitForInputInfo } from "./models/Tasks_WaitForInputInfo"; export type { Tasks_WaitForInputStep } from "./models/Tasks_WaitForInputStep"; export type { Tasks_YieldStep } from "./models/Tasks_YieldStep"; export type { Tools_ChosenFunctionCall } from "./models/Tools_ChosenFunctionCall"; export type { Tools_ChosenToolCall } from "./models/Tools_ChosenToolCall"; +export type { Tools_CreateToolRequest } from "./models/Tools_CreateToolRequest"; export type { Tools_FunctionCallOption } from "./models/Tools_FunctionCallOption"; export type { Tools_FunctionDef } from "./models/Tools_FunctionDef"; -export type { Tools_FunctionDefUpdate } from "./models/Tools_FunctionDefUpdate"; export type { Tools_FunctionTool } from "./models/Tools_FunctionTool"; export type { Tools_NamedFunctionChoice } from "./models/Tools_NamedFunctionChoice"; export type { Tools_NamedToolChoice } from "./models/Tools_NamedToolChoice"; @@ -116,7 +133,8 @@ export type { Tools_Tool } from "./models/Tools_Tool"; export type { Tools_ToolResponse } from "./models/Tools_ToolResponse"; export type { Tools_ToolType } from "./models/Tools_ToolType"; export type { Tools_UpdateToolRequest } from "./models/Tools_UpdateToolRequest"; -export type { Users_CreateOrUpdateUserRequest_id } from "./models/Users_CreateOrUpdateUserRequest_id"; +export type { Users_CreateOrUpdateUserRequest } from "./models/Users_CreateOrUpdateUserRequest"; +export type { Users_CreateOrUpdateUserRequest } from "./models/Users_CreateOrUpdateUserRequest"; export type { Users_CreateUserRequest } from "./models/Users_CreateUserRequest"; export type { Users_PatchUserRequest } from "./models/Users_PatchUserRequest"; export type { Users_UpdateUserRequest } from "./models/Users_UpdateUserRequest"; @@ -124,27 +142,30 @@ export type { Users_User } from "./models/Users_User"; export { $Agents_Agent } from "./schemas/$Agents_Agent"; export { $Agents_CreateAgentRequest } from "./schemas/$Agents_CreateAgentRequest"; +export { $Agents_CreateOrUpdateAgentRequest } from "./schemas/$Agents_CreateOrUpdateAgentRequest"; export { $Agents_CreateOrUpdateAgentRequest_id } from "./schemas/$Agents_CreateOrUpdateAgentRequest_id"; export { $Agents_PatchAgentRequest } from "./schemas/$Agents_PatchAgentRequest"; export { $Agents_UpdateAgentRequest } from "./schemas/$Agents_UpdateAgentRequest"; export { $Chat_BaseChatOutput } from "./schemas/$Chat_BaseChatOutput"; export { $Chat_BaseChatResponse } from "./schemas/$Chat_BaseChatResponse"; export { $Chat_BaseTokenLogProb } from "./schemas/$Chat_BaseTokenLogProb"; +export { $Chat_ChatInput } from "./schemas/$Chat_ChatInput"; +export { $Chat_ChatInputData } from "./schemas/$Chat_ChatInputData"; export { $Chat_ChatOutputChunk } from "./schemas/$Chat_ChatOutputChunk"; +export { $Chat_ChatSettings } from "./schemas/$Chat_ChatSettings"; export { $Chat_ChunkChatResponse } from "./schemas/$Chat_ChunkChatResponse"; export { $Chat_CompetionUsage } from "./schemas/$Chat_CompetionUsage"; export { $Chat_CompletionResponseFormat } from "./schemas/$Chat_CompletionResponseFormat"; +export { $Chat_DefaultChatSettings } from "./schemas/$Chat_DefaultChatSettings"; export { $Chat_FinishReason } from "./schemas/$Chat_FinishReason"; -export { $Chat_GenerationPreset } from "./schemas/$Chat_GenerationPreset"; -export { $Chat_GenerationPresetSettings } from "./schemas/$Chat_GenerationPresetSettings"; export { $Chat_LogProbResponse } from "./schemas/$Chat_LogProbResponse"; export { $Chat_MessageChatResponse } from "./schemas/$Chat_MessageChatResponse"; export { $Chat_MultipleChatOutput } from "./schemas/$Chat_MultipleChatOutput"; export { $Chat_OpenAISettings } from "./schemas/$Chat_OpenAISettings"; export { $Chat_SingleChatOutput } from "./schemas/$Chat_SingleChatOutput"; export { $Chat_TokenLogProb } from "./schemas/$Chat_TokenLogProb"; -export { $Chat_vLLMSettings } from "./schemas/$Chat_vLLMSettings"; export { $Common_identifierSafeUnicode } from "./schemas/$Common_identifierSafeUnicode"; +export { $Common_JinjaTemplate } from "./schemas/$Common_JinjaTemplate"; export { $Common_limit } from "./schemas/$Common_limit"; export { $Common_logit_bias } from "./schemas/$Common_logit_bias"; export { $Common_offset } from "./schemas/$Common_offset"; @@ -161,24 +182,22 @@ export { $Common_toolRef } from "./schemas/$Common_toolRef"; export { $Common_uuid } from "./schemas/$Common_uuid"; export { $Common_validPythonIdentifier } from "./schemas/$Common_validPythonIdentifier"; export { $Docs_BaseDocSearchRequest } from "./schemas/$Docs_BaseDocSearchRequest"; +export { $Docs_CreateDocRequest } from "./schemas/$Docs_CreateDocRequest"; export { $Docs_Doc } from "./schemas/$Docs_Doc"; export { $Docs_DocOwner } from "./schemas/$Docs_DocOwner"; export { $Docs_DocReference } from "./schemas/$Docs_DocReference"; +export { $Docs_DocSearchResponse } from "./schemas/$Docs_DocSearchResponse"; export { $Docs_EmbedQueryRequest } from "./schemas/$Docs_EmbedQueryRequest"; export { $Docs_EmbedQueryResponse } from "./schemas/$Docs_EmbedQueryResponse"; export { $Docs_HybridDocSearchRequest } from "./schemas/$Docs_HybridDocSearchRequest"; +export { $Docs_Snippet } from "./schemas/$Docs_Snippet"; export { $Docs_TextOnlyDocSearchRequest } from "./schemas/$Docs_TextOnlyDocSearchRequest"; export { $Docs_VectorDocSearchRequest } from "./schemas/$Docs_VectorDocSearchRequest"; export { $Entries_BaseEntry } from "./schemas/$Entries_BaseEntry"; -export { $Entries_ChatMLImageContentPart } from "./schemas/$Entries_ChatMLImageContentPart"; -export { $Entries_ChatMLMessage } from "./schemas/$Entries_ChatMLMessage"; export { $Entries_ChatMLRole } from "./schemas/$Entries_ChatMLRole"; -export { $Entries_ChatMLTextContentPart } from "./schemas/$Entries_ChatMLTextContentPart"; export { $Entries_Entry } from "./schemas/$Entries_Entry"; export { $Entries_History } from "./schemas/$Entries_History"; export { $Entries_ImageDetail } from "./schemas/$Entries_ImageDetail"; -export { $Entries_ImageURL } from "./schemas/$Entries_ImageURL"; -export { $Entries_InputChatMLMessage } from "./schemas/$Entries_InputChatMLMessage"; export { $Entries_Relation } from "./schemas/$Entries_Relation"; export { $Executions_CreateExecutionRequest } from "./schemas/$Executions_CreateExecutionRequest"; export { $Executions_Execution } from "./schemas/$Executions_Execution"; @@ -186,10 +205,12 @@ export { $Executions_ResumeExecutionRequest } from "./schemas/$Executions_Resume export { $Executions_StopExecutionRequest } from "./schemas/$Executions_StopExecutionRequest"; export { $Executions_TaskTokenResumeExecutionRequest } from "./schemas/$Executions_TaskTokenResumeExecutionRequest"; export { $Executions_Transition } from "./schemas/$Executions_Transition"; +export { $Executions_TransitionTarget } from "./schemas/$Executions_TransitionTarget"; export { $Executions_UpdateExecutionRequest } from "./schemas/$Executions_UpdateExecutionRequest"; export { $Jobs_JobState } from "./schemas/$Jobs_JobState"; export { $Jobs_JobStatus } from "./schemas/$Jobs_JobStatus"; export { $Sessions_ContextOverflowType } from "./schemas/$Sessions_ContextOverflowType"; +export { $Sessions_CreateOrUpdateSessionRequest } from "./schemas/$Sessions_CreateOrUpdateSessionRequest"; export { $Sessions_CreateOrUpdateSessionRequest_id } from "./schemas/$Sessions_CreateOrUpdateSessionRequest_id"; export { $Sessions_CreateSessionRequest } from "./schemas/$Sessions_CreateSessionRequest"; export { $Sessions_MultiAgentMultiUserSession } from "./schemas/$Sessions_MultiAgentMultiUserSession"; @@ -201,25 +222,39 @@ export { $Sessions_SingleAgentMultiUserSession } from "./schemas/$Sessions_Singl export { $Sessions_SingleAgentNoUserSession } from "./schemas/$Sessions_SingleAgentNoUserSession"; export { $Sessions_SingleAgentSingleUserSession } from "./schemas/$Sessions_SingleAgentSingleUserSession"; export { $Sessions_UpdateSessionRequest } from "./schemas/$Sessions_UpdateSessionRequest"; -export { $Tasks_BaseWorkflowStep } from "./schemas/$Tasks_BaseWorkflowStep"; +export { $Tasks_CaseThen } from "./schemas/$Tasks_CaseThen"; export { $Tasks_CreateOrUpdateTaskRequest_id } from "./schemas/$Tasks_CreateOrUpdateTaskRequest_id"; export { $Tasks_CreateTaskRequest } from "./schemas/$Tasks_CreateTaskRequest"; +export { $Tasks_EmbedStep } from "./schemas/$Tasks_EmbedStep"; export { $Tasks_ErrorWorkflowStep } from "./schemas/$Tasks_ErrorWorkflowStep"; export { $Tasks_EvaluateStep } from "./schemas/$Tasks_EvaluateStep"; +export { $Tasks_ForeachDo } from "./schemas/$Tasks_ForeachDo"; +export { $Tasks_ForeachStep } from "./schemas/$Tasks_ForeachStep"; +export { $Tasks_GetStep } from "./schemas/$Tasks_GetStep"; export { $Tasks_IfElseWorkflowStep } from "./schemas/$Tasks_IfElseWorkflowStep"; +export { $Tasks_LogStep } from "./schemas/$Tasks_LogStep"; +export { $Tasks_ParallelStep } from "./schemas/$Tasks_ParallelStep"; export { $Tasks_PatchTaskRequest } from "./schemas/$Tasks_PatchTaskRequest"; export { $Tasks_PromptStep } from "./schemas/$Tasks_PromptStep"; +export { $Tasks_ReturnStep } from "./schemas/$Tasks_ReturnStep"; +export { $Tasks_SearchStep } from "./schemas/$Tasks_SearchStep"; +export { $Tasks_SetKey } from "./schemas/$Tasks_SetKey"; +export { $Tasks_SetStep } from "./schemas/$Tasks_SetStep"; +export { $Tasks_SleepFor } from "./schemas/$Tasks_SleepFor"; +export { $Tasks_SleepStep } from "./schemas/$Tasks_SleepStep"; +export { $Tasks_SwitchStep } from "./schemas/$Tasks_SwitchStep"; export { $Tasks_Task } from "./schemas/$Tasks_Task"; export { $Tasks_TaskTool } from "./schemas/$Tasks_TaskTool"; export { $Tasks_ToolCallStep } from "./schemas/$Tasks_ToolCallStep"; export { $Tasks_UpdateTaskRequest } from "./schemas/$Tasks_UpdateTaskRequest"; +export { $Tasks_WaitForInputInfo } from "./schemas/$Tasks_WaitForInputInfo"; export { $Tasks_WaitForInputStep } from "./schemas/$Tasks_WaitForInputStep"; export { $Tasks_YieldStep } from "./schemas/$Tasks_YieldStep"; export { $Tools_ChosenFunctionCall } from "./schemas/$Tools_ChosenFunctionCall"; export { $Tools_ChosenToolCall } from "./schemas/$Tools_ChosenToolCall"; +export { $Tools_CreateToolRequest } from "./schemas/$Tools_CreateToolRequest"; export { $Tools_FunctionCallOption } from "./schemas/$Tools_FunctionCallOption"; export { $Tools_FunctionDef } from "./schemas/$Tools_FunctionDef"; -export { $Tools_FunctionDefUpdate } from "./schemas/$Tools_FunctionDefUpdate"; export { $Tools_FunctionTool } from "./schemas/$Tools_FunctionTool"; export { $Tools_NamedFunctionChoice } from "./schemas/$Tools_NamedFunctionChoice"; export { $Tools_NamedToolChoice } from "./schemas/$Tools_NamedToolChoice"; @@ -228,7 +263,8 @@ export { $Tools_Tool } from "./schemas/$Tools_Tool"; export { $Tools_ToolResponse } from "./schemas/$Tools_ToolResponse"; export { $Tools_ToolType } from "./schemas/$Tools_ToolType"; export { $Tools_UpdateToolRequest } from "./schemas/$Tools_UpdateToolRequest"; -export { $Users_CreateOrUpdateUserRequest_id } from "./schemas/$Users_CreateOrUpdateUserRequest_id"; +export { $Users_CreateOrUpdateUserRequest } from "./schemas/$Users_CreateOrUpdateUserRequest"; +export { $Users_CreateOrUpdateUserRequest } from "./schemas/$Users_CreateOrUpdateUserRequest"; export { $Users_CreateUserRequest } from "./schemas/$Users_CreateUserRequest"; export { $Users_PatchUserRequest } from "./schemas/$Users_PatchUserRequest"; export { $Users_UpdateUserRequest } from "./schemas/$Users_UpdateUserRequest"; diff --git a/sdks/ts/src/api/models/Agents_Agent.ts b/sdks/ts/src/api/models/Agents_Agent.ts index c92c84c33..bab63a6e2 100644 --- a/sdks/ts/src/api/models/Agents_Agent.ts +++ b/sdks/ts/src/api/models/Agents_Agent.ts @@ -2,9 +2,7 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Chat_GenerationPresetSettings } from "./Chat_GenerationPresetSettings"; -import type { Chat_OpenAISettings } from "./Chat_OpenAISettings"; -import type { Chat_vLLMSettings } from "./Chat_vLLMSettings"; +import type { Chat_DefaultChatSettings } from "./Chat_DefaultChatSettings"; import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; import type { Common_uuid } from "./Common_uuid"; export type Agents_Agent = { @@ -37,8 +35,5 @@ export type Agents_Agent = { /** * Default settings for all sessions created by this agent */ - default_settings?: - | Chat_GenerationPresetSettings - | Chat_OpenAISettings - | Chat_vLLMSettings; + default_settings?: Chat_DefaultChatSettings; }; diff --git a/sdks/ts/src/api/models/Agents_CreateAgentRequest.ts b/sdks/ts/src/api/models/Agents_CreateAgentRequest.ts index c9c95127b..73c576816 100644 --- a/sdks/ts/src/api/models/Agents_CreateAgentRequest.ts +++ b/sdks/ts/src/api/models/Agents_CreateAgentRequest.ts @@ -2,9 +2,7 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Chat_GenerationPresetSettings } from "./Chat_GenerationPresetSettings"; -import type { Chat_OpenAISettings } from "./Chat_OpenAISettings"; -import type { Chat_vLLMSettings } from "./Chat_vLLMSettings"; +import type { Chat_DefaultChatSettings } from "./Chat_DefaultChatSettings"; import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; /** * Payload for creating a agent (and associated documents) @@ -30,8 +28,5 @@ export type Agents_CreateAgentRequest = { /** * Default settings for all sessions created by this agent */ - default_settings?: - | Chat_GenerationPresetSettings - | Chat_OpenAISettings - | Chat_vLLMSettings; + default_settings?: Chat_DefaultChatSettings; }; diff --git a/sdks/ts/src/api/models/Agents_CreateOrUpdateAgentRequest.ts b/sdks/ts/src/api/models/Agents_CreateOrUpdateAgentRequest.ts new file mode 100644 index 000000000..19892b153 --- /dev/null +++ b/sdks/ts/src/api/models/Agents_CreateOrUpdateAgentRequest.ts @@ -0,0 +1,32 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Agents_CreateAgentRequest } from "./Agents_CreateAgentRequest"; +import type { Chat_DefaultChatSettings } from "./Chat_DefaultChatSettings"; +import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; +import type { Common_uuid } from "./Common_uuid"; +export type Agents_CreateOrUpdateAgentRequest = Agents_CreateAgentRequest & { + id: Common_uuid; + metadata?: Record; + /** + * Name of the agent + */ + name: Common_identifierSafeUnicode; + /** + * About the agent + */ + about: string; + /** + * Model name to use (gpt-4-turbo, gemini-nano etc) + */ + model: string; + /** + * Instructions for the agent + */ + instructions: string | Array; + /** + * Default settings for all sessions created by this agent + */ + default_settings?: Chat_DefaultChatSettings; +}; diff --git a/sdks/ts/src/api/models/Agents_PatchAgentRequest.ts b/sdks/ts/src/api/models/Agents_PatchAgentRequest.ts index 52f60bb04..e4334103d 100644 --- a/sdks/ts/src/api/models/Agents_PatchAgentRequest.ts +++ b/sdks/ts/src/api/models/Agents_PatchAgentRequest.ts @@ -2,9 +2,7 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Chat_GenerationPresetSettings } from "./Chat_GenerationPresetSettings"; -import type { Chat_OpenAISettings } from "./Chat_OpenAISettings"; -import type { Chat_vLLMSettings } from "./Chat_vLLMSettings"; +import type { Chat_DefaultChatSettings } from "./Chat_DefaultChatSettings"; import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; /** * Payload for patching a agent @@ -30,8 +28,5 @@ export type Agents_PatchAgentRequest = { /** * Default settings for all sessions created by this agent */ - default_settings?: - | Chat_GenerationPresetSettings - | Chat_OpenAISettings - | Chat_vLLMSettings; + default_settings?: Chat_DefaultChatSettings; }; diff --git a/sdks/ts/src/api/models/Agents_UpdateAgentRequest.ts b/sdks/ts/src/api/models/Agents_UpdateAgentRequest.ts index 1b5aa4c23..ead87abfe 100644 --- a/sdks/ts/src/api/models/Agents_UpdateAgentRequest.ts +++ b/sdks/ts/src/api/models/Agents_UpdateAgentRequest.ts @@ -2,9 +2,7 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Chat_GenerationPresetSettings } from "./Chat_GenerationPresetSettings"; -import type { Chat_OpenAISettings } from "./Chat_OpenAISettings"; -import type { Chat_vLLMSettings } from "./Chat_vLLMSettings"; +import type { Chat_DefaultChatSettings } from "./Chat_DefaultChatSettings"; import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; /** * Payload for updating a agent @@ -30,8 +28,5 @@ export type Agents_UpdateAgentRequest = { /** * Default settings for all sessions created by this agent */ - default_settings?: - | Chat_GenerationPresetSettings - | Chat_OpenAISettings - | Chat_vLLMSettings; + default_settings?: Chat_DefaultChatSettings; }; diff --git a/sdks/ts/src/api/models/Chat_BaseChatOutput.ts b/sdks/ts/src/api/models/Chat_BaseChatOutput.ts index 6770dd5d5..91c56285f 100644 --- a/sdks/ts/src/api/models/Chat_BaseChatOutput.ts +++ b/sdks/ts/src/api/models/Chat_BaseChatOutput.ts @@ -13,5 +13,5 @@ export type Chat_BaseChatOutput = { /** * The log probabilities of tokens */ - logprobs: Chat_LogProbResponse | null; + logprobs?: Chat_LogProbResponse; }; diff --git a/sdks/ts/src/api/models/Chat_BaseChatResponse.ts b/sdks/ts/src/api/models/Chat_BaseChatResponse.ts index 781dd3838..078b8873f 100644 --- a/sdks/ts/src/api/models/Chat_BaseChatResponse.ts +++ b/sdks/ts/src/api/models/Chat_BaseChatResponse.ts @@ -9,7 +9,7 @@ export type Chat_BaseChatResponse = { /** * Usage statistics for the completion request */ - usage: Chat_CompetionUsage | null; + usage?: Chat_CompetionUsage; /** * Background job IDs that may have been spawned from this interaction. */ diff --git a/sdks/ts/src/api/models/Chat_BaseTokenLogProb.ts b/sdks/ts/src/api/models/Chat_BaseTokenLogProb.ts index 41b715757..dd9711b4d 100644 --- a/sdks/ts/src/api/models/Chat_BaseTokenLogProb.ts +++ b/sdks/ts/src/api/models/Chat_BaseTokenLogProb.ts @@ -8,5 +8,5 @@ export type Chat_BaseTokenLogProb = { * The log probability of the token */ logprob: number; - bytes: Array | null; + bytes?: Array; }; diff --git a/sdks/ts/src/api/models/Chat_ChatInput.ts b/sdks/ts/src/api/models/Chat_ChatInput.ts new file mode 100644 index 000000000..e03fa2914 --- /dev/null +++ b/sdks/ts/src/api/models/Chat_ChatInput.ts @@ -0,0 +1,83 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Chat_ChatInputData } from "./Chat_ChatInputData"; +import type { Chat_CompletionResponseFormat } from "./Chat_CompletionResponseFormat"; +import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; +import type { Common_logit_bias } from "./Common_logit_bias"; +import type { Common_uuid } from "./Common_uuid"; +export type Chat_ChatInput = Chat_ChatInputData & { + /** + * DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release) + */ + readonly remember: boolean; + /** + * Whether previous memories and docs should be recalled or not + */ + recall: boolean; + /** + * Whether this interaction should be stored in the session history or not + */ + save: boolean; + /** + * Identifier of the model to be used + */ + model?: Common_identifierSafeUnicode; + /** + * Indicates if the server should stream the response as it's generated + */ + stream: boolean; + /** + * Up to 4 sequences where the API will stop generating further tokens. + */ + stop?: Array; + /** + * If specified, the system will make a best effort to sample deterministically for that particular seed value + */ + seed?: number; + /** + * The maximum number of tokens to generate in the chat completion + */ + max_tokens?: number; + /** + * Modify the likelihood of specified tokens appearing in the completion + */ + logit_bias?: Record; + /** + * Response format (set to `json_object` to restrict output to JSON) + */ + response_format?: Chat_CompletionResponseFormat; + /** + * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + */ + agent?: Common_uuid; + /** + * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + */ + repetition_penalty?: number; + /** + * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. + */ + length_penalty?: number; + /** + * Minimum probability compared to leading token to be considered + */ + min_p?: number; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + */ + frequency_penalty?: number; + /** + * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + */ + presence_penalty?: number; + /** + * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. + */ + temperature?: number; + /** + * Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. + */ + top_p?: number; +}; diff --git a/sdks/ts/src/api/models/Chat_ChatInputData.ts b/sdks/ts/src/api/models/Chat_ChatInputData.ts new file mode 100644 index 000000000..ca502e70b --- /dev/null +++ b/sdks/ts/src/api/models/Chat_ChatInputData.ts @@ -0,0 +1,38 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Entries_ChatMLRole } from "./Entries_ChatMLRole"; +import type { Tools_FunctionTool } from "./Tools_FunctionTool"; +import type { Tools_NamedToolChoice } from "./Tools_NamedToolChoice"; +export type Chat_ChatInputData = { + /** + * A list of new input messages comprising the conversation so far. + */ + messages: Array<{ + /** + * The role of the message + */ + role: Entries_ChatMLRole; + /** + * The content parts of the message + */ + content: string | Array; + /** + * Name + */ + name?: string; + /** + * Whether to continue this message or return a new one + */ + continue?: boolean; + }>; + /** + * (Advanced) List of tools that are provided in addition to agent's default set of tools. + */ + tools?: Array; + /** + * Can be one of existing tools given to the agent earlier or the ones provided in this request. + */ + tool_choice?: "auto" | "none" | Tools_NamedToolChoice; +}; diff --git a/sdks/ts/src/api/models/Chat_ChatOutputChunk.ts b/sdks/ts/src/api/models/Chat_ChatOutputChunk.ts index 9d52a7fdc..af379458a 100644 --- a/sdks/ts/src/api/models/Chat_ChatOutputChunk.ts +++ b/sdks/ts/src/api/models/Chat_ChatOutputChunk.ts @@ -3,7 +3,7 @@ /* tslint:disable */ /* eslint-disable */ import type { Chat_BaseChatOutput } from "./Chat_BaseChatOutput"; -import type { Entries_ChatMLMessage } from "./Entries_ChatMLMessage"; +import type { Entries_ChatMLRole } from "./Entries_ChatMLRole"; /** * Streaming chat completion output */ @@ -11,5 +11,22 @@ export type Chat_ChatOutputChunk = Chat_BaseChatOutput & { /** * The message generated by the model */ - delta: Entries_ChatMLMessage; + delta: { + /** + * The role of the message + */ + role: Entries_ChatMLRole; + /** + * The content parts of the message + */ + content: string | Array; + /** + * Name + */ + name?: string; + /** + * Whether to continue this message or return a new one + */ + continue?: boolean; + }; }; diff --git a/sdks/ts/src/api/models/Chat_ChatSettings.ts b/sdks/ts/src/api/models/Chat_ChatSettings.ts new file mode 100644 index 000000000..0dd2f61cc --- /dev/null +++ b/sdks/ts/src/api/models/Chat_ChatSettings.ts @@ -0,0 +1,43 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Chat_CompletionResponseFormat } from "./Chat_CompletionResponseFormat"; +import type { Chat_DefaultChatSettings } from "./Chat_DefaultChatSettings"; +import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; +import type { Common_logit_bias } from "./Common_logit_bias"; +import type { Common_uuid } from "./Common_uuid"; +export type Chat_ChatSettings = Chat_DefaultChatSettings & { + /** + * Identifier of the model to be used + */ + model?: Common_identifierSafeUnicode; + /** + * Indicates if the server should stream the response as it's generated + */ + stream: boolean; + /** + * Up to 4 sequences where the API will stop generating further tokens. + */ + stop?: Array; + /** + * If specified, the system will make a best effort to sample deterministically for that particular seed value + */ + seed?: number; + /** + * The maximum number of tokens to generate in the chat completion + */ + max_tokens?: number; + /** + * Modify the likelihood of specified tokens appearing in the completion + */ + logit_bias?: Record; + /** + * Response format (set to `json_object` to restrict output to JSON) + */ + response_format?: Chat_CompletionResponseFormat; + /** + * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) + */ + agent?: Common_uuid; +}; diff --git a/sdks/ts/src/api/models/Chat_CompetionUsage.ts b/sdks/ts/src/api/models/Chat_CompetionUsage.ts index 089b474f1..8422d5f36 100644 --- a/sdks/ts/src/api/models/Chat_CompetionUsage.ts +++ b/sdks/ts/src/api/models/Chat_CompetionUsage.ts @@ -9,13 +9,13 @@ export type Chat_CompetionUsage = { /** * Number of tokens in the generated completion */ - readonly completion_tokens: number; + readonly completion_tokens?: number; /** * Number of tokens in the prompt */ - readonly prompt_tokens: number; + readonly prompt_tokens?: number; /** * Total number of tokens used in the request (prompt + completion) */ - readonly total_tokens: number; + readonly total_tokens?: number; }; diff --git a/sdks/ts/src/api/models/Chat_vLLMSettings.ts b/sdks/ts/src/api/models/Chat_DefaultChatSettings.ts similarity index 51% rename from sdks/ts/src/api/models/Chat_vLLMSettings.ts rename to sdks/ts/src/api/models/Chat_DefaultChatSettings.ts index 8973da946..5bfeb6bc5 100644 --- a/sdks/ts/src/api/models/Chat_vLLMSettings.ts +++ b/sdks/ts/src/api/models/Chat_DefaultChatSettings.ts @@ -2,7 +2,11 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export type Chat_vLLMSettings = { +import type { Chat_OpenAISettings } from "./Chat_OpenAISettings"; +/** + * Default settings for the chat session (also used by the agent) + */ +export type Chat_DefaultChatSettings = Chat_OpenAISettings & { /** * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. */ @@ -11,14 +15,6 @@ export type Chat_vLLMSettings = { * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. */ length_penalty?: number; - /** - * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - */ - temperature?: number; - /** - * Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - */ - top_p?: number; /** * Minimum probability compared to leading token to be considered */ diff --git a/sdks/ts/src/api/models/Chat_GenerationPreset.ts b/sdks/ts/src/api/models/Chat_GenerationPreset.ts deleted file mode 100644 index 1083d3294..000000000 --- a/sdks/ts/src/api/models/Chat_GenerationPreset.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -/** - * Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) - */ -export type Chat_GenerationPreset = - | "problem_solving" - | "conversational" - | "fun" - | "prose" - | "creative" - | "business" - | "deterministic" - | "code" - | "multilingual"; diff --git a/sdks/ts/src/api/models/Chat_GenerationPresetSettings.ts b/sdks/ts/src/api/models/Chat_GenerationPresetSettings.ts deleted file mode 100644 index 230124c0a..000000000 --- a/sdks/ts/src/api/models/Chat_GenerationPresetSettings.ts +++ /dev/null @@ -1,11 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { Chat_GenerationPreset } from "./Chat_GenerationPreset"; -export type Chat_GenerationPresetSettings = { - /** - * Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) - */ - preset?: Chat_GenerationPreset; -}; diff --git a/sdks/ts/src/api/models/Chat_MessageChatResponse.ts b/sdks/ts/src/api/models/Chat_MessageChatResponse.ts index 32efea493..4ed0e111f 100644 --- a/sdks/ts/src/api/models/Chat_MessageChatResponse.ts +++ b/sdks/ts/src/api/models/Chat_MessageChatResponse.ts @@ -3,10 +3,11 @@ /* tslint:disable */ /* eslint-disable */ import type { Chat_BaseChatResponse } from "./Chat_BaseChatResponse"; -import type { Chat_ChatOutputChunk } from "./Chat_ChatOutputChunk"; +import type { Chat_MultipleChatOutput } from "./Chat_MultipleChatOutput"; +import type { Chat_SingleChatOutput } from "./Chat_SingleChatOutput"; export type Chat_MessageChatResponse = Chat_BaseChatResponse & { /** * The deltas generated by the model */ - choices: Array; + choices: Array; }; diff --git a/sdks/ts/src/api/models/Chat_MultipleChatOutput.ts b/sdks/ts/src/api/models/Chat_MultipleChatOutput.ts index 759edb0f5..89c725dfa 100644 --- a/sdks/ts/src/api/models/Chat_MultipleChatOutput.ts +++ b/sdks/ts/src/api/models/Chat_MultipleChatOutput.ts @@ -3,10 +3,27 @@ /* tslint:disable */ /* eslint-disable */ import type { Chat_BaseChatOutput } from "./Chat_BaseChatOutput"; -import type { Entries_ChatMLMessage } from "./Entries_ChatMLMessage"; +import type { Entries_ChatMLRole } from "./Entries_ChatMLRole"; /** * The output returned by the model. Note that, depending on the model provider, they might return more than one message. */ export type Chat_MultipleChatOutput = Chat_BaseChatOutput & { - messages: Array; + messages: Array<{ + /** + * The role of the message + */ + role: Entries_ChatMLRole; + /** + * The content parts of the message + */ + content: string | Array; + /** + * Name + */ + name?: string; + /** + * Whether to continue this message or return a new one + */ + continue?: boolean; + }>; }; diff --git a/sdks/ts/src/api/models/Chat_SingleChatOutput.ts b/sdks/ts/src/api/models/Chat_SingleChatOutput.ts index e4571c234..b90b8c953 100644 --- a/sdks/ts/src/api/models/Chat_SingleChatOutput.ts +++ b/sdks/ts/src/api/models/Chat_SingleChatOutput.ts @@ -3,10 +3,27 @@ /* tslint:disable */ /* eslint-disable */ import type { Chat_BaseChatOutput } from "./Chat_BaseChatOutput"; -import type { Entries_ChatMLMessage } from "./Entries_ChatMLMessage"; +import type { Entries_ChatMLRole } from "./Entries_ChatMLRole"; /** * The output returned by the model. Note that, depending on the model provider, they might return more than one message. */ export type Chat_SingleChatOutput = Chat_BaseChatOutput & { - message: Entries_ChatMLMessage; + message: { + /** + * The role of the message + */ + role: Entries_ChatMLRole; + /** + * The content parts of the message + */ + content: string | Array; + /** + * Name + */ + name?: string; + /** + * Whether to continue this message or return a new one + */ + continue?: boolean; + }; }; diff --git a/sdks/ts/src/api/models/Entries_ChatMLTextContentPart.ts b/sdks/ts/src/api/models/Common_JinjaTemplate.ts similarity index 51% rename from sdks/ts/src/api/models/Entries_ChatMLTextContentPart.ts rename to sdks/ts/src/api/models/Common_JinjaTemplate.ts index 26e266d4e..7eb9f4a48 100644 --- a/sdks/ts/src/api/models/Entries_ChatMLTextContentPart.ts +++ b/sdks/ts/src/api/models/Common_JinjaTemplate.ts @@ -2,10 +2,7 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export type Entries_ChatMLTextContentPart = { - text: string; - /** - * The type (fixed to 'text') - */ - type: "text"; -}; +/** + * A valid jinja template. + */ +export type Common_JinjaTemplate = string; diff --git a/sdks/ts/src/api/models/Docs_BaseDocSearchRequest.ts b/sdks/ts/src/api/models/Docs_BaseDocSearchRequest.ts index d99eefaf9..b6dd20f99 100644 --- a/sdks/ts/src/api/models/Docs_BaseDocSearchRequest.ts +++ b/sdks/ts/src/api/models/Docs_BaseDocSearchRequest.ts @@ -3,18 +3,7 @@ /* tslint:disable */ /* eslint-disable */ export type Docs_BaseDocSearchRequest = { - /** - * The confidence cutoff level - */ - confidence: number; - /** - * The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; - */ - alpha: number; - /** - * Whether to include the MMR algorithm in the search. Optimizes for diversity in search results. - */ - mmr: boolean; + limit: number; /** * The language to be used for text-only search. Support for other languages coming soon. */ diff --git a/sdks/ts/src/api/models/Docs_CreateDocRequest.ts b/sdks/ts/src/api/models/Docs_CreateDocRequest.ts new file mode 100644 index 000000000..a972e0f83 --- /dev/null +++ b/sdks/ts/src/api/models/Docs_CreateDocRequest.ts @@ -0,0 +1,18 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +/** + * Payload for creating a doc + */ +export type Docs_CreateDocRequest = { + metadata?: Record; + /** + * Title describing what this document contains + */ + title: string; + /** + * Contents of the document + */ + content: string | Array; +}; diff --git a/sdks/ts/src/api/models/Docs_Doc.ts b/sdks/ts/src/api/models/Docs_Doc.ts index 3a4c7681d..9735170c6 100644 --- a/sdks/ts/src/api/models/Docs_Doc.ts +++ b/sdks/ts/src/api/models/Docs_Doc.ts @@ -2,7 +2,6 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; import type { Common_uuid } from "./Common_uuid"; export type Docs_Doc = { readonly id: Common_uuid; @@ -14,7 +13,7 @@ export type Docs_Doc = { /** * Title describing what this document contains */ - title: Common_identifierSafeUnicode; + title: string; /** * Contents of the document */ diff --git a/sdks/ts/src/api/models/Docs_DocReference.ts b/sdks/ts/src/api/models/Docs_DocReference.ts index 9848f4079..20fba1b7e 100644 --- a/sdks/ts/src/api/models/Docs_DocReference.ts +++ b/sdks/ts/src/api/models/Docs_DocReference.ts @@ -4,6 +4,7 @@ /* eslint-disable */ import type { Common_uuid } from "./Common_uuid"; import type { Docs_DocOwner } from "./Docs_DocOwner"; +import type { Docs_Snippet } from "./Docs_Snippet"; export type Docs_DocReference = { /** * The owner of this document. @@ -13,10 +14,7 @@ export type Docs_DocReference = { * ID of the document */ readonly id: Common_uuid; - /** - * Snippets referred to of the document - */ - snippet_index: Array; title?: string; - snippet?: string; + snippets: Array; + distance: number | null; }; diff --git a/sdks/ts/src/api/models/Docs_DocSearchResponse.ts b/sdks/ts/src/api/models/Docs_DocSearchResponse.ts new file mode 100644 index 000000000..cfb8ad225 --- /dev/null +++ b/sdks/ts/src/api/models/Docs_DocSearchResponse.ts @@ -0,0 +1,15 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Docs_DocReference } from "./Docs_DocReference"; +export type Docs_DocSearchResponse = { + /** + * The documents that were found + */ + docs: Array; + /** + * The time taken to search in seconds + */ + time: number; +}; diff --git a/sdks/ts/src/api/models/Docs_HybridDocSearchRequest.ts b/sdks/ts/src/api/models/Docs_HybridDocSearchRequest.ts index 1049e733d..a1ba32811 100644 --- a/sdks/ts/src/api/models/Docs_HybridDocSearchRequest.ts +++ b/sdks/ts/src/api/models/Docs_HybridDocSearchRequest.ts @@ -5,11 +5,19 @@ import type { Docs_BaseDocSearchRequest } from "./Docs_BaseDocSearchRequest"; export type Docs_HybridDocSearchRequest = Docs_BaseDocSearchRequest & { /** - * Text or texts to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. + * The confidence cutoff level */ - text: string | Array; + confidence: number; /** - * Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + * The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; */ - vector: Array | Array>; + alpha: number; + /** + * Text to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. + */ + text: string; + /** + * Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + */ + vector: Array; }; diff --git a/sdks/ts/src/api/schemas/$Chat_GenerationPreset.ts b/sdks/ts/src/api/models/Docs_Snippet.ts similarity index 66% rename from sdks/ts/src/api/schemas/$Chat_GenerationPreset.ts rename to sdks/ts/src/api/models/Docs_Snippet.ts index f4a671436..f39583199 100644 --- a/sdks/ts/src/api/schemas/$Chat_GenerationPreset.ts +++ b/sdks/ts/src/api/models/Docs_Snippet.ts @@ -2,6 +2,7 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export const $Chat_GenerationPreset = { - type: "Enum", -} as const; +export type Docs_Snippet = { + index: number; + content: string; +}; diff --git a/sdks/ts/src/api/models/Docs_TextOnlyDocSearchRequest.ts b/sdks/ts/src/api/models/Docs_TextOnlyDocSearchRequest.ts index 9a15040c4..2d05e9f9b 100644 --- a/sdks/ts/src/api/models/Docs_TextOnlyDocSearchRequest.ts +++ b/sdks/ts/src/api/models/Docs_TextOnlyDocSearchRequest.ts @@ -5,7 +5,7 @@ import type { Docs_BaseDocSearchRequest } from "./Docs_BaseDocSearchRequest"; export type Docs_TextOnlyDocSearchRequest = Docs_BaseDocSearchRequest & { /** - * Text or texts to use in the search. + * Text to use in the search. */ - text: string | Array; + text: string; }; diff --git a/sdks/ts/src/api/models/Docs_VectorDocSearchRequest.ts b/sdks/ts/src/api/models/Docs_VectorDocSearchRequest.ts index 949e87cbd..7a720c46a 100644 --- a/sdks/ts/src/api/models/Docs_VectorDocSearchRequest.ts +++ b/sdks/ts/src/api/models/Docs_VectorDocSearchRequest.ts @@ -5,7 +5,11 @@ import type { Docs_BaseDocSearchRequest } from "./Docs_BaseDocSearchRequest"; export type Docs_VectorDocSearchRequest = Docs_BaseDocSearchRequest & { /** - * Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + * The confidence cutoff level */ - vector: Array | Array>; + confidence: number; + /** + * Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. + */ + vector: Array; }; diff --git a/sdks/ts/src/api/models/Entries_BaseEntry.ts b/sdks/ts/src/api/models/Entries_BaseEntry.ts index e77df13c3..d397d851e 100644 --- a/sdks/ts/src/api/models/Entries_BaseEntry.ts +++ b/sdks/ts/src/api/models/Entries_BaseEntry.ts @@ -17,8 +17,8 @@ export type Entries_BaseEntry = { | "internal" | "summarizer" | "meta"; - tokenizer?: string; - token_count?: number; + tokenizer: string; + token_count: number; /** * This is the time that this event refers to. */ diff --git a/sdks/ts/src/api/models/Entries_ChatMLImageContentPart.ts b/sdks/ts/src/api/models/Entries_ChatMLImageContentPart.ts deleted file mode 100644 index ab07c9df2..000000000 --- a/sdks/ts/src/api/models/Entries_ChatMLImageContentPart.ts +++ /dev/null @@ -1,15 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { Entries_ImageURL } from "./Entries_ImageURL"; -export type Entries_ChatMLImageContentPart = { - /** - * The image URL - */ - image_url: Entries_ImageURL; - /** - * The type (fixed to 'image_url') - */ - type: "image_url"; -}; diff --git a/sdks/ts/src/api/models/Entries_ChatMLMessage.ts b/sdks/ts/src/api/models/Entries_ChatMLMessage.ts deleted file mode 100644 index 019cfb7db..000000000 --- a/sdks/ts/src/api/models/Entries_ChatMLMessage.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { Common_uuid } from "./Common_uuid"; -import type { Entries_ChatMLRole } from "./Entries_ChatMLRole"; -import type { Tools_ChosenToolCall } from "./Tools_ChosenToolCall"; -export type Entries_ChatMLMessage = { - /** - * The role of the message - */ - role: Entries_ChatMLRole; - /** - * The content parts of the message - */ - content: string | Array; - /** - * Name - */ - name?: string; - /** - * Tool calls generated by the model. - */ - readonly tool_calls: Array; - /** - * When this resource was created as UTC date-time - */ - readonly created_at: string; - readonly id: Common_uuid; -}; diff --git a/sdks/ts/src/api/models/Entries_ChatMLRole.ts b/sdks/ts/src/api/models/Entries_ChatMLRole.ts index 0a4789cde..d0ad7e4da 100644 --- a/sdks/ts/src/api/models/Entries_ChatMLRole.ts +++ b/sdks/ts/src/api/models/Entries_ChatMLRole.ts @@ -7,7 +7,7 @@ */ export type Entries_ChatMLRole = | "user" - | "agent" + | "assistant" | "system" | "function" | "function_response" diff --git a/sdks/ts/src/api/models/Entries_History.ts b/sdks/ts/src/api/models/Entries_History.ts index 6550afff3..784972435 100644 --- a/sdks/ts/src/api/models/Entries_History.ts +++ b/sdks/ts/src/api/models/Entries_History.ts @@ -3,10 +3,10 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_uuid } from "./Common_uuid"; -import type { Entries_BaseEntry } from "./Entries_BaseEntry"; +import type { Entries_Entry } from "./Entries_Entry"; import type { Entries_Relation } from "./Entries_Relation"; export type Entries_History = { - entries: Array; + entries: Array; relations: Array; readonly session_id: Common_uuid; /** diff --git a/sdks/ts/src/api/models/Entries_ImageURL.ts b/sdks/ts/src/api/models/Entries_ImageURL.ts deleted file mode 100644 index 0fe36804d..000000000 --- a/sdks/ts/src/api/models/Entries_ImageURL.ts +++ /dev/null @@ -1,15 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { Entries_ImageDetail } from "./Entries_ImageDetail"; -export type Entries_ImageURL = { - /** - * Image URL or base64 data url (e.g. `data:image/jpeg;base64,`) - */ - url: string; - /** - * The detail level of the image - */ - detail: Entries_ImageDetail; -}; diff --git a/sdks/ts/src/api/models/Entries_InputChatMLMessage.ts b/sdks/ts/src/api/models/Entries_InputChatMLMessage.ts deleted file mode 100644 index e735b8bd1..000000000 --- a/sdks/ts/src/api/models/Entries_InputChatMLMessage.ts +++ /dev/null @@ -1,23 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { Entries_ChatMLRole } from "./Entries_ChatMLRole"; -export type Entries_InputChatMLMessage = { - /** - * The role of the message - */ - role: Entries_ChatMLRole; - /** - * The content parts of the message - */ - content: string | Array; - /** - * Name - */ - name?: string; - /** - * Whether to continue this message or return a new one - */ - continue?: boolean; -}; diff --git a/sdks/ts/src/api/models/Executions_TaskTokenResumeExecutionRequest.ts b/sdks/ts/src/api/models/Executions_TaskTokenResumeExecutionRequest.ts index 2f988e385..d81fdfa9c 100644 --- a/sdks/ts/src/api/models/Executions_TaskTokenResumeExecutionRequest.ts +++ b/sdks/ts/src/api/models/Executions_TaskTokenResumeExecutionRequest.ts @@ -4,10 +4,6 @@ /* eslint-disable */ export type Executions_TaskTokenResumeExecutionRequest = { status: "running"; - /** - * A Task Token is a unique identifier for a specific Task Execution. - */ - task_token: string; /** * The input to resume the execution with */ diff --git a/sdks/ts/src/api/models/Executions_Transition.ts b/sdks/ts/src/api/models/Executions_Transition.ts index edc8756d9..7d1194a03 100644 --- a/sdks/ts/src/api/models/Executions_Transition.ts +++ b/sdks/ts/src/api/models/Executions_Transition.ts @@ -3,12 +3,13 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_uuid } from "./Common_uuid"; +import type { Executions_TransitionTarget } from "./Executions_TransitionTarget"; export type Executions_Transition = { readonly type: "finish" | "wait" | "resume" | "error" | "step" | "cancelled"; readonly execution_id: Common_uuid; - readonly output: Record; - readonly current: Array; - readonly next: Array | null; + readonly output: any; + readonly current: Executions_TransitionTarget; + readonly next: Executions_TransitionTarget | null; readonly id: Common_uuid; metadata?: Record; /** diff --git a/sdks/ts/src/api/models/Executions_TransitionTarget.ts b/sdks/ts/src/api/models/Executions_TransitionTarget.ts new file mode 100644 index 000000000..fa511c40d --- /dev/null +++ b/sdks/ts/src/api/models/Executions_TransitionTarget.ts @@ -0,0 +1,9 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; +export type Executions_TransitionTarget = { + workflow: Common_identifierSafeUnicode; + step: number; +}; diff --git a/sdks/ts/src/api/models/Sessions_CreateOrUpdateSessionRequest.ts b/sdks/ts/src/api/models/Sessions_CreateOrUpdateSessionRequest.ts new file mode 100644 index 000000000..44ebd67e4 --- /dev/null +++ b/sdks/ts/src/api/models/Sessions_CreateOrUpdateSessionRequest.ts @@ -0,0 +1,38 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_uuid } from "./Common_uuid"; +import type { Sessions_ContextOverflowType } from "./Sessions_ContextOverflowType"; +import type { Sessions_CreateSessionRequest } from "./Sessions_CreateSessionRequest"; +export type Sessions_CreateOrUpdateSessionRequest = + Sessions_CreateSessionRequest & { + id: Common_uuid; + /** + * User ID of user associated with this session + */ + user?: Common_uuid; + users?: Array; + /** + * Agent ID of agent associated with this session + */ + agent?: Common_uuid; + agents?: Array; + /** + * A specific situation that sets the background for this session + */ + situation: string; + /** + * Render system and assistant message content as jinja templates + */ + render_templates: boolean; + /** + * Threshold value for the adaptive context functionality + */ + token_budget: number | null; + /** + * Action to start on context window overflow + */ + context_overflow: Sessions_ContextOverflowType | null; + metadata?: Record; + }; diff --git a/sdks/ts/src/api/models/Tasks_CaseThen.ts b/sdks/ts/src/api/models/Tasks_CaseThen.ts new file mode 100644 index 000000000..a2e96c0d9 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_CaseThen.ts @@ -0,0 +1,41 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; +import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; +import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; +import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; +import type { Tasks_YieldStep } from "./Tasks_YieldStep"; +export type Tasks_CaseThen = { + /** + * The condition to evaluate + */ + case: Common_PyExpression | "_"; + /** + * The steps to run if the condition is true + */ + then: + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep + | Tasks_ErrorWorkflowStep + | Tasks_YieldStep + | Tasks_WaitForInputStep; +}; diff --git a/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts b/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts index 369cae664..9590b4304 100644 --- a/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts +++ b/sdks/ts/src/api/models/Tasks_CreateTaskRequest.ts @@ -2,10 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -17,10 +28,51 @@ export type Tasks_CreateTaskRequest = Record< Array< | Tasks_EvaluateStep | Tasks_ToolCallStep - | Tasks_YieldStep | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep | Tasks_ErrorWorkflowStep + | Tasks_YieldStep | Tasks_WaitForInputStep | Tasks_IfElseWorkflowStep + | Tasks_SwitchStep + | Tasks_ForeachStep + | Tasks_ParallelStep + | ({ + /** + * The kind of step + */ + readonly kind_: "map_reduce"; + } & { + readonly kind_: "map_reduce"; + /** + * The variable to iterate over + */ + over: Common_PyExpression; + /** + * The steps to run for each iteration + */ + map: + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep; + /** + * The expression to reduce the results. + * If not provided, the results are collected and returned as a list. + * A special parameter named `results` is the accumulator and `_` is the current value. + */ + reduce?: Common_PyExpression; + initial?: any; + }) > >; diff --git a/sdks/ts/src/api/models/Tasks_EmbedStep.ts b/sdks/ts/src/api/models/Tasks_EmbedStep.ts new file mode 100644 index 000000000..e9d321ed3 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_EmbedStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Docs_EmbedQueryRequest } from "./Docs_EmbedQueryRequest"; +export type Tasks_EmbedStep = { + /** + * The kind of step + */ + readonly kind_: "embed"; +} & { + readonly kind_: "embed"; + /** + * The text to embed + */ + embed: Docs_EmbedQueryRequest; +}; diff --git a/sdks/ts/src/api/models/Tasks_ErrorWorkflowStep.ts b/sdks/ts/src/api/models/Tasks_ErrorWorkflowStep.ts index 8e9b49cbc..1e9e74aaf 100644 --- a/sdks/ts/src/api/models/Tasks_ErrorWorkflowStep.ts +++ b/sdks/ts/src/api/models/Tasks_ErrorWorkflowStep.ts @@ -2,9 +2,13 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; -export type Tasks_ErrorWorkflowStep = Tasks_BaseWorkflowStep & { - kind_: "error"; +export type Tasks_ErrorWorkflowStep = { + /** + * The kind of step + */ + readonly kind_: "error"; +} & { + readonly kind_: "error"; /** * The error message */ diff --git a/sdks/ts/src/api/models/Tasks_EvaluateStep.ts b/sdks/ts/src/api/models/Tasks_EvaluateStep.ts index ccc16f71d..8d5962382 100644 --- a/sdks/ts/src/api/models/Tasks_EvaluateStep.ts +++ b/sdks/ts/src/api/models/Tasks_EvaluateStep.ts @@ -3,9 +3,13 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_PyExpression } from "./Common_PyExpression"; -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; -export type Tasks_EvaluateStep = Tasks_BaseWorkflowStep & { - kind_: "evaluate"; +export type Tasks_EvaluateStep = { + /** + * The kind of step + */ + readonly kind_: "evaluate"; +} & { + readonly kind_: "evaluate"; /** * The expression to evaluate */ diff --git a/sdks/ts/src/api/models/Tasks_ForeachDo.ts b/sdks/ts/src/api/models/Tasks_ForeachDo.ts new file mode 100644 index 000000000..db2861b06 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ForeachDo.ts @@ -0,0 +1,32 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; +import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; +export type Tasks_ForeachDo = { + /** + * The variable to iterate over. + * VALIDATION: Should NOT return more than 1000 elements. + */ + in: Common_PyExpression; + /** + * The steps to run for each iteration + */ + do: + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep; +}; diff --git a/sdks/ts/src/api/models/Tasks_ForeachStep.ts b/sdks/ts/src/api/models/Tasks_ForeachStep.ts new file mode 100644 index 000000000..ad3f53550 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ForeachStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_ForeachDo } from "./Tasks_ForeachDo"; +export type Tasks_ForeachStep = { + /** + * The kind of step + */ + readonly kind_: "foreach"; +} & { + readonly kind_: "foreach"; + /** + * The steps to run for each iteration + */ + foreach: Tasks_ForeachDo; +}; diff --git a/sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts b/sdks/ts/src/api/models/Tasks_GetStep.ts similarity index 50% rename from sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts rename to sdks/ts/src/api/models/Tasks_GetStep.ts index c399bcaa8..b07732a92 100644 --- a/sdks/ts/src/api/models/Tasks_BaseWorkflowStep.ts +++ b/sdks/ts/src/api/models/Tasks_GetStep.ts @@ -2,16 +2,15 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export type Tasks_BaseWorkflowStep = { +export type Tasks_GetStep = { /** * The kind of step */ - kind_: - | "tool_call" - | "yield" - | "prompt" - | "evaluate" - | "if_else" - | "wait_for_input" - | "error"; + readonly kind_: "get"; +} & { + readonly kind_: "get"; + /** + * The key to get + */ + get: string; }; diff --git a/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts b/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts index d07ce4d07..f05a33f80 100644 --- a/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts +++ b/sdks/ts/src/api/models/Tasks_IfElseWorkflowStep.ts @@ -3,14 +3,26 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_PyExpression } from "./Common_PyExpression"; -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; +import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; -export type Tasks_IfElseWorkflowStep = Tasks_BaseWorkflowStep & { - kind_: "if_else"; +export type Tasks_IfElseWorkflowStep = { + /** + * The kind of step + */ + readonly kind_: "if_else"; +} & { + readonly kind_: "if_else"; /** * The condition to evaluate */ @@ -19,18 +31,34 @@ export type Tasks_IfElseWorkflowStep = Tasks_BaseWorkflowStep & { * The steps to run if the condition is true */ then: + | Tasks_EvaluateStep | Tasks_ToolCallStep - | Tasks_YieldStep | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep | Tasks_ErrorWorkflowStep + | Tasks_YieldStep | Tasks_WaitForInputStep; /** * The steps to run if the condition is false */ else: + | Tasks_EvaluateStep | Tasks_ToolCallStep - | Tasks_YieldStep | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep | Tasks_ErrorWorkflowStep + | Tasks_YieldStep | Tasks_WaitForInputStep; }; diff --git a/sdks/ts/src/api/models/Tasks_LogStep.ts b/sdks/ts/src/api/models/Tasks_LogStep.ts new file mode 100644 index 000000000..372bfccd3 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_LogStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +export type Tasks_LogStep = { + /** + * The kind of step + */ + readonly kind_: "log"; +} & { + readonly kind_: "log"; + /** + * The value to log + */ + log: Common_PyExpression; +}; diff --git a/sdks/ts/src/api/models/Tasks_ParallelStep.ts b/sdks/ts/src/api/models/Tasks_ParallelStep.ts new file mode 100644 index 000000000..9118d48dd --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ParallelStep.ts @@ -0,0 +1,33 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; +import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; +export type Tasks_ParallelStep = { + /** + * The kind of step + */ + readonly kind_: "parallel"; +} & { + readonly kind_: "parallel"; + /** + * The steps to run in parallel. Max concurrency will depend on the platform. + */ + parallel: Array< + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + >; +}; diff --git a/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts b/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts index c5493e70c..e511be553 100644 --- a/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts +++ b/sdks/ts/src/api/models/Tasks_PatchTaskRequest.ts @@ -2,10 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -17,10 +28,50 @@ export type Tasks_PatchTaskRequest = Record< Array< | Tasks_EvaluateStep | Tasks_ToolCallStep - | Tasks_YieldStep | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep | Tasks_ErrorWorkflowStep + | Tasks_YieldStep | Tasks_WaitForInputStep | Tasks_IfElseWorkflowStep + | Tasks_SwitchStep + | Tasks_ForeachStep + | Tasks_ParallelStep + | ({ + /** + * Discriminator property for BaseWorkflowStep. + */ + kind_?: string; + } & { + /** + * The variable to iterate over + */ + over: Common_PyExpression; + /** + * The steps to run for each iteration + */ + map: + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep; + /** + * The expression to reduce the results. + * If not provided, the results are collected and returned as a list. + * A special parameter named `results` is the accumulator and `_` is the current value. + */ + reduce?: Common_PyExpression; + initial?: any; + }) > >; diff --git a/sdks/ts/src/api/models/Tasks_PromptStep.ts b/sdks/ts/src/api/models/Tasks_PromptStep.ts index 58cb82462..a4e200f02 100644 --- a/sdks/ts/src/api/models/Tasks_PromptStep.ts +++ b/sdks/ts/src/api/models/Tasks_PromptStep.ts @@ -2,163 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Chat_CompletionResponseFormat } from "./Chat_CompletionResponseFormat"; -import type { Chat_GenerationPreset } from "./Chat_GenerationPreset"; -import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; -import type { Common_logit_bias } from "./Common_logit_bias"; -import type { Common_uuid } from "./Common_uuid"; -import type { Entries_InputChatMLMessage } from "./Entries_InputChatMLMessage"; -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; -export type Tasks_PromptStep = Tasks_BaseWorkflowStep & { - kind_: "prompt"; +import type { Chat_ChatSettings } from "./Chat_ChatSettings"; +import type { Common_JinjaTemplate } from "./Common_JinjaTemplate"; +export type Tasks_PromptStep = { + /** + * The kind of step + */ + readonly kind_: "prompt"; +} & { + readonly kind_: "prompt"; /** * The prompt to run */ - prompt: string | Array; + prompt: Common_JinjaTemplate; /** * Settings for the prompt */ - settings: - | { - /** - * Identifier of the model to be used - */ - model?: Common_identifierSafeUnicode; - /** - * Indicates if the server should stream the response as it's generated - */ - stream: boolean; - /** - * Up to 4 sequences where the API will stop generating further tokens. - */ - stop?: Array; - /** - * If specified, the system will make a best effort to sample deterministically for that particular seed value - */ - seed?: number; - /** - * The maximum number of tokens to generate in the chat completion - */ - max_tokens?: number; - /** - * Modify the likelihood of specified tokens appearing in the completion - */ - logit_bias?: Record; - /** - * Response format (set to `json_object` to restrict output to JSON) - */ - response_format?: Chat_CompletionResponseFormat; - /** - * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - */ - agent?: Common_uuid; - /** - * Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) - */ - preset?: Chat_GenerationPreset; - } - | { - /** - * Identifier of the model to be used - */ - model?: Common_identifierSafeUnicode; - /** - * Indicates if the server should stream the response as it's generated - */ - stream: boolean; - /** - * Up to 4 sequences where the API will stop generating further tokens. - */ - stop?: Array; - /** - * If specified, the system will make a best effort to sample deterministically for that particular seed value - */ - seed?: number; - /** - * The maximum number of tokens to generate in the chat completion - */ - max_tokens?: number; - /** - * Modify the likelihood of specified tokens appearing in the completion - */ - logit_bias?: Record; - /** - * Response format (set to `json_object` to restrict output to JSON) - */ - response_format?: Chat_CompletionResponseFormat; - /** - * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - */ - agent?: Common_uuid; - /** - * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - */ - frequency_penalty?: number; - /** - * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - */ - presence_penalty?: number; - /** - * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - */ - temperature?: number; - /** - * Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - */ - top_p?: number; - } - | { - /** - * Identifier of the model to be used - */ - model?: Common_identifierSafeUnicode; - /** - * Indicates if the server should stream the response as it's generated - */ - stream: boolean; - /** - * Up to 4 sequences where the API will stop generating further tokens. - */ - stop?: Array; - /** - * If specified, the system will make a best effort to sample deterministically for that particular seed value - */ - seed?: number; - /** - * The maximum number of tokens to generate in the chat completion - */ - max_tokens?: number; - /** - * Modify the likelihood of specified tokens appearing in the completion - */ - logit_bias?: Record; - /** - * Response format (set to `json_object` to restrict output to JSON) - */ - response_format?: Chat_CompletionResponseFormat; - /** - * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - */ - agent?: Common_uuid; - /** - * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - */ - repetition_penalty?: number; - /** - * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. - */ - length_penalty?: number; - /** - * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - */ - temperature?: number; - /** - * Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - */ - top_p?: number; - /** - * Minimum probability compared to leading token to be considered - */ - min_p?: number; - }; + settings?: Chat_ChatSettings; }; diff --git a/sdks/ts/src/api/models/Tasks_ReturnStep.ts b/sdks/ts/src/api/models/Tasks_ReturnStep.ts new file mode 100644 index 000000000..7eda54161 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_ReturnStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +export type Tasks_ReturnStep = { + /** + * The kind of step + */ + readonly kind_: "return"; +} & { + readonly kind_: "return"; + /** + * The value to return + */ + return: Record; +}; diff --git a/sdks/ts/src/api/models/Tasks_SearchStep.ts b/sdks/ts/src/api/models/Tasks_SearchStep.ts new file mode 100644 index 000000000..3a2663fa9 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_SearchStep.ts @@ -0,0 +1,22 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Docs_HybridDocSearchRequest } from "./Docs_HybridDocSearchRequest"; +import type { Docs_TextOnlyDocSearchRequest } from "./Docs_TextOnlyDocSearchRequest"; +import type { Docs_VectorDocSearchRequest } from "./Docs_VectorDocSearchRequest"; +export type Tasks_SearchStep = { + /** + * The kind of step + */ + readonly kind_: "search"; +} & { + readonly kind_: "search"; + /** + * The search query + */ + search: + | Docs_VectorDocSearchRequest + | Docs_TextOnlyDocSearchRequest + | Docs_HybridDocSearchRequest; +}; diff --git a/sdks/ts/src/api/models/Tasks_SetKey.ts b/sdks/ts/src/api/models/Tasks_SetKey.ts new file mode 100644 index 000000000..a7cef005d --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_SetKey.ts @@ -0,0 +1,15 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +export type Tasks_SetKey = { + /** + * The key to set + */ + key: string; + /** + * The value to set + */ + value: Common_PyExpression; +}; diff --git a/sdks/ts/src/api/models/Tasks_SetStep.ts b/sdks/ts/src/api/models/Tasks_SetStep.ts new file mode 100644 index 000000000..2bccabeac --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_SetStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_SetKey } from "./Tasks_SetKey"; +export type Tasks_SetStep = { + /** + * The kind of step + */ + readonly kind_: "set"; +} & { + readonly kind_: "set"; + /** + * The value to set + */ + set: Tasks_SetKey; +}; diff --git a/sdks/ts/src/api/models/Tasks_SleepFor.ts b/sdks/ts/src/api/models/Tasks_SleepFor.ts new file mode 100644 index 000000000..85b1ef55f --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_SleepFor.ts @@ -0,0 +1,22 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type Tasks_SleepFor = { + /** + * The number of seconds to sleep for + */ + seconds: number; + /** + * The number of minutes to sleep for + */ + minutes: number; + /** + * The number of hours to sleep for + */ + hours: number; + /** + * The number of days to sleep for + */ + days: number; +}; diff --git a/sdks/ts/src/api/models/Tasks_SleepStep.ts b/sdks/ts/src/api/models/Tasks_SleepStep.ts new file mode 100644 index 000000000..e3b8c576b --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_SleepStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_SleepFor } from "./Tasks_SleepFor"; +export type Tasks_SleepStep = { + /** + * The kind of step + */ + readonly kind_: "sleep"; +} & { + readonly kind_: "sleep"; + /** + * The duration to sleep for (max 31 days) + */ + sleep: Tasks_SleepFor; +}; diff --git a/sdks/ts/src/api/models/Tasks_SwitchStep.ts b/sdks/ts/src/api/models/Tasks_SwitchStep.ts new file mode 100644 index 000000000..a560cee13 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_SwitchStep.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Tasks_CaseThen } from "./Tasks_CaseThen"; +export type Tasks_SwitchStep = { + /** + * The kind of step + */ + readonly kind_: "switch"; +} & { + readonly kind_: "switch"; + /** + * The cond tree + */ + switch: Array; +}; diff --git a/sdks/ts/src/api/models/Tasks_Task.ts b/sdks/ts/src/api/models/Tasks_Task.ts index 9b273fc4a..e155187ae 100644 --- a/sdks/ts/src/api/models/Tasks_Task.ts +++ b/sdks/ts/src/api/models/Tasks_Task.ts @@ -2,10 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -17,10 +28,51 @@ export type Tasks_Task = Record< Array< | Tasks_EvaluateStep | Tasks_ToolCallStep - | Tasks_YieldStep | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep | Tasks_ErrorWorkflowStep + | Tasks_YieldStep | Tasks_WaitForInputStep | Tasks_IfElseWorkflowStep + | Tasks_SwitchStep + | Tasks_ForeachStep + | Tasks_ParallelStep + | ({ + /** + * The kind of step + */ + readonly kind_: "map_reduce"; + } & { + readonly kind_: "map_reduce"; + /** + * The variable to iterate over + */ + over: Common_PyExpression; + /** + * The steps to run for each iteration + */ + map: + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep; + /** + * The expression to reduce the results. + * If not provided, the results are collected and returned as a list. + * A special parameter named `results` is the accumulator and `_` is the current value. + */ + reduce?: Common_PyExpression; + initial?: any; + }) > >; diff --git a/sdks/ts/src/api/models/Tasks_TaskTool.ts b/sdks/ts/src/api/models/Tasks_TaskTool.ts index 815d7f61b..8f53afa9e 100644 --- a/sdks/ts/src/api/models/Tasks_TaskTool.ts +++ b/sdks/ts/src/api/models/Tasks_TaskTool.ts @@ -2,24 +2,10 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Common_validPythonIdentifier } from "./Common_validPythonIdentifier"; -import type { Tools_FunctionDef } from "./Tools_FunctionDef"; -import type { Tools_ToolType } from "./Tools_ToolType"; -export type Tasks_TaskTool = { +import type { Tools_CreateToolRequest } from "./Tools_CreateToolRequest"; +export type Tasks_TaskTool = Tools_CreateToolRequest & { /** * Read-only: Whether the tool was inherited or not. Only applies within tasks. */ readonly inherited?: boolean; - /** - * Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) - */ - type: Tools_ToolType; - /** - * Name of the tool (must be unique for this agent and a valid python identifier string ) - */ - name: Common_validPythonIdentifier; - function?: Tools_FunctionDef; - integration?: any; - system?: any; - api_call?: any; }; diff --git a/sdks/ts/src/api/models/Tasks_ToolCallStep.ts b/sdks/ts/src/api/models/Tasks_ToolCallStep.ts index 84e34c703..9a3eadb51 100644 --- a/sdks/ts/src/api/models/Tasks_ToolCallStep.ts +++ b/sdks/ts/src/api/models/Tasks_ToolCallStep.ts @@ -2,16 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; import type { Common_toolRef } from "./Common_toolRef"; -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; -export type Tasks_ToolCallStep = Tasks_BaseWorkflowStep & { - kind_: "tool_call"; +export type Tasks_ToolCallStep = { + /** + * The kind of step + */ + readonly kind_: "tool_call"; +} & { + readonly kind_: "tool_call"; /** * The tool to run */ tool: Common_toolRef; /** - * The input parameters for the tool + * The input parameters for the tool (defaults to last step output) */ - arguments: Record; + arguments: Record | "_"; }; diff --git a/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts b/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts index e823f29a4..8f3afc2c0 100644 --- a/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts +++ b/sdks/ts/src/api/models/Tasks_UpdateTaskRequest.ts @@ -2,10 +2,21 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +import type { Tasks_EmbedStep } from "./Tasks_EmbedStep"; import type { Tasks_ErrorWorkflowStep } from "./Tasks_ErrorWorkflowStep"; import type { Tasks_EvaluateStep } from "./Tasks_EvaluateStep"; +import type { Tasks_ForeachStep } from "./Tasks_ForeachStep"; +import type { Tasks_GetStep } from "./Tasks_GetStep"; import type { Tasks_IfElseWorkflowStep } from "./Tasks_IfElseWorkflowStep"; +import type { Tasks_LogStep } from "./Tasks_LogStep"; +import type { Tasks_ParallelStep } from "./Tasks_ParallelStep"; import type { Tasks_PromptStep } from "./Tasks_PromptStep"; +import type { Tasks_ReturnStep } from "./Tasks_ReturnStep"; +import type { Tasks_SearchStep } from "./Tasks_SearchStep"; +import type { Tasks_SetStep } from "./Tasks_SetStep"; +import type { Tasks_SleepStep } from "./Tasks_SleepStep"; +import type { Tasks_SwitchStep } from "./Tasks_SwitchStep"; import type { Tasks_ToolCallStep } from "./Tasks_ToolCallStep"; import type { Tasks_WaitForInputStep } from "./Tasks_WaitForInputStep"; import type { Tasks_YieldStep } from "./Tasks_YieldStep"; @@ -17,10 +28,51 @@ export type Tasks_UpdateTaskRequest = Record< Array< | Tasks_EvaluateStep | Tasks_ToolCallStep - | Tasks_YieldStep | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep + | Tasks_ReturnStep + | Tasks_SleepStep | Tasks_ErrorWorkflowStep + | Tasks_YieldStep | Tasks_WaitForInputStep | Tasks_IfElseWorkflowStep + | Tasks_SwitchStep + | Tasks_ForeachStep + | Tasks_ParallelStep + | ({ + /** + * The kind of step + */ + readonly kind_: "map_reduce"; + } & { + readonly kind_: "map_reduce"; + /** + * The variable to iterate over + */ + over: Common_PyExpression; + /** + * The steps to run for each iteration + */ + map: + | Tasks_EvaluateStep + | Tasks_ToolCallStep + | Tasks_PromptStep + | Tasks_GetStep + | Tasks_SetStep + | Tasks_LogStep + | Tasks_EmbedStep + | Tasks_SearchStep; + /** + * The expression to reduce the results. + * If not provided, the results are collected and returned as a list. + * A special parameter named `results` is the accumulator and `_` is the current value. + */ + reduce?: Common_PyExpression; + initial?: any; + }) > >; diff --git a/sdks/ts/src/api/models/Tasks_WaitForInputInfo.ts b/sdks/ts/src/api/models/Tasks_WaitForInputInfo.ts new file mode 100644 index 000000000..adef0d462 --- /dev/null +++ b/sdks/ts/src/api/models/Tasks_WaitForInputInfo.ts @@ -0,0 +1,11 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_PyExpression } from "./Common_PyExpression"; +export type Tasks_WaitForInputInfo = { + /** + * Any additional info or data + */ + info: Record; +}; diff --git a/sdks/ts/src/api/models/Tasks_WaitForInputStep.ts b/sdks/ts/src/api/models/Tasks_WaitForInputStep.ts index d47196144..d7b537ea3 100644 --- a/sdks/ts/src/api/models/Tasks_WaitForInputStep.ts +++ b/sdks/ts/src/api/models/Tasks_WaitForInputStep.ts @@ -2,11 +2,16 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; -export type Tasks_WaitForInputStep = Tasks_BaseWorkflowStep & { - kind_: "wait_for_input"; +import type { Tasks_WaitForInputInfo } from "./Tasks_WaitForInputInfo"; +export type Tasks_WaitForInputStep = { + /** + * The kind of step + */ + readonly kind_: "wait_for_input"; +} & { + readonly kind_: "wait_for_input"; /** * Any additional info or data */ - info: string | Record; + wait_for_input: Tasks_WaitForInputInfo; }; diff --git a/sdks/ts/src/api/models/Tasks_YieldStep.ts b/sdks/ts/src/api/models/Tasks_YieldStep.ts index a1e298a41..3792539a2 100644 --- a/sdks/ts/src/api/models/Tasks_YieldStep.ts +++ b/sdks/ts/src/api/models/Tasks_YieldStep.ts @@ -3,15 +3,20 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_PyExpression } from "./Common_PyExpression"; -import type { Tasks_BaseWorkflowStep } from "./Tasks_BaseWorkflowStep"; -export type Tasks_YieldStep = Tasks_BaseWorkflowStep & { - kind_: "yield"; +export type Tasks_YieldStep = { /** - * The subworkflow to run + * The kind of step + */ + readonly kind_: "yield"; +} & { + readonly kind_: "yield"; + /** + * The subworkflow to run. + * VALIDATION: Should resolve to a defined subworkflow. */ workflow: string; /** - * The input parameters for the subworkflow + * The input parameters for the subworkflow (defaults to last step output) */ - arguments: Record; + arguments: Record | "_"; }; diff --git a/sdks/ts/src/api/models/Tools_CreateToolRequest.ts b/sdks/ts/src/api/models/Tools_CreateToolRequest.ts new file mode 100644 index 000000000..51024282c --- /dev/null +++ b/sdks/ts/src/api/models/Tools_CreateToolRequest.ts @@ -0,0 +1,24 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Common_validPythonIdentifier } from "./Common_validPythonIdentifier"; +import type { Tools_FunctionDef } from "./Tools_FunctionDef"; +import type { Tools_ToolType } from "./Tools_ToolType"; +/** + * Payload for creating a tool + */ +export type Tools_CreateToolRequest = { + /** + * Whether this tool is a `function`, `api_call`, `system` etc. (Only `function` tool supported right now) + */ + type: Tools_ToolType; + /** + * Name of the tool (must be unique for this agent and a valid python identifier string ) + */ + name: Common_validPythonIdentifier; + function?: Tools_FunctionDef; + integration?: any; + system?: any; + api_call?: any; +}; diff --git a/sdks/ts/src/api/models/Tools_FunctionDef.ts b/sdks/ts/src/api/models/Tools_FunctionDef.ts index 777aa3155..8e57315a3 100644 --- a/sdks/ts/src/api/models/Tools_FunctionDef.ts +++ b/sdks/ts/src/api/models/Tools_FunctionDef.ts @@ -3,7 +3,6 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; -import type { Common_validPythonIdentifier } from "./Common_validPythonIdentifier"; /** * Function definition */ @@ -11,7 +10,7 @@ export type Tools_FunctionDef = { /** * DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. */ - name?: Common_validPythonIdentifier; + name?: any; /** * Description of the function */ @@ -19,5 +18,5 @@ export type Tools_FunctionDef = { /** * The parameters the function accepts */ - parameters: Record; + parameters?: Record; }; diff --git a/sdks/ts/src/api/models/Tools_FunctionDefUpdate.ts b/sdks/ts/src/api/models/Tools_FunctionDefUpdate.ts deleted file mode 100644 index 7f4417abd..000000000 --- a/sdks/ts/src/api/models/Tools_FunctionDefUpdate.ts +++ /dev/null @@ -1,23 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -import type { Common_identifierSafeUnicode } from "./Common_identifierSafeUnicode"; -import type { Common_validPythonIdentifier } from "./Common_validPythonIdentifier"; -/** - * Function definition - */ -export type Tools_FunctionDefUpdate = { - /** - * DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. - */ - name?: Common_validPythonIdentifier; - /** - * Description of the function - */ - description?: Common_identifierSafeUnicode; - /** - * The parameters the function accepts - */ - parameters?: Record; -}; diff --git a/sdks/ts/src/api/models/Tools_PatchToolRequest.ts b/sdks/ts/src/api/models/Tools_PatchToolRequest.ts index 0957c8db2..6ab88a01f 100644 --- a/sdks/ts/src/api/models/Tools_PatchToolRequest.ts +++ b/sdks/ts/src/api/models/Tools_PatchToolRequest.ts @@ -3,7 +3,7 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_validPythonIdentifier } from "./Common_validPythonIdentifier"; -import type { Tools_FunctionDefUpdate } from "./Tools_FunctionDefUpdate"; +import type { Tools_FunctionDef } from "./Tools_FunctionDef"; import type { Tools_ToolType } from "./Tools_ToolType"; /** * Payload for patching a tool @@ -17,7 +17,7 @@ export type Tools_PatchToolRequest = { * Name of the tool (must be unique for this agent and a valid python identifier string ) */ name?: Common_validPythonIdentifier; - function?: Tools_FunctionDefUpdate; + function?: Tools_FunctionDef; integration?: any; system?: any; api_call?: any; diff --git a/sdks/ts/src/api/models/Users_CreateOrUpdateUserRequest_id.ts b/sdks/ts/src/api/models/Users_CreateOrUpdateUserRequest.ts similarity index 74% rename from sdks/ts/src/api/models/Users_CreateOrUpdateUserRequest_id.ts rename to sdks/ts/src/api/models/Users_CreateOrUpdateUserRequest.ts index 7338d9bed..8257ad304 100644 --- a/sdks/ts/src/api/models/Users_CreateOrUpdateUserRequest_id.ts +++ b/sdks/ts/src/api/models/Users_CreateOrUpdateUserRequest.ts @@ -3,4 +3,4 @@ /* tslint:disable */ /* eslint-disable */ import type { Common_uuid } from "./Common_uuid"; -export type Users_CreateOrUpdateUserRequest_id = Common_uuid; +export type Users_CreateOrUpdateUserRequest = Common_uuid; diff --git a/sdks/ts/src/api/schemas/$Agents_Agent.ts b/sdks/ts/src/api/schemas/$Agents_Agent.ts index 72bb0e2b3..0aadbfb6d 100644 --- a/sdks/ts/src/api/schemas/$Agents_Agent.ts +++ b/sdks/ts/src/api/schemas/$Agents_Agent.ts @@ -71,17 +71,11 @@ export const $Agents_Agent = { isRequired: true, }, default_settings: { - type: "any-of", + type: "all-of", description: `Default settings for all sessions created by this agent`, contains: [ { - type: "Chat_GenerationPresetSettings", - }, - { - type: "Chat_OpenAISettings", - }, - { - type: "Chat_vLLMSettings", + type: "Chat_DefaultChatSettings", }, ], }, diff --git a/sdks/ts/src/api/schemas/$Agents_CreateAgentRequest.ts b/sdks/ts/src/api/schemas/$Agents_CreateAgentRequest.ts index 035b10261..3271b5264 100644 --- a/sdks/ts/src/api/schemas/$Agents_CreateAgentRequest.ts +++ b/sdks/ts/src/api/schemas/$Agents_CreateAgentRequest.ts @@ -48,17 +48,11 @@ export const $Agents_CreateAgentRequest = { isRequired: true, }, default_settings: { - type: "any-of", + type: "all-of", description: `Default settings for all sessions created by this agent`, contains: [ { - type: "Chat_GenerationPresetSettings", - }, - { - type: "Chat_OpenAISettings", - }, - { - type: "Chat_vLLMSettings", + type: "Chat_DefaultChatSettings", }, ], }, diff --git a/sdks/ts/src/api/schemas/$Agents_CreateOrUpdateAgentRequest.ts b/sdks/ts/src/api/schemas/$Agents_CreateOrUpdateAgentRequest.ts new file mode 100644 index 000000000..3c30c3cf3 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Agents_CreateOrUpdateAgentRequest.ts @@ -0,0 +1,71 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Agents_CreateOrUpdateAgentRequest = { + type: "all-of", + contains: [ + { + type: "Agents_CreateAgentRequest", + }, + { + properties: { + id: { + type: "Common_uuid", + isRequired: true, + }, + metadata: { + type: "dictionary", + contains: { + properties: {}, + }, + }, + name: { + type: "all-of", + description: `Name of the agent`, + contains: [ + { + type: "Common_identifierSafeUnicode", + }, + ], + isRequired: true, + }, + about: { + type: "string", + description: `About the agent`, + isRequired: true, + }, + model: { + type: "string", + description: `Model name to use (gpt-4-turbo, gemini-nano etc)`, + isRequired: true, + }, + instructions: { + type: "any-of", + description: `Instructions for the agent`, + contains: [ + { + type: "string", + }, + { + type: "array", + contains: { + type: "string", + }, + }, + ], + isRequired: true, + }, + default_settings: { + type: "all-of", + description: `Default settings for all sessions created by this agent`, + contains: [ + { + type: "Chat_DefaultChatSettings", + }, + ], + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Agents_PatchAgentRequest.ts b/sdks/ts/src/api/schemas/$Agents_PatchAgentRequest.ts index 903376a1a..26a59a55f 100644 --- a/sdks/ts/src/api/schemas/$Agents_PatchAgentRequest.ts +++ b/sdks/ts/src/api/schemas/$Agents_PatchAgentRequest.ts @@ -44,17 +44,11 @@ export const $Agents_PatchAgentRequest = { ], }, default_settings: { - type: "any-of", + type: "all-of", description: `Default settings for all sessions created by this agent`, contains: [ { - type: "Chat_GenerationPresetSettings", - }, - { - type: "Chat_OpenAISettings", - }, - { - type: "Chat_vLLMSettings", + type: "Chat_DefaultChatSettings", }, ], }, diff --git a/sdks/ts/src/api/schemas/$Agents_UpdateAgentRequest.ts b/sdks/ts/src/api/schemas/$Agents_UpdateAgentRequest.ts index 6591bc329..c94cedc6e 100644 --- a/sdks/ts/src/api/schemas/$Agents_UpdateAgentRequest.ts +++ b/sdks/ts/src/api/schemas/$Agents_UpdateAgentRequest.ts @@ -48,17 +48,11 @@ export const $Agents_UpdateAgentRequest = { isRequired: true, }, default_settings: { - type: "any-of", + type: "all-of", description: `Default settings for all sessions created by this agent`, contains: [ { - type: "Chat_GenerationPresetSettings", - }, - { - type: "Chat_OpenAISettings", - }, - { - type: "Chat_vLLMSettings", + type: "Chat_DefaultChatSettings", }, ], }, diff --git a/sdks/ts/src/api/schemas/$Chat_BaseChatOutput.ts b/sdks/ts/src/api/schemas/$Chat_BaseChatOutput.ts index 3053f6412..e5c25070d 100644 --- a/sdks/ts/src/api/schemas/$Chat_BaseChatOutput.ts +++ b/sdks/ts/src/api/schemas/$Chat_BaseChatOutput.ts @@ -27,8 +27,6 @@ export const $Chat_BaseChatOutput = { type: "Chat_LogProbResponse", }, ], - isRequired: true, - isNullable: true, }, }, } as const; diff --git a/sdks/ts/src/api/schemas/$Chat_BaseChatResponse.ts b/sdks/ts/src/api/schemas/$Chat_BaseChatResponse.ts index 57a56bc70..1ca3e7008 100644 --- a/sdks/ts/src/api/schemas/$Chat_BaseChatResponse.ts +++ b/sdks/ts/src/api/schemas/$Chat_BaseChatResponse.ts @@ -12,8 +12,6 @@ export const $Chat_BaseChatResponse = { type: "Chat_CompetionUsage", }, ], - isRequired: true, - isNullable: true, }, jobs: { type: "array", diff --git a/sdks/ts/src/api/schemas/$Chat_BaseTokenLogProb.ts b/sdks/ts/src/api/schemas/$Chat_BaseTokenLogProb.ts index e9cd03715..e7af2a307 100644 --- a/sdks/ts/src/api/schemas/$Chat_BaseTokenLogProb.ts +++ b/sdks/ts/src/api/schemas/$Chat_BaseTokenLogProb.ts @@ -20,8 +20,6 @@ export const $Chat_BaseTokenLogProb = { type: "number", format: "uint16", }, - isRequired: true, - isNullable: true, }, }, } as const; diff --git a/sdks/ts/src/api/schemas/$Chat_ChatInput.ts b/sdks/ts/src/api/schemas/$Chat_ChatInput.ts new file mode 100644 index 000000000..538b90e76 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Chat_ChatInput.ts @@ -0,0 +1,133 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Chat_ChatInput = { + type: "all-of", + contains: [ + { + type: "Chat_ChatInputData", + }, + { + properties: { + remember: { + type: "boolean", + description: `DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release)`, + isReadOnly: true, + isRequired: true, + }, + recall: { + type: "boolean", + description: `Whether previous memories and docs should be recalled or not`, + isRequired: true, + }, + save: { + type: "boolean", + description: `Whether this interaction should be stored in the session history or not`, + isRequired: true, + }, + model: { + type: "all-of", + description: `Identifier of the model to be used`, + contains: [ + { + type: "Common_identifierSafeUnicode", + }, + ], + }, + stream: { + type: "boolean", + description: `Indicates if the server should stream the response as it's generated`, + isRequired: true, + }, + stop: { + type: "array", + contains: { + type: "string", + }, + }, + seed: { + type: "number", + description: `If specified, the system will make a best effort to sample deterministically for that particular seed value`, + format: "int16", + maximum: 1000, + minimum: -1, + }, + max_tokens: { + type: "number", + description: `The maximum number of tokens to generate in the chat completion`, + format: "uint32", + minimum: 1, + }, + logit_bias: { + type: "dictionary", + contains: { + type: "Common_logit_bias", + }, + }, + response_format: { + type: "all-of", + description: `Response format (set to \`json_object\` to restrict output to JSON)`, + contains: [ + { + type: "Chat_CompletionResponseFormat", + }, + ], + }, + agent: { + type: "all-of", + description: `Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions)`, + contains: [ + { + type: "Common_uuid", + }, + ], + }, + repetition_penalty: { + type: "number", + description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, + format: "float", + maximum: 2, + }, + length_penalty: { + type: "number", + description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated.`, + format: "float", + maximum: 2, + }, + min_p: { + type: "number", + description: `Minimum probability compared to leading token to be considered`, + format: "float", + maximum: 1, + }, + frequency_penalty: { + type: "number", + description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, + format: "float", + maximum: 2, + minimum: -2, + }, + presence_penalty: { + type: "number", + description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, + format: "float", + maximum: 2, + minimum: -2, + }, + temperature: { + type: "number", + description: `What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.`, + format: "float", + maximum: 5, + }, + top_p: { + type: "number", + description: `Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.`, + format: "float", + maximum: 1, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Chat_ChatInputData.ts b/sdks/ts/src/api/schemas/$Chat_ChatInputData.ts new file mode 100644 index 000000000..3206351d5 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Chat_ChatInputData.ts @@ -0,0 +1,68 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Chat_ChatInputData = { + properties: { + messages: { + type: "array", + contains: { + properties: { + role: { + type: "all-of", + description: `The role of the message`, + contains: [ + { + type: "Entries_ChatMLRole", + }, + ], + isRequired: true, + }, + content: { + type: "any-of", + description: `The content parts of the message`, + contains: [ + { + type: "string", + }, + { + type: "array", + contains: { + type: "string", + }, + }, + ], + isRequired: true, + }, + name: { + type: "string", + description: `Name`, + }, + continue: { + type: "boolean", + description: `Whether to continue this message or return a new one`, + }, + }, + }, + isRequired: true, + }, + tools: { + type: "array", + contains: { + type: "Tools_FunctionTool", + }, + }, + tool_choice: { + type: "any-of", + description: `Can be one of existing tools given to the agent earlier or the ones provided in this request.`, + contains: [ + { + type: "Enum", + }, + { + type: "Tools_NamedToolChoice", + }, + ], + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Chat_ChatOutputChunk.ts b/sdks/ts/src/api/schemas/$Chat_ChatOutputChunk.ts index 2ad1c1175..869be9d62 100644 --- a/sdks/ts/src/api/schemas/$Chat_ChatOutputChunk.ts +++ b/sdks/ts/src/api/schemas/$Chat_ChatOutputChunk.ts @@ -12,13 +12,43 @@ export const $Chat_ChatOutputChunk = { { properties: { delta: { - type: "all-of", description: `The message generated by the model`, - contains: [ - { - type: "Entries_ChatMLMessage", + properties: { + role: { + type: "all-of", + description: `The role of the message`, + contains: [ + { + type: "Entries_ChatMLRole", + }, + ], + isRequired: true, }, - ], + content: { + type: "any-of", + description: `The content parts of the message`, + contains: [ + { + type: "string", + }, + { + type: "array", + contains: { + type: "string", + }, + }, + ], + isRequired: true, + }, + name: { + type: "string", + description: `Name`, + }, + continue: { + type: "boolean", + description: `Whether to continue this message or return a new one`, + }, + }, isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Chat_ChatSettings.ts b/sdks/ts/src/api/schemas/$Chat_ChatSettings.ts new file mode 100644 index 000000000..3c123c30c --- /dev/null +++ b/sdks/ts/src/api/schemas/$Chat_ChatSettings.ts @@ -0,0 +1,73 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Chat_ChatSettings = { + type: "all-of", + contains: [ + { + type: "Chat_DefaultChatSettings", + }, + { + properties: { + model: { + type: "all-of", + description: `Identifier of the model to be used`, + contains: [ + { + type: "Common_identifierSafeUnicode", + }, + ], + }, + stream: { + type: "boolean", + description: `Indicates if the server should stream the response as it's generated`, + isRequired: true, + }, + stop: { + type: "array", + contains: { + type: "string", + }, + }, + seed: { + type: "number", + description: `If specified, the system will make a best effort to sample deterministically for that particular seed value`, + format: "int16", + maximum: 1000, + minimum: -1, + }, + max_tokens: { + type: "number", + description: `The maximum number of tokens to generate in the chat completion`, + format: "uint32", + minimum: 1, + }, + logit_bias: { + type: "dictionary", + contains: { + type: "Common_logit_bias", + }, + }, + response_format: { + type: "all-of", + description: `Response format (set to \`json_object\` to restrict output to JSON)`, + contains: [ + { + type: "Chat_CompletionResponseFormat", + }, + ], + }, + agent: { + type: "all-of", + description: `Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions)`, + contains: [ + { + type: "Common_uuid", + }, + ], + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Chat_CompetionUsage.ts b/sdks/ts/src/api/schemas/$Chat_CompetionUsage.ts index 556114810..d8f34cb14 100644 --- a/sdks/ts/src/api/schemas/$Chat_CompetionUsage.ts +++ b/sdks/ts/src/api/schemas/$Chat_CompetionUsage.ts @@ -9,21 +9,18 @@ export const $Chat_CompetionUsage = { type: "number", description: `Number of tokens in the generated completion`, isReadOnly: true, - isRequired: true, format: "uint32", }, prompt_tokens: { type: "number", description: `Number of tokens in the prompt`, isReadOnly: true, - isRequired: true, format: "uint32", }, total_tokens: { type: "number", description: `Total number of tokens used in the request (prompt + completion)`, isReadOnly: true, - isRequired: true, format: "uint32", }, }, diff --git a/sdks/ts/src/api/schemas/$Chat_DefaultChatSettings.ts b/sdks/ts/src/api/schemas/$Chat_DefaultChatSettings.ts new file mode 100644 index 000000000..573bf31bd --- /dev/null +++ b/sdks/ts/src/api/schemas/$Chat_DefaultChatSettings.ts @@ -0,0 +1,35 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Chat_DefaultChatSettings = { + type: "all-of", + description: `Default settings for the chat session (also used by the agent)`, + contains: [ + { + type: "Chat_OpenAISettings", + }, + { + properties: { + repetition_penalty: { + type: "number", + description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, + format: "float", + maximum: 2, + }, + length_penalty: { + type: "number", + description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated.`, + format: "float", + maximum: 2, + }, + min_p: { + type: "number", + description: `Minimum probability compared to leading token to be considered`, + format: "float", + maximum: 1, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Chat_GenerationPresetSettings.ts b/sdks/ts/src/api/schemas/$Chat_GenerationPresetSettings.ts deleted file mode 100644 index fd8b68db1..000000000 --- a/sdks/ts/src/api/schemas/$Chat_GenerationPresetSettings.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -export const $Chat_GenerationPresetSettings = { - properties: { - preset: { - type: "all-of", - description: `Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual)`, - contains: [ - { - type: "Chat_GenerationPreset", - }, - ], - }, - }, -} as const; diff --git a/sdks/ts/src/api/schemas/$Chat_MessageChatResponse.ts b/sdks/ts/src/api/schemas/$Chat_MessageChatResponse.ts index e8a2a0161..8b80d8698 100644 --- a/sdks/ts/src/api/schemas/$Chat_MessageChatResponse.ts +++ b/sdks/ts/src/api/schemas/$Chat_MessageChatResponse.ts @@ -13,7 +13,15 @@ export const $Chat_MessageChatResponse = { choices: { type: "array", contains: { - type: "Chat_ChatOutputChunk", + type: "any-of", + contains: [ + { + type: "Chat_SingleChatOutput", + }, + { + type: "Chat_MultipleChatOutput", + }, + ], }, isRequired: true, }, diff --git a/sdks/ts/src/api/schemas/$Chat_MultipleChatOutput.ts b/sdks/ts/src/api/schemas/$Chat_MultipleChatOutput.ts index 8dc1b3bb7..2b54621f5 100644 --- a/sdks/ts/src/api/schemas/$Chat_MultipleChatOutput.ts +++ b/sdks/ts/src/api/schemas/$Chat_MultipleChatOutput.ts @@ -14,7 +14,42 @@ export const $Chat_MultipleChatOutput = { messages: { type: "array", contains: { - type: "Entries_ChatMLMessage", + properties: { + role: { + type: "all-of", + description: `The role of the message`, + contains: [ + { + type: "Entries_ChatMLRole", + }, + ], + isRequired: true, + }, + content: { + type: "any-of", + description: `The content parts of the message`, + contains: [ + { + type: "string", + }, + { + type: "array", + contains: { + type: "string", + }, + }, + ], + isRequired: true, + }, + name: { + type: "string", + description: `Name`, + }, + continue: { + type: "boolean", + description: `Whether to continue this message or return a new one`, + }, + }, }, isRequired: true, }, diff --git a/sdks/ts/src/api/schemas/$Chat_SingleChatOutput.ts b/sdks/ts/src/api/schemas/$Chat_SingleChatOutput.ts index 1cd376f3f..e5f68cf9b 100644 --- a/sdks/ts/src/api/schemas/$Chat_SingleChatOutput.ts +++ b/sdks/ts/src/api/schemas/$Chat_SingleChatOutput.ts @@ -12,7 +12,42 @@ export const $Chat_SingleChatOutput = { { properties: { message: { - type: "Entries_ChatMLMessage", + properties: { + role: { + type: "all-of", + description: `The role of the message`, + contains: [ + { + type: "Entries_ChatMLRole", + }, + ], + isRequired: true, + }, + content: { + type: "any-of", + description: `The content parts of the message`, + contains: [ + { + type: "string", + }, + { + type: "array", + contains: { + type: "string", + }, + }, + ], + isRequired: true, + }, + name: { + type: "string", + description: `Name`, + }, + continue: { + type: "boolean", + description: `Whether to continue this message or return a new one`, + }, + }, isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Chat_vLLMSettings.ts b/sdks/ts/src/api/schemas/$Chat_vLLMSettings.ts deleted file mode 100644 index 5006b6775..000000000 --- a/sdks/ts/src/api/schemas/$Chat_vLLMSettings.ts +++ /dev/null @@ -1,38 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -export const $Chat_vLLMSettings = { - properties: { - repetition_penalty: { - type: "number", - description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, - format: "float", - maximum: 2, - }, - length_penalty: { - type: "number", - description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated.`, - format: "float", - maximum: 2, - }, - temperature: { - type: "number", - description: `What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.`, - format: "float", - maximum: 5, - }, - top_p: { - type: "number", - description: `Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.`, - format: "float", - maximum: 1, - }, - min_p: { - type: "number", - description: `Minimum probability compared to leading token to be considered`, - format: "float", - maximum: 1, - }, - }, -} as const; diff --git a/sdks/ts/src/api/schemas/$Common_JinjaTemplate.ts b/sdks/ts/src/api/schemas/$Common_JinjaTemplate.ts new file mode 100644 index 000000000..903a1579b --- /dev/null +++ b/sdks/ts/src/api/schemas/$Common_JinjaTemplate.ts @@ -0,0 +1,8 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Common_JinjaTemplate = { + type: "string", + description: `A valid jinja template.`, +} as const; diff --git a/sdks/ts/src/api/schemas/$Common_identifierSafeUnicode.ts b/sdks/ts/src/api/schemas/$Common_identifierSafeUnicode.ts index e3e2e9f0a..75cd72df6 100644 --- a/sdks/ts/src/api/schemas/$Common_identifierSafeUnicode.ts +++ b/sdks/ts/src/api/schemas/$Common_identifierSafeUnicode.ts @@ -7,6 +7,7 @@ export const $Common_identifierSafeUnicode = { description: `For Unicode character safety See: https://unicode.org/reports/tr31/ See: https://www.unicode.org/reports/tr39/#Identifier_Characters`, + maxLength: 120, pattern: "^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$", } as const; diff --git a/sdks/ts/src/api/schemas/$Common_validPythonIdentifier.ts b/sdks/ts/src/api/schemas/$Common_validPythonIdentifier.ts index 7be2759cc..89c378739 100644 --- a/sdks/ts/src/api/schemas/$Common_validPythonIdentifier.ts +++ b/sdks/ts/src/api/schemas/$Common_validPythonIdentifier.ts @@ -5,5 +5,6 @@ export const $Common_validPythonIdentifier = { type: "string", description: `Valid python identifier names`, + maxLength: 40, pattern: "^[^\\W0-9]\\w*$", } as const; diff --git a/sdks/ts/src/api/schemas/$Docs_BaseDocSearchRequest.ts b/sdks/ts/src/api/schemas/$Docs_BaseDocSearchRequest.ts index 00b992770..99188755e 100644 --- a/sdks/ts/src/api/schemas/$Docs_BaseDocSearchRequest.ts +++ b/sdks/ts/src/api/schemas/$Docs_BaseDocSearchRequest.ts @@ -4,22 +4,12 @@ /* eslint-disable */ export const $Docs_BaseDocSearchRequest = { properties: { - confidence: { + limit: { type: "number", - description: `The confidence cutoff level`, - isRequired: true, - maximum: 1, - }, - alpha: { - type: "number", - description: `The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector;`, - isRequired: true, - maximum: 1, - }, - mmr: { - type: "boolean", - description: `Whether to include the MMR algorithm in the search. Optimizes for diversity in search results.`, isRequired: true, + format: "uint16", + maximum: 100, + minimum: 1, }, lang: { type: "Enum", diff --git a/sdks/ts/src/api/schemas/$Entries_InputChatMLMessage.ts b/sdks/ts/src/api/schemas/$Docs_CreateDocRequest.ts similarity index 52% rename from sdks/ts/src/api/schemas/$Entries_InputChatMLMessage.ts rename to sdks/ts/src/api/schemas/$Docs_CreateDocRequest.ts index 2f888d08b..4af7128e1 100644 --- a/sdks/ts/src/api/schemas/$Entries_InputChatMLMessage.ts +++ b/sdks/ts/src/api/schemas/$Docs_CreateDocRequest.ts @@ -2,21 +2,24 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export const $Entries_InputChatMLMessage = { +export const $Docs_CreateDocRequest = { + description: `Payload for creating a doc`, properties: { - role: { - type: "all-of", - description: `The role of the message`, - contains: [ - { - type: "Entries_ChatMLRole", - }, - ], + metadata: { + type: "dictionary", + contains: { + properties: {}, + }, + }, + title: { + type: "string", + description: `Title describing what this document contains`, isRequired: true, + maxLength: 800, }, content: { type: "any-of", - description: `The content parts of the message`, + description: `Contents of the document`, contains: [ { type: "string", @@ -30,13 +33,5 @@ export const $Entries_InputChatMLMessage = { ], isRequired: true, }, - name: { - type: "string", - description: `Name`, - }, - continue: { - type: "boolean", - description: `Whether to continue this message or return a new one`, - }, }, } as const; diff --git a/sdks/ts/src/api/schemas/$Docs_Doc.ts b/sdks/ts/src/api/schemas/$Docs_Doc.ts index d6c622cb0..f77ec6d23 100644 --- a/sdks/ts/src/api/schemas/$Docs_Doc.ts +++ b/sdks/ts/src/api/schemas/$Docs_Doc.ts @@ -28,14 +28,10 @@ export const $Docs_Doc = { format: "date-time", }, title: { - type: "all-of", + type: "string", description: `Title describing what this document contains`, - contains: [ - { - type: "Common_identifierSafeUnicode", - }, - ], isRequired: true, + maxLength: 800, }, content: { type: "any-of", diff --git a/sdks/ts/src/api/schemas/$Docs_DocReference.ts b/sdks/ts/src/api/schemas/$Docs_DocReference.ts index 5c02863c3..e8788d4ba 100644 --- a/sdks/ts/src/api/schemas/$Docs_DocReference.ts +++ b/sdks/ts/src/api/schemas/$Docs_DocReference.ts @@ -25,19 +25,20 @@ export const $Docs_DocReference = { isReadOnly: true, isRequired: true, }, - snippet_index: { + title: { + type: "string", + }, + snippets: { type: "array", contains: { - type: "number", - format: "uint16", + type: "Docs_Snippet", }, isRequired: true, }, - title: { - type: "string", - }, - snippet: { - type: "string", + distance: { + type: "number", + isRequired: true, + isNullable: true, }, }, } as const; diff --git a/sdks/ts/src/api/schemas/$Docs_DocSearchResponse.ts b/sdks/ts/src/api/schemas/$Docs_DocSearchResponse.ts new file mode 100644 index 000000000..df2b37b48 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Docs_DocSearchResponse.ts @@ -0,0 +1,21 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Docs_DocSearchResponse = { + properties: { + docs: { + type: "array", + contains: { + type: "Docs_DocReference", + }, + isRequired: true, + }, + time: { + type: "number", + description: `The time taken to search in seconds`, + isRequired: true, + exclusiveMinimum: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Docs_HybridDocSearchRequest.ts b/sdks/ts/src/api/schemas/$Docs_HybridDocSearchRequest.ts index a6e9dfcad..2bc5005fb 100644 --- a/sdks/ts/src/api/schemas/$Docs_HybridDocSearchRequest.ts +++ b/sdks/ts/src/api/schemas/$Docs_HybridDocSearchRequest.ts @@ -10,42 +10,28 @@ export const $Docs_HybridDocSearchRequest = { }, { properties: { + confidence: { + type: "number", + description: `The confidence cutoff level`, + isRequired: true, + maximum: 1, + }, + alpha: { + type: "number", + description: `The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector;`, + isRequired: true, + maximum: 1, + }, text: { - type: "any-of", - description: `Text or texts to use in the search. In \`hybrid\` search mode, either \`text\` or both \`text\` and \`vector\` fields are required.`, - contains: [ - { - type: "string", - }, - { - type: "array", - contains: { - type: "string", - }, - }, - ], + type: "string", + description: `Text to use in the search. In \`hybrid\` search mode, either \`text\` or both \`text\` and \`vector\` fields are required.`, isRequired: true, }, vector: { - type: "any-of", - description: `Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown.`, - contains: [ - { - type: "array", - contains: { - type: "number", - }, - }, - { - type: "array", - contains: { - type: "array", - contains: { - type: "number", - }, - }, - }, - ], + type: "array", + contains: { + type: "number", + }, isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Entries_ChatMLTextContentPart.ts b/sdks/ts/src/api/schemas/$Docs_Snippet.ts similarity index 70% rename from sdks/ts/src/api/schemas/$Entries_ChatMLTextContentPart.ts rename to sdks/ts/src/api/schemas/$Docs_Snippet.ts index 1701225c5..36b6deef2 100644 --- a/sdks/ts/src/api/schemas/$Entries_ChatMLTextContentPart.ts +++ b/sdks/ts/src/api/schemas/$Docs_Snippet.ts @@ -2,14 +2,15 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export const $Entries_ChatMLTextContentPart = { +export const $Docs_Snippet = { properties: { - text: { - type: "string", + index: { + type: "number", isRequired: true, + format: "uint16", }, - type: { - type: "Enum", + content: { + type: "string", isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Docs_TextOnlyDocSearchRequest.ts b/sdks/ts/src/api/schemas/$Docs_TextOnlyDocSearchRequest.ts index 94a19e134..b9711dbc4 100644 --- a/sdks/ts/src/api/schemas/$Docs_TextOnlyDocSearchRequest.ts +++ b/sdks/ts/src/api/schemas/$Docs_TextOnlyDocSearchRequest.ts @@ -11,19 +11,8 @@ export const $Docs_TextOnlyDocSearchRequest = { { properties: { text: { - type: "any-of", - description: `Text or texts to use in the search.`, - contains: [ - { - type: "string", - }, - { - type: "array", - contains: { - type: "string", - }, - }, - ], + type: "string", + description: `Text to use in the search.`, isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Docs_VectorDocSearchRequest.ts b/sdks/ts/src/api/schemas/$Docs_VectorDocSearchRequest.ts index cceba2263..af6de0b12 100644 --- a/sdks/ts/src/api/schemas/$Docs_VectorDocSearchRequest.ts +++ b/sdks/ts/src/api/schemas/$Docs_VectorDocSearchRequest.ts @@ -10,26 +10,17 @@ export const $Docs_VectorDocSearchRequest = { }, { properties: { + confidence: { + type: "number", + description: `The confidence cutoff level`, + isRequired: true, + maximum: 1, + }, vector: { - type: "any-of", - description: `Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown.`, - contains: [ - { - type: "array", - contains: { - type: "number", - }, - }, - { - type: "array", - contains: { - type: "array", - contains: { - type: "number", - }, - }, - }, - ], + type: "array", + contains: { + type: "number", + }, isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Entries_BaseEntry.ts b/sdks/ts/src/api/schemas/$Entries_BaseEntry.ts index 6aad5206a..bcdb7122e 100644 --- a/sdks/ts/src/api/schemas/$Entries_BaseEntry.ts +++ b/sdks/ts/src/api/schemas/$Entries_BaseEntry.ts @@ -37,9 +37,11 @@ export const $Entries_BaseEntry = { }, tokenizer: { type: "string", + isRequired: true, }, token_count: { type: "number", + isRequired: true, format: "uint16", }, timestamp: { diff --git a/sdks/ts/src/api/schemas/$Entries_ChatMLMessage.ts b/sdks/ts/src/api/schemas/$Entries_ChatMLMessage.ts deleted file mode 100644 index a9a55cee2..000000000 --- a/sdks/ts/src/api/schemas/$Entries_ChatMLMessage.ts +++ /dev/null @@ -1,63 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -export const $Entries_ChatMLMessage = { - properties: { - role: { - type: "all-of", - description: `The role of the message`, - contains: [ - { - type: "Entries_ChatMLRole", - }, - ], - isRequired: true, - }, - content: { - type: "any-of", - description: `The content parts of the message`, - contains: [ - { - type: "string", - }, - { - type: "array", - contains: { - type: "string", - }, - }, - ], - isRequired: true, - }, - name: { - type: "string", - description: `Name`, - }, - tool_calls: { - type: "array", - contains: { - type: "Tools_ChosenToolCall", - }, - isReadOnly: true, - isRequired: true, - }, - created_at: { - type: "string", - description: `When this resource was created as UTC date-time`, - isReadOnly: true, - isRequired: true, - format: "date-time", - }, - id: { - type: "all-of", - contains: [ - { - type: "Common_uuid", - }, - ], - isReadOnly: true, - isRequired: true, - }, - }, -} as const; diff --git a/sdks/ts/src/api/schemas/$Entries_History.ts b/sdks/ts/src/api/schemas/$Entries_History.ts index 208d12b46..c75c70678 100644 --- a/sdks/ts/src/api/schemas/$Entries_History.ts +++ b/sdks/ts/src/api/schemas/$Entries_History.ts @@ -7,7 +7,7 @@ export const $Entries_History = { entries: { type: "array", contains: { - type: "Entries_BaseEntry", + type: "Entries_Entry", }, isRequired: true, }, diff --git a/sdks/ts/src/api/schemas/$Entries_ImageURL.ts b/sdks/ts/src/api/schemas/$Entries_ImageURL.ts deleted file mode 100644 index 07dc23052..000000000 --- a/sdks/ts/src/api/schemas/$Entries_ImageURL.ts +++ /dev/null @@ -1,24 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -export const $Entries_ImageURL = { - properties: { - url: { - type: "string", - description: `Image URL or base64 data url (e.g. \`data:image/jpeg;base64,\`)`, - isRequired: true, - format: "uri", - }, - detail: { - type: "all-of", - description: `The detail level of the image`, - contains: [ - { - type: "Entries_ImageDetail", - }, - ], - isRequired: true, - }, - }, -} as const; diff --git a/sdks/ts/src/api/schemas/$Executions_TaskTokenResumeExecutionRequest.ts b/sdks/ts/src/api/schemas/$Executions_TaskTokenResumeExecutionRequest.ts index c6ecd34b4..fd85a8716 100644 --- a/sdks/ts/src/api/schemas/$Executions_TaskTokenResumeExecutionRequest.ts +++ b/sdks/ts/src/api/schemas/$Executions_TaskTokenResumeExecutionRequest.ts @@ -8,11 +8,6 @@ export const $Executions_TaskTokenResumeExecutionRequest = { type: "Enum", isRequired: true, }, - task_token: { - type: "string", - description: `A Task Token is a unique identifier for a specific Task Execution.`, - isRequired: true, - }, input: { type: "dictionary", contains: { diff --git a/sdks/ts/src/api/schemas/$Executions_Transition.ts b/sdks/ts/src/api/schemas/$Executions_Transition.ts index 223abfb13..45731aff9 100644 --- a/sdks/ts/src/api/schemas/$Executions_Transition.ts +++ b/sdks/ts/src/api/schemas/$Executions_Transition.ts @@ -20,26 +20,27 @@ export const $Executions_Transition = { isRequired: true, }, output: { - type: "dictionary", - contains: { - properties: {}, - }, + properties: {}, isReadOnly: true, isRequired: true, }, current: { - type: "array", - contains: { - properties: {}, - }, + type: "all-of", + contains: [ + { + type: "Executions_TransitionTarget", + }, + ], isReadOnly: true, isRequired: true, }, next: { - type: "array", - contains: { - properties: {}, - }, + type: "all-of", + contains: [ + { + type: "Executions_TransitionTarget", + }, + ], isReadOnly: true, isRequired: true, isNullable: true, diff --git a/sdks/ts/src/api/schemas/$Executions_TransitionTarget.ts b/sdks/ts/src/api/schemas/$Executions_TransitionTarget.ts new file mode 100644 index 000000000..62d5b4962 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Executions_TransitionTarget.ts @@ -0,0 +1,17 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Executions_TransitionTarget = { + properties: { + workflow: { + type: "Common_identifierSafeUnicode", + isRequired: true, + }, + step: { + type: "number", + isRequired: true, + format: "uint16", + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Sessions_CreateOrUpdateSessionRequest.ts b/sdks/ts/src/api/schemas/$Sessions_CreateOrUpdateSessionRequest.ts new file mode 100644 index 000000000..16720dd71 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Sessions_CreateOrUpdateSessionRequest.ts @@ -0,0 +1,84 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Sessions_CreateOrUpdateSessionRequest = { + type: "all-of", + contains: [ + { + type: "Sessions_CreateSessionRequest", + }, + { + properties: { + id: { + type: "Common_uuid", + isRequired: true, + }, + user: { + type: "all-of", + description: `User ID of user associated with this session`, + contains: [ + { + type: "Common_uuid", + }, + ], + }, + users: { + type: "array", + contains: { + type: "Common_uuid", + }, + }, + agent: { + type: "all-of", + description: `Agent ID of agent associated with this session`, + contains: [ + { + type: "Common_uuid", + }, + ], + }, + agents: { + type: "array", + contains: { + type: "Common_uuid", + }, + }, + situation: { + type: "string", + description: `A specific situation that sets the background for this session`, + isRequired: true, + }, + render_templates: { + type: "boolean", + description: `Render system and assistant message content as jinja templates`, + isRequired: true, + }, + token_budget: { + type: "number", + description: `Threshold value for the adaptive context functionality`, + isRequired: true, + isNullable: true, + format: "uint16", + }, + context_overflow: { + type: "one-of", + description: `Action to start on context window overflow`, + contains: [ + { + type: "Sessions_ContextOverflowType", + }, + ], + isRequired: true, + isNullable: true, + }, + metadata: { + type: "dictionary", + contains: { + properties: {}, + }, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_CaseThen.ts b/sdks/ts/src/api/schemas/$Tasks_CaseThen.ts new file mode 100644 index 000000000..4507fa803 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_CaseThen.ts @@ -0,0 +1,67 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_CaseThen = { + properties: { + case: { + type: "any-of", + description: `The condition to evaluate`, + contains: [ + { + type: "Common_PyExpression", + }, + { + type: "Enum", + }, + ], + isRequired: true, + }, + then: { + type: "any-of", + description: `The steps to run if the condition is true`, + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", + }, + { + type: "Tasks_ErrorWorkflowStep", + }, + { + type: "Tasks_YieldStep", + }, + { + type: "Tasks_WaitForInputStep", + }, + ], + isRequired: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts b/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts index 7c15a448d..882c1cf13 100644 --- a/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts +++ b/sdks/ts/src/api/schemas/$Tasks_CreateTaskRequest.ts @@ -16,20 +16,128 @@ export const $Tasks_CreateTaskRequest = { type: "Tasks_ToolCallStep", }, { - type: "Tasks_YieldStep", + type: "Tasks_PromptStep", }, { - type: "Tasks_PromptStep", + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", }, { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_YieldStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + over: { + type: "all-of", + description: `The variable to iterate over`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + map: { + type: "any-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + ], + isRequired: true, + }, + reduce: { + type: "all-of", + description: `The expression to reduce the results. + If not provided, the results are collected and returned as a list. + A special parameter named \`results\` is the accumulator and \`_\` is the current value.`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + }, + initial: { + properties: {}, + }, + }, + }, + ], + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_EmbedStep.ts b/sdks/ts/src/api/schemas/$Tasks_EmbedStep.ts new file mode 100644 index 000000000..007215dec --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_EmbedStep.ts @@ -0,0 +1,37 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_EmbedStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + embed: { + type: "all-of", + description: `The text to embed`, + contains: [ + { + type: "Docs_EmbedQueryRequest", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ErrorWorkflowStep.ts b/sdks/ts/src/api/schemas/$Tasks_ErrorWorkflowStep.ts index 31e9cd44b..371a8952a 100644 --- a/sdks/ts/src/api/schemas/$Tasks_ErrorWorkflowStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_ErrorWorkflowStep.ts @@ -6,12 +6,19 @@ export const $Tasks_ErrorWorkflowStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, error: { diff --git a/sdks/ts/src/api/schemas/$Tasks_EvaluateStep.ts b/sdks/ts/src/api/schemas/$Tasks_EvaluateStep.ts index 5f2fbf871..1ba2687fe 100644 --- a/sdks/ts/src/api/schemas/$Tasks_EvaluateStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_EvaluateStep.ts @@ -6,12 +6,19 @@ export const $Tasks_EvaluateStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, evaluate: { diff --git a/sdks/ts/src/api/schemas/$Tasks_ForeachDo.ts b/sdks/ts/src/api/schemas/$Tasks_ForeachDo.ts new file mode 100644 index 000000000..920c95f63 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ForeachDo.ts @@ -0,0 +1,50 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ForeachDo = { + properties: { + in: { + type: "all-of", + description: `The variable to iterate over. + VALIDATION: Should NOT return more than 1000 elements.`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + do: { + type: "any-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + ], + isRequired: true, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ForeachStep.ts b/sdks/ts/src/api/schemas/$Tasks_ForeachStep.ts new file mode 100644 index 000000000..d458e4bd5 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ForeachStep.ts @@ -0,0 +1,37 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ForeachStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + foreach: { + type: "all-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_ForeachDo", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_GetStep.ts b/sdks/ts/src/api/schemas/$Tasks_GetStep.ts new file mode 100644 index 000000000..d09852322 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_GetStep.ts @@ -0,0 +1,32 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_GetStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + get: { + type: "string", + description: `The key to get`, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts b/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts index ae1c3bbaf..06fa47e33 100644 --- a/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_IfElseWorkflowStep.ts @@ -6,12 +6,19 @@ export const $Tasks_IfElseWorkflowStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, if: { @@ -29,17 +36,41 @@ export const $Tasks_IfElseWorkflowStep = { description: `The steps to run if the condition is true`, contains: [ { - type: "Tasks_ToolCallStep", + type: "Tasks_EvaluateStep", }, { - type: "Tasks_YieldStep", + type: "Tasks_ToolCallStep", }, { type: "Tasks_PromptStep", }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", + }, { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_YieldStep", + }, { type: "Tasks_WaitForInputStep", }, @@ -51,17 +82,41 @@ export const $Tasks_IfElseWorkflowStep = { description: `The steps to run if the condition is false`, contains: [ { - type: "Tasks_ToolCallStep", + type: "Tasks_EvaluateStep", }, { - type: "Tasks_YieldStep", + type: "Tasks_ToolCallStep", }, { type: "Tasks_PromptStep", }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", + }, { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_YieldStep", + }, { type: "Tasks_WaitForInputStep", }, diff --git a/sdks/ts/src/api/schemas/$Tasks_LogStep.ts b/sdks/ts/src/api/schemas/$Tasks_LogStep.ts new file mode 100644 index 000000000..0ed1b16df --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_LogStep.ts @@ -0,0 +1,37 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_LogStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + log: { + type: "all-of", + description: `The value to log`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ParallelStep.ts b/sdks/ts/src/api/schemas/$Tasks_ParallelStep.ts new file mode 100644 index 000000000..fd6fab551 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ParallelStep.ts @@ -0,0 +1,60 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ParallelStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + parallel: { + type: "array", + contains: { + type: "any-of", + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + ], + }, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts b/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts index 50c6201fe..45ad2927f 100644 --- a/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts +++ b/sdks/ts/src/api/schemas/$Tasks_PatchTaskRequest.ts @@ -16,20 +16,122 @@ export const $Tasks_PatchTaskRequest = { type: "Tasks_ToolCallStep", }, { - type: "Tasks_YieldStep", + type: "Tasks_PromptStep", }, { - type: "Tasks_PromptStep", + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", }, { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_YieldStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "string", + description: `Discriminator property for BaseWorkflowStep.`, + }, + }, + }, + { + properties: { + over: { + type: "all-of", + description: `The variable to iterate over`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + map: { + type: "any-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + ], + isRequired: true, + }, + reduce: { + type: "all-of", + description: `The expression to reduce the results. + If not provided, the results are collected and returned as a list. + A special parameter named \`results\` is the accumulator and \`_\` is the current value.`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + }, + initial: { + properties: {}, + }, + }, + }, + ], + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_PromptStep.ts b/sdks/ts/src/api/schemas/$Tasks_PromptStep.ts index 2abb023b2..2e133936f 100644 --- a/sdks/ts/src/api/schemas/$Tasks_PromptStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_PromptStep.ts @@ -6,12 +6,19 @@ export const $Tasks_PromptStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, prompt: { @@ -19,271 +26,19 @@ export const $Tasks_PromptStep = { description: `The prompt to run`, contains: [ { - type: "string", - }, - { - type: "array", - contains: { - type: "Entries_InputChatMLMessage", - }, + type: "Common_JinjaTemplate", }, ], isRequired: true, }, settings: { - type: "any-of", + type: "all-of", description: `Settings for the prompt`, contains: [ { - properties: { - model: { - type: "all-of", - description: `Identifier of the model to be used`, - contains: [ - { - type: "Common_identifierSafeUnicode", - }, - ], - }, - stream: { - type: "boolean", - description: `Indicates if the server should stream the response as it's generated`, - isRequired: true, - }, - stop: { - type: "array", - contains: { - type: "string", - }, - }, - seed: { - type: "number", - description: `If specified, the system will make a best effort to sample deterministically for that particular seed value`, - format: "int16", - maximum: 1000, - minimum: -1, - }, - max_tokens: { - type: "number", - description: `The maximum number of tokens to generate in the chat completion`, - format: "uint32", - minimum: 1, - }, - logit_bias: { - type: "dictionary", - contains: { - type: "Common_logit_bias", - }, - }, - response_format: { - type: "all-of", - description: `Response format (set to \`json_object\` to restrict output to JSON)`, - contains: [ - { - type: "Chat_CompletionResponseFormat", - }, - ], - }, - agent: { - type: "all-of", - description: `Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions)`, - contains: [ - { - type: "Common_uuid", - }, - ], - }, - preset: { - type: "all-of", - description: `Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual)`, - contains: [ - { - type: "Chat_GenerationPreset", - }, - ], - }, - }, - }, - { - properties: { - model: { - type: "all-of", - description: `Identifier of the model to be used`, - contains: [ - { - type: "Common_identifierSafeUnicode", - }, - ], - }, - stream: { - type: "boolean", - description: `Indicates if the server should stream the response as it's generated`, - isRequired: true, - }, - stop: { - type: "array", - contains: { - type: "string", - }, - }, - seed: { - type: "number", - description: `If specified, the system will make a best effort to sample deterministically for that particular seed value`, - format: "int16", - maximum: 1000, - minimum: -1, - }, - max_tokens: { - type: "number", - description: `The maximum number of tokens to generate in the chat completion`, - format: "uint32", - minimum: 1, - }, - logit_bias: { - type: "dictionary", - contains: { - type: "Common_logit_bias", - }, - }, - response_format: { - type: "all-of", - description: `Response format (set to \`json_object\` to restrict output to JSON)`, - contains: [ - { - type: "Chat_CompletionResponseFormat", - }, - ], - }, - agent: { - type: "all-of", - description: `Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions)`, - contains: [ - { - type: "Common_uuid", - }, - ], - }, - frequency_penalty: { - type: "number", - description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, - format: "float", - maximum: 2, - minimum: -2, - }, - presence_penalty: { - type: "number", - description: `Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, - format: "float", - maximum: 2, - minimum: -2, - }, - temperature: { - type: "number", - description: `What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.`, - format: "float", - maximum: 5, - }, - top_p: { - type: "number", - description: `Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.`, - format: "float", - maximum: 1, - }, - }, - }, - { - properties: { - model: { - type: "all-of", - description: `Identifier of the model to be used`, - contains: [ - { - type: "Common_identifierSafeUnicode", - }, - ], - }, - stream: { - type: "boolean", - description: `Indicates if the server should stream the response as it's generated`, - isRequired: true, - }, - stop: { - type: "array", - contains: { - type: "string", - }, - }, - seed: { - type: "number", - description: `If specified, the system will make a best effort to sample deterministically for that particular seed value`, - format: "int16", - maximum: 1000, - minimum: -1, - }, - max_tokens: { - type: "number", - description: `The maximum number of tokens to generate in the chat completion`, - format: "uint32", - minimum: 1, - }, - logit_bias: { - type: "dictionary", - contains: { - type: "Common_logit_bias", - }, - }, - response_format: { - type: "all-of", - description: `Response format (set to \`json_object\` to restrict output to JSON)`, - contains: [ - { - type: "Chat_CompletionResponseFormat", - }, - ], - }, - agent: { - type: "all-of", - description: `Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions)`, - contains: [ - { - type: "Common_uuid", - }, - ], - }, - repetition_penalty: { - type: "number", - description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.`, - format: "float", - maximum: 2, - }, - length_penalty: { - type: "number", - description: `Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated.`, - format: "float", - maximum: 2, - }, - temperature: { - type: "number", - description: `What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.`, - format: "float", - maximum: 5, - }, - top_p: { - type: "number", - description: `Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.`, - format: "float", - maximum: 1, - }, - min_p: { - type: "number", - description: `Minimum probability compared to leading token to be considered`, - format: "float", - maximum: 1, - }, - }, + type: "Chat_ChatSettings", }, ], - isRequired: true, }, }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_ReturnStep.ts b/sdks/ts/src/api/schemas/$Tasks_ReturnStep.ts new file mode 100644 index 000000000..ae2b95deb --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_ReturnStep.ts @@ -0,0 +1,34 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_ReturnStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + return: { + type: "dictionary", + contains: { + type: "Common_PyExpression", + }, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SearchStep.ts b/sdks/ts/src/api/schemas/$Tasks_SearchStep.ts new file mode 100644 index 000000000..6adbd50d8 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SearchStep.ts @@ -0,0 +1,43 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SearchStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + search: { + type: "any-of", + description: `The search query`, + contains: [ + { + type: "Docs_VectorDocSearchRequest", + }, + { + type: "Docs_TextOnlyDocSearchRequest", + }, + { + type: "Docs_HybridDocSearchRequest", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Entries_ChatMLImageContentPart.ts b/sdks/ts/src/api/schemas/$Tasks_SetKey.ts similarity index 60% rename from sdks/ts/src/api/schemas/$Entries_ChatMLImageContentPart.ts rename to sdks/ts/src/api/schemas/$Tasks_SetKey.ts index 28026a78b..c93f367d1 100644 --- a/sdks/ts/src/api/schemas/$Entries_ChatMLImageContentPart.ts +++ b/sdks/ts/src/api/schemas/$Tasks_SetKey.ts @@ -2,21 +2,22 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export const $Entries_ChatMLImageContentPart = { +export const $Tasks_SetKey = { properties: { - image_url: { + key: { + type: "string", + description: `The key to set`, + isRequired: true, + }, + value: { type: "all-of", - description: `The image URL`, + description: `The value to set`, contains: [ { - type: "Entries_ImageURL", + type: "Common_PyExpression", }, ], isRequired: true, }, - type: { - type: "Enum", - isRequired: true, - }, }, } as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SetStep.ts b/sdks/ts/src/api/schemas/$Tasks_SetStep.ts new file mode 100644 index 000000000..579d25a13 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SetStep.ts @@ -0,0 +1,37 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SetStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + set: { + type: "all-of", + description: `The value to set`, + contains: [ + { + type: "Tasks_SetKey", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SleepFor.ts b/sdks/ts/src/api/schemas/$Tasks_SleepFor.ts new file mode 100644 index 000000000..025d83c26 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SleepFor.ts @@ -0,0 +1,36 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SleepFor = { + properties: { + seconds: { + type: "number", + description: `The number of seconds to sleep for`, + isRequired: true, + format: "uint16", + maximum: 60, + }, + minutes: { + type: "number", + description: `The number of minutes to sleep for`, + isRequired: true, + format: "uint16", + maximum: 60, + }, + hours: { + type: "number", + description: `The number of hours to sleep for`, + isRequired: true, + format: "uint16", + maximum: 24, + }, + days: { + type: "number", + description: `The number of days to sleep for`, + isRequired: true, + format: "uint16", + maximum: 30, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SleepStep.ts b/sdks/ts/src/api/schemas/$Tasks_SleepStep.ts new file mode 100644 index 000000000..2b69dcf92 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SleepStep.ts @@ -0,0 +1,37 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SleepStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + sleep: { + type: "all-of", + description: `The duration to sleep for (max 31 days)`, + contains: [ + { + type: "Tasks_SleepFor", + }, + ], + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_SwitchStep.ts b/sdks/ts/src/api/schemas/$Tasks_SwitchStep.ts new file mode 100644 index 000000000..31926aacb --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tasks_SwitchStep.ts @@ -0,0 +1,34 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tasks_SwitchStep = { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + switch: { + type: "array", + contains: { + type: "Tasks_CaseThen", + }, + isRequired: true, + }, + }, + }, + ], +} as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_Task.ts b/sdks/ts/src/api/schemas/$Tasks_Task.ts index d0431107d..c89bade35 100644 --- a/sdks/ts/src/api/schemas/$Tasks_Task.ts +++ b/sdks/ts/src/api/schemas/$Tasks_Task.ts @@ -16,20 +16,128 @@ export const $Tasks_Task = { type: "Tasks_ToolCallStep", }, { - type: "Tasks_YieldStep", + type: "Tasks_PromptStep", }, { - type: "Tasks_PromptStep", + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", }, { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_YieldStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + over: { + type: "all-of", + description: `The variable to iterate over`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + map: { + type: "any-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + ], + isRequired: true, + }, + reduce: { + type: "all-of", + description: `The expression to reduce the results. + If not provided, the results are collected and returned as a list. + A special parameter named \`results\` is the accumulator and \`_\` is the current value.`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + }, + initial: { + properties: {}, + }, + }, + }, + ], + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_TaskTool.ts b/sdks/ts/src/api/schemas/$Tasks_TaskTool.ts index f13fe0486..f329deb7c 100644 --- a/sdks/ts/src/api/schemas/$Tasks_TaskTool.ts +++ b/sdks/ts/src/api/schemas/$Tasks_TaskTool.ts @@ -3,43 +3,19 @@ /* tslint:disable */ /* eslint-disable */ export const $Tasks_TaskTool = { - properties: { - inherited: { - type: "boolean", - description: `Read-only: Whether the tool was inherited or not. Only applies within tasks.`, - isReadOnly: true, + type: "all-of", + contains: [ + { + type: "Tools_CreateToolRequest", }, - type: { - type: "all-of", - description: `Whether this tool is a \`function\`, \`api_call\`, \`system\` etc. (Only \`function\` tool supported right now)`, - contains: [ - { - type: "Tools_ToolType", + { + properties: { + inherited: { + type: "boolean", + description: `Read-only: Whether the tool was inherited or not. Only applies within tasks.`, + isReadOnly: true, }, - ], - isRequired: true, + }, }, - name: { - type: "all-of", - description: `Name of the tool (must be unique for this agent and a valid python identifier string )`, - contains: [ - { - type: "Common_validPythonIdentifier", - }, - ], - isRequired: true, - }, - function: { - type: "Tools_FunctionDef", - }, - integration: { - properties: {}, - }, - system: { - properties: {}, - }, - api_call: { - properties: {}, - }, - }, + ], } as const; diff --git a/sdks/ts/src/api/schemas/$Tasks_ToolCallStep.ts b/sdks/ts/src/api/schemas/$Tasks_ToolCallStep.ts index 37f6b0179..9be4582a1 100644 --- a/sdks/ts/src/api/schemas/$Tasks_ToolCallStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_ToolCallStep.ts @@ -6,12 +6,19 @@ export const $Tasks_ToolCallStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, tool: { @@ -25,10 +32,19 @@ export const $Tasks_ToolCallStep = { isRequired: true, }, arguments: { - type: "dictionary", - contains: { - properties: {}, - }, + type: "any-of", + description: `The input parameters for the tool (defaults to last step output)`, + contains: [ + { + type: "dictionary", + contains: { + type: "Common_PyExpression", + }, + }, + { + type: "Enum", + }, + ], isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts b/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts index 5127d8f62..84255c8ca 100644 --- a/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts +++ b/sdks/ts/src/api/schemas/$Tasks_UpdateTaskRequest.ts @@ -16,20 +16,128 @@ export const $Tasks_UpdateTaskRequest = { type: "Tasks_ToolCallStep", }, { - type: "Tasks_YieldStep", + type: "Tasks_PromptStep", }, { - type: "Tasks_PromptStep", + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + { + type: "Tasks_ReturnStep", + }, + { + type: "Tasks_SleepStep", }, { type: "Tasks_ErrorWorkflowStep", }, + { + type: "Tasks_YieldStep", + }, { type: "Tasks_WaitForInputStep", }, { type: "Tasks_IfElseWorkflowStep", }, + { + type: "Tasks_SwitchStep", + }, + { + type: "Tasks_ForeachStep", + }, + { + type: "Tasks_ParallelStep", + }, + { + type: "all-of", + contains: [ + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, + }, + { + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + over: { + type: "all-of", + description: `The variable to iterate over`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + isRequired: true, + }, + map: { + type: "any-of", + description: `The steps to run for each iteration`, + contains: [ + { + type: "Tasks_EvaluateStep", + }, + { + type: "Tasks_ToolCallStep", + }, + { + type: "Tasks_PromptStep", + }, + { + type: "Tasks_GetStep", + }, + { + type: "Tasks_SetStep", + }, + { + type: "Tasks_LogStep", + }, + { + type: "Tasks_EmbedStep", + }, + { + type: "Tasks_SearchStep", + }, + ], + isRequired: true, + }, + reduce: { + type: "all-of", + description: `The expression to reduce the results. + If not provided, the results are collected and returned as a list. + A special parameter named \`results\` is the accumulator and \`_\` is the current value.`, + contains: [ + { + type: "Common_PyExpression", + }, + ], + }, + initial: { + properties: {}, + }, + }, + }, + ], + }, ], }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_BaseWorkflowStep.ts b/sdks/ts/src/api/schemas/$Tasks_WaitForInputInfo.ts similarity index 57% rename from sdks/ts/src/api/schemas/$Tasks_BaseWorkflowStep.ts rename to sdks/ts/src/api/schemas/$Tasks_WaitForInputInfo.ts index 71375552d..81526c6a1 100644 --- a/sdks/ts/src/api/schemas/$Tasks_BaseWorkflowStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_WaitForInputInfo.ts @@ -2,10 +2,13 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export const $Tasks_BaseWorkflowStep = { +export const $Tasks_WaitForInputInfo = { properties: { - kind_: { - type: "Enum", + info: { + type: "dictionary", + contains: { + type: "Common_PyExpression", + }, isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Tasks_WaitForInputStep.ts b/sdks/ts/src/api/schemas/$Tasks_WaitForInputStep.ts index 7e61ce760..91a9e7bc9 100644 --- a/sdks/ts/src/api/schemas/$Tasks_WaitForInputStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_WaitForInputStep.ts @@ -6,26 +6,27 @@ export const $Tasks_WaitForInputStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, - info: { - type: "any-of", + wait_for_input: { + type: "all-of", description: `Any additional info or data`, contains: [ { - type: "string", - }, - { - type: "dictionary", - contains: { - properties: {}, - }, + type: "Tasks_WaitForInputInfo", }, ], isRequired: true, diff --git a/sdks/ts/src/api/schemas/$Tasks_YieldStep.ts b/sdks/ts/src/api/schemas/$Tasks_YieldStep.ts index 778bdeb90..b0ad18252 100644 --- a/sdks/ts/src/api/schemas/$Tasks_YieldStep.ts +++ b/sdks/ts/src/api/schemas/$Tasks_YieldStep.ts @@ -6,24 +6,41 @@ export const $Tasks_YieldStep = { type: "all-of", contains: [ { - type: "Tasks_BaseWorkflowStep", + properties: { + kind_: { + type: "Enum", + isReadOnly: true, + isRequired: true, + }, + }, }, { properties: { kind_: { type: "Enum", + isReadOnly: true, isRequired: true, }, workflow: { type: "string", - description: `The subworkflow to run`, + description: `The subworkflow to run. + VALIDATION: Should resolve to a defined subworkflow.`, isRequired: true, }, arguments: { - type: "dictionary", - contains: { - type: "Common_PyExpression", - }, + type: "any-of", + description: `The input parameters for the subworkflow (defaults to last step output)`, + contains: [ + { + type: "dictionary", + contains: { + type: "Common_PyExpression", + }, + }, + { + type: "Enum", + }, + ], isRequired: true, }, }, diff --git a/sdks/ts/src/api/schemas/$Tools_CreateToolRequest.ts b/sdks/ts/src/api/schemas/$Tools_CreateToolRequest.ts new file mode 100644 index 000000000..bea3b4740 --- /dev/null +++ b/sdks/ts/src/api/schemas/$Tools_CreateToolRequest.ts @@ -0,0 +1,41 @@ +/* generated using openapi-typescript-codegen -- do no edit */ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Tools_CreateToolRequest = { + description: `Payload for creating a tool`, + properties: { + type: { + type: "all-of", + description: `Whether this tool is a \`function\`, \`api_call\`, \`system\` etc. (Only \`function\` tool supported right now)`, + contains: [ + { + type: "Tools_ToolType", + }, + ], + isRequired: true, + }, + name: { + type: "all-of", + description: `Name of the tool (must be unique for this agent and a valid python identifier string )`, + contains: [ + { + type: "Common_validPythonIdentifier", + }, + ], + isRequired: true, + }, + function: { + type: "Tools_FunctionDef", + }, + integration: { + properties: {}, + }, + system: { + properties: {}, + }, + api_call: { + properties: {}, + }, + }, +} as const; diff --git a/sdks/ts/src/api/schemas/$Tools_FunctionDef.ts b/sdks/ts/src/api/schemas/$Tools_FunctionDef.ts index 0d9e5f2b3..57fba7cdc 100644 --- a/sdks/ts/src/api/schemas/$Tools_FunctionDef.ts +++ b/sdks/ts/src/api/schemas/$Tools_FunctionDef.ts @@ -6,13 +6,9 @@ export const $Tools_FunctionDef = { description: `Function definition`, properties: { name: { - type: "all-of", description: `DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons.`, - contains: [ - { - type: "Common_validPythonIdentifier", - }, - ], + properties: {}, + isNullable: true, }, description: { type: "all-of", @@ -28,7 +24,6 @@ export const $Tools_FunctionDef = { contains: { properties: {}, }, - isRequired: true, }, }, } as const; diff --git a/sdks/ts/src/api/schemas/$Tools_FunctionDefUpdate.ts b/sdks/ts/src/api/schemas/$Tools_FunctionDefUpdate.ts deleted file mode 100644 index c79a3478e..000000000 --- a/sdks/ts/src/api/schemas/$Tools_FunctionDefUpdate.ts +++ /dev/null @@ -1,33 +0,0 @@ -/* generated using openapi-typescript-codegen -- do no edit */ -/* istanbul ignore file */ -/* tslint:disable */ -/* eslint-disable */ -export const $Tools_FunctionDefUpdate = { - description: `Function definition`, - properties: { - name: { - type: "all-of", - description: `DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons.`, - contains: [ - { - type: "Common_validPythonIdentifier", - }, - ], - }, - description: { - type: "all-of", - description: `Description of the function`, - contains: [ - { - type: "Common_identifierSafeUnicode", - }, - ], - }, - parameters: { - type: "dictionary", - contains: { - properties: {}, - }, - }, - }, -} as const; diff --git a/sdks/ts/src/api/schemas/$Tools_PatchToolRequest.ts b/sdks/ts/src/api/schemas/$Tools_PatchToolRequest.ts index 4170be793..316eccd14 100644 --- a/sdks/ts/src/api/schemas/$Tools_PatchToolRequest.ts +++ b/sdks/ts/src/api/schemas/$Tools_PatchToolRequest.ts @@ -24,7 +24,7 @@ export const $Tools_PatchToolRequest = { ], }, function: { - type: "Tools_FunctionDefUpdate", + type: "Tools_FunctionDef", }, integration: { properties: {}, diff --git a/sdks/ts/src/api/schemas/$Users_CreateOrUpdateUserRequest_id.ts b/sdks/ts/src/api/schemas/$Users_CreateOrUpdateUserRequest.ts similarity index 75% rename from sdks/ts/src/api/schemas/$Users_CreateOrUpdateUserRequest_id.ts rename to sdks/ts/src/api/schemas/$Users_CreateOrUpdateUserRequest.ts index 47f504e4c..3edb83c22 100644 --- a/sdks/ts/src/api/schemas/$Users_CreateOrUpdateUserRequest_id.ts +++ b/sdks/ts/src/api/schemas/$Users_CreateOrUpdateUserRequest.ts @@ -2,6 +2,6 @@ /* istanbul ignore file */ /* tslint:disable */ /* eslint-disable */ -export const $Users_CreateOrUpdateUserRequest_id = { +export const $Users_CreateOrUpdateUserRequest = { type: "Common_uuid", } as const; diff --git a/sdks/ts/src/api/services/DefaultService.ts b/sdks/ts/src/api/services/DefaultService.ts index 2c8b9be9a..9e9540ed9 100644 --- a/sdks/ts/src/api/services/DefaultService.ts +++ b/sdks/ts/src/api/services/DefaultService.ts @@ -6,27 +6,24 @@ import type { Agents_Agent } from "../models/Agents_Agent"; import type { Agents_CreateAgentRequest } from "../models/Agents_CreateAgentRequest"; import type { Agents_PatchAgentRequest } from "../models/Agents_PatchAgentRequest"; import type { Agents_UpdateAgentRequest } from "../models/Agents_UpdateAgentRequest"; +import type { Chat_ChatInput } from "../models/Chat_ChatInput"; import type { Chat_ChunkChatResponse } from "../models/Chat_ChunkChatResponse"; -import type { Chat_CompletionResponseFormat } from "../models/Chat_CompletionResponseFormat"; -import type { Chat_GenerationPreset } from "../models/Chat_GenerationPreset"; import type { Chat_MessageChatResponse } from "../models/Chat_MessageChatResponse"; -import type { Common_identifierSafeUnicode } from "../models/Common_identifierSafeUnicode"; import type { Common_limit } from "../models/Common_limit"; -import type { Common_logit_bias } from "../models/Common_logit_bias"; import type { Common_offset } from "../models/Common_offset"; import type { Common_ResourceCreatedResponse } from "../models/Common_ResourceCreatedResponse"; import type { Common_ResourceDeletedResponse } from "../models/Common_ResourceDeletedResponse"; import type { Common_ResourceUpdatedResponse } from "../models/Common_ResourceUpdatedResponse"; import type { Common_uuid } from "../models/Common_uuid"; +import type { Docs_CreateDocRequest } from "../models/Docs_CreateDocRequest"; import type { Docs_Doc } from "../models/Docs_Doc"; -import type { Docs_DocReference } from "../models/Docs_DocReference"; +import type { Docs_DocSearchResponse } from "../models/Docs_DocSearchResponse"; import type { Docs_EmbedQueryRequest } from "../models/Docs_EmbedQueryRequest"; import type { Docs_EmbedQueryResponse } from "../models/Docs_EmbedQueryResponse"; import type { Docs_HybridDocSearchRequest } from "../models/Docs_HybridDocSearchRequest"; import type { Docs_TextOnlyDocSearchRequest } from "../models/Docs_TextOnlyDocSearchRequest"; import type { Docs_VectorDocSearchRequest } from "../models/Docs_VectorDocSearchRequest"; import type { Entries_History } from "../models/Entries_History"; -import type { Entries_InputChatMLMessage } from "../models/Entries_InputChatMLMessage"; import type { Executions_CreateExecutionRequest } from "../models/Executions_CreateExecutionRequest"; import type { Executions_Execution } from "../models/Executions_Execution"; import type { Executions_TaskTokenResumeExecutionRequest } from "../models/Executions_TaskTokenResumeExecutionRequest"; @@ -41,8 +38,6 @@ import type { Tasks_CreateTaskRequest } from "../models/Tasks_CreateTaskRequest" import type { Tasks_PatchTaskRequest } from "../models/Tasks_PatchTaskRequest"; import type { Tasks_Task } from "../models/Tasks_Task"; import type { Tasks_UpdateTaskRequest } from "../models/Tasks_UpdateTaskRequest"; -import type { Tools_FunctionTool } from "../models/Tools_FunctionTool"; -import type { Tools_NamedToolChoice } from "../models/Tools_NamedToolChoice"; import type { Tools_PatchToolRequest } from "../models/Tools_PatchToolRequest"; import type { Tools_Tool } from "../models/Tools_Tool"; import type { Tools_UpdateToolRequest } from "../models/Tools_UpdateToolRequest"; @@ -87,7 +82,7 @@ export class DefaultService { */ metadataFilter?: string; }): CancelablePromise<{ - results: Array; + items: Array; }> { return this.httpRequest.request({ method: "GET", @@ -287,19 +282,66 @@ export class DefaultService { }, }); } + /** + * Create a Doc for this Agent + * @returns Common_ResourceCreatedResponse The request has succeeded and a new resource has been created as a result. + * @throws ApiError + */ + public agentDocsRouteCreate({ + id, + requestBody, + }: { + /** + * ID of parent resource + */ + id: Common_uuid; + requestBody: Docs_CreateDocRequest; + }): CancelablePromise { + return this.httpRequest.request({ + method: "POST", + url: "/agents/{id}/docs", + path: { + id: id, + }, + body: requestBody, + mediaType: "application/json", + }); + } + /** + * Delete a Doc for this Agent + * @returns Common_ResourceDeletedResponse The request has been accepted for processing, but processing has not yet completed. + * @throws ApiError + */ + public agentDocsRouteDelete({ + id, + childId, + }: { + /** + * ID of parent resource + */ + id: Common_uuid; + /** + * ID of the resource to be deleted + */ + childId: Common_uuid; + }): CancelablePromise { + return this.httpRequest.request({ + method: "DELETE", + url: "/agents/{id}/docs/{child_id}", + path: { + id: id, + child_id: childId, + }, + }); + } /** * Search Docs owned by an Agent - * @returns any The request has succeeded. + * @returns Docs_DocSearchResponse The request has succeeded. * @throws ApiError */ public agentsDocsSearchRouteSearch({ id, requestBody, - limit = 100, - offset, - sortBy = "created_at", - direction = "asc", - metadataFilter = "{}", }: { /** * ID of the parent @@ -311,42 +353,13 @@ export class DefaultService { | Docs_TextOnlyDocSearchRequest | Docs_HybridDocSearchRequest; }; - /** - * Limit the number of items returned - */ - limit?: Common_limit; - /** - * Offset the items returned - */ - offset: Common_offset; - /** - * Sort by a field - */ - sortBy?: "created_at" | "updated_at"; - /** - * Sort direction - */ - direction?: "asc" | "desc"; - /** - * JSON string of object that should be used to filter objects by metadata - */ - metadataFilter?: string; - }): CancelablePromise<{ - results: Array; - }> { + }): CancelablePromise { return this.httpRequest.request({ method: "POST", url: "/agents/{id}/search", path: { id: id, }, - query: { - limit: limit, - offset: offset, - sort_by: sortBy, - direction: direction, - metadata_filter: metadataFilter, - }, body: requestBody, mediaType: "application/json", }); @@ -408,7 +421,7 @@ export class DefaultService { } /** * Create a new task - * @returns Common_ResourceCreatedResponse The request has succeeded and a new resource has been created as a result. + * @returns Common_ResourceCreatedResponse The request has succeeded. * @throws ApiError */ public tasksRouteCreate({ @@ -691,7 +704,7 @@ export class DefaultService { } /** * Create or update a task - * @returns Common_ResourceUpdatedResponse The request has succeeded. + * @returns Common_ResourceUpdatedResponse The request has succeeded and a new resource has been created as a result. * @throws ApiError */ public tasksCreateOrUpdateRouteCreateOrUpdate({ @@ -700,7 +713,7 @@ export class DefaultService { requestBody, }: { /** - * ID of parent resource + * ID of the agent */ parentId: Common_uuid; id: Common_uuid; @@ -738,27 +751,6 @@ export class DefaultService { }, }); } - /** - * Delete an existing Doc by id - * @returns Common_ResourceDeletedResponse The request has been accepted for processing, but processing has not yet completed. - * @throws ApiError - */ - public individualDocsRouteDelete({ - id, - }: { - /** - * ID of the resource - */ - id: Common_uuid; - }): CancelablePromise { - return this.httpRequest.request({ - method: "DELETE", - url: "/docs/{id}", - path: { - id: id, - }, - }); - } /** * Embed a query for search * @returns Docs_EmbedQueryResponse The request has succeeded. @@ -778,6 +770,34 @@ export class DefaultService { mediaType: "application/json", }); } + /** + * Resume an execution with a task token + * @returns Common_ResourceUpdatedResponse The request has succeeded. + * @throws ApiError + */ + public executionsRouteResumeWithTaskToken({ + taskToken, + requestBody, + }: { + /** + * A Task Token is a unique identifier for a specific Task Execution. + */ + taskToken: string; + /** + * Request to resume an execution with a task token + */ + requestBody: Executions_TaskTokenResumeExecutionRequest; + }): CancelablePromise { + return this.httpRequest.request({ + method: "POST", + url: "/executions", + query: { + task_token: taskToken, + }, + body: requestBody, + mediaType: "application/json", + }); + } /** * Get an Execution by id * @returns Executions_Execution The request has succeeded. @@ -799,6 +819,31 @@ export class DefaultService { }, }); } + /** + * Update an existing Execution + * @returns Common_ResourceUpdatedResponse The request has succeeded. + * @throws ApiError + */ + public executionsRouteUpdate({ + id, + requestBody, + }: { + /** + * ID of the resource + */ + id: Common_uuid; + requestBody: Executions_UpdateExecutionRequest; + }): CancelablePromise { + return this.httpRequest.request({ + method: "PUT", + url: "/executions/{id}", + path: { + id: id, + }, + body: requestBody, + mediaType: "application/json", + }); + } /** * List the Transitions of an Execution by id * @returns any The request has succeeded. @@ -910,7 +955,7 @@ export class DefaultService { */ metadataFilter?: string; }): CancelablePromise<{ - results: Array; + items: Array; }> { return this.httpRequest.request({ method: "GET", @@ -1071,221 +1116,7 @@ export class DefaultService { /** * Request to generate a response from the model */ - requestBody: - | { - /** - * A list of new input messages comprising the conversation so far. - */ - messages: Array; - /** - * (Advanced) List of tools that are provided in addition to agent's default set of tools. - */ - tools?: Array; - /** - * Can be one of existing tools given to the agent earlier or the ones provided in this request. - */ - tool_choice?: "auto" | "none" | Tools_NamedToolChoice; - /** - * Whether previous memories should be recalled or not (will be enabled in a future release) - */ - readonly recall: boolean; - /** - * Whether this interaction should form new memories or not (will be enabled in a future release) - */ - readonly remember: boolean; - /** - * Whether this interaction should be stored in the session history or not - */ - save: boolean; - /** - * Identifier of the model to be used - */ - model?: Common_identifierSafeUnicode; - /** - * Indicates if the server should stream the response as it's generated - */ - stream: boolean; - /** - * Up to 4 sequences where the API will stop generating further tokens. - */ - stop?: Array; - /** - * If specified, the system will make a best effort to sample deterministically for that particular seed value - */ - seed?: number; - /** - * The maximum number of tokens to generate in the chat completion - */ - max_tokens?: number; - /** - * Modify the likelihood of specified tokens appearing in the completion - */ - logit_bias?: Record; - /** - * Response format (set to `json_object` to restrict output to JSON) - */ - response_format?: Chat_CompletionResponseFormat; - /** - * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - */ - agent?: Common_uuid; - /** - * Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) - */ - preset?: Chat_GenerationPreset; - } - | { - /** - * A list of new input messages comprising the conversation so far. - */ - messages: Array; - /** - * (Advanced) List of tools that are provided in addition to agent's default set of tools. - */ - tools?: Array; - /** - * Can be one of existing tools given to the agent earlier or the ones provided in this request. - */ - tool_choice?: "auto" | "none" | Tools_NamedToolChoice; - /** - * Whether previous memories should be recalled or not (will be enabled in a future release) - */ - readonly recall: boolean; - /** - * Whether this interaction should form new memories or not (will be enabled in a future release) - */ - readonly remember: boolean; - /** - * Whether this interaction should be stored in the session history or not - */ - save: boolean; - /** - * Identifier of the model to be used - */ - model?: Common_identifierSafeUnicode; - /** - * Indicates if the server should stream the response as it's generated - */ - stream: boolean; - /** - * Up to 4 sequences where the API will stop generating further tokens. - */ - stop?: Array; - /** - * If specified, the system will make a best effort to sample deterministically for that particular seed value - */ - seed?: number; - /** - * The maximum number of tokens to generate in the chat completion - */ - max_tokens?: number; - /** - * Modify the likelihood of specified tokens appearing in the completion - */ - logit_bias?: Record; - /** - * Response format (set to `json_object` to restrict output to JSON) - */ - response_format?: Chat_CompletionResponseFormat; - /** - * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - */ - agent?: Common_uuid; - /** - * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - */ - frequency_penalty?: number; - /** - * Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - */ - presence_penalty?: number; - /** - * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - */ - temperature?: number; - /** - * Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - */ - top_p?: number; - } - | { - /** - * A list of new input messages comprising the conversation so far. - */ - messages: Array; - /** - * (Advanced) List of tools that are provided in addition to agent's default set of tools. - */ - tools?: Array; - /** - * Can be one of existing tools given to the agent earlier or the ones provided in this request. - */ - tool_choice?: "auto" | "none" | Tools_NamedToolChoice; - /** - * Whether previous memories should be recalled or not (will be enabled in a future release) - */ - readonly recall: boolean; - /** - * Whether this interaction should form new memories or not (will be enabled in a future release) - */ - readonly remember: boolean; - /** - * Whether this interaction should be stored in the session history or not - */ - save: boolean; - /** - * Identifier of the model to be used - */ - model?: Common_identifierSafeUnicode; - /** - * Indicates if the server should stream the response as it's generated - */ - stream: boolean; - /** - * Up to 4 sequences where the API will stop generating further tokens. - */ - stop?: Array; - /** - * If specified, the system will make a best effort to sample deterministically for that particular seed value - */ - seed?: number; - /** - * The maximum number of tokens to generate in the chat completion - */ - max_tokens?: number; - /** - * Modify the likelihood of specified tokens appearing in the completion - */ - logit_bias?: Record; - /** - * Response format (set to `json_object` to restrict output to JSON) - */ - response_format?: Chat_CompletionResponseFormat; - /** - * Agent ID of the agent to use for this interaction. (Only applicable for multi-agent sessions) - */ - agent?: Common_uuid; - /** - * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - */ - repetition_penalty?: number; - /** - * Number between 0 and 2.0. 1.0 is neutral and values larger than that penalize number of tokens generated. - */ - length_penalty?: number; - /** - * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. - */ - temperature?: number; - /** - * Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. - */ - top_p?: number; - /** - * Minimum probability compared to leading token to be considered - */ - min_p?: number; - }; + requestBody: Chat_ChatInput; }): CancelablePromise { return this.httpRequest.request({ method: "POST", @@ -1325,16 +1156,11 @@ export class DefaultService { */ public historyRouteHistory({ id, - limit = 100, }: { /** * ID of parent */ id: Common_uuid; - /** - * Limit the number of items returned - */ - limit?: Common_limit; }): CancelablePromise { return this.httpRequest.request({ method: "GET", @@ -1342,9 +1168,6 @@ export class DefaultService { path: { id: id, }, - query: { - limit: limit, - }, }); } /** @@ -1427,65 +1250,6 @@ export class DefaultService { }, }); } - /** - * Resume an execution with a task token - * @returns Common_ResourceUpdatedResponse The request has succeeded. - * @throws ApiError - */ - public taskExecutionsRouteResumeWithTaskToken({ - id, - requestBody, - }: { - /** - * ID of parent Task - */ - id: Common_uuid; - /** - * Request to resume an execution with a task token - */ - requestBody: Executions_TaskTokenResumeExecutionRequest; - }): CancelablePromise { - return this.httpRequest.request({ - method: "PUT", - url: "/tasks/{id}/executions", - path: { - id: id, - }, - body: requestBody, - mediaType: "application/json", - }); - } - /** - * Update an existing Execution - * @returns Common_ResourceUpdatedResponse The request has succeeded. - * @throws ApiError - */ - public taskExecutionsRouteUpdate({ - id, - childId, - requestBody, - }: { - /** - * ID of parent resource - */ - id: Common_uuid; - /** - * ID of the resource to be updated - */ - childId: Common_uuid; - requestBody: Executions_UpdateExecutionRequest; - }): CancelablePromise { - return this.httpRequest.request({ - method: "PUT", - url: "/tasks/{id}/executions/{child_id}", - path: { - id: id, - child_id: childId, - }, - body: requestBody, - mediaType: "application/json", - }); - } /** * List users (paginated) * @returns any The request has succeeded. @@ -1519,7 +1283,7 @@ export class DefaultService { */ metadataFilter?: string; }): CancelablePromise<{ - results: Array; + items: Array; }> { return this.httpRequest.request({ method: "GET", @@ -1560,7 +1324,7 @@ export class DefaultService { requestBody, }: { id: Common_uuid; - requestBody: Users_UpdateUserRequest; + requestBody: Users_CreateUserRequest; }): CancelablePromise { return this.httpRequest.request({ method: "POST", @@ -1719,19 +1483,66 @@ export class DefaultService { }, }); } + /** + * Create a Doc for this User + * @returns Common_ResourceCreatedResponse The request has succeeded and a new resource has been created as a result. + * @throws ApiError + */ + public userDocsRouteCreate({ + id, + requestBody, + }: { + /** + * ID of parent resource + */ + id: Common_uuid; + requestBody: Docs_CreateDocRequest; + }): CancelablePromise { + return this.httpRequest.request({ + method: "POST", + url: "/users/{id}/docs", + path: { + id: id, + }, + body: requestBody, + mediaType: "application/json", + }); + } + /** + * Delete a Doc for this User + * @returns Common_ResourceDeletedResponse The request has been accepted for processing, but processing has not yet completed. + * @throws ApiError + */ + public userDocsRouteDelete({ + id, + childId, + }: { + /** + * ID of parent resource + */ + id: Common_uuid; + /** + * ID of the resource to be deleted + */ + childId: Common_uuid; + }): CancelablePromise { + return this.httpRequest.request({ + method: "DELETE", + url: "/users/{id}/docs/{child_id}", + path: { + id: id, + child_id: childId, + }, + }); + } /** * Search Docs owned by a User - * @returns any The request has succeeded. + * @returns Docs_DocSearchResponse The request has succeeded. * @throws ApiError */ public userDocsSearchRouteSearch({ id, requestBody, - limit = 100, - offset, - sortBy = "created_at", - direction = "asc", - metadataFilter = "{}", }: { /** * ID of the parent @@ -1743,42 +1554,13 @@ export class DefaultService { | Docs_TextOnlyDocSearchRequest | Docs_HybridDocSearchRequest; }; - /** - * Limit the number of items returned - */ - limit?: Common_limit; - /** - * Offset the items returned - */ - offset: Common_offset; - /** - * Sort by a field - */ - sortBy?: "created_at" | "updated_at"; - /** - * Sort direction - */ - direction?: "asc" | "desc"; - /** - * JSON string of object that should be used to filter objects by metadata - */ - metadataFilter?: string; - }): CancelablePromise<{ - results: Array; - }> { + }): CancelablePromise { return this.httpRequest.request({ method: "POST", url: "/users/{id}/search", path: { id: id, }, - query: { - limit: limit, - offset: offset, - sort_by: sortBy, - direction: direction, - metadata_filter: metadataFilter, - }, body: requestBody, mediaType: "application/json", }); diff --git a/typespec/agents/models.tsp b/typespec/agents/models.tsp index 3d8642d2e..17f43691b 100644 --- a/typespec/agents/models.tsp +++ b/typespec/agents/models.tsp @@ -20,8 +20,7 @@ model Agent { ...HasTimestamps; /** Name of the agent */ - @maxLength(120) - name: identifierSafeUnicode = ""; + name: identifierSafeUnicode = identifierSafeUnicode(""); /** About the agent */ about: string = ""; @@ -46,11 +45,12 @@ model UpdateAgentRequest { model PatchAgentRequest is UpdateAgentRequest {} /** Payload for creating a agent (and associated documents) */ +@withVisibility("create") model CreateAgentRequest { ...UpdateAgentRequest; } -model CreateOrUpdateAgentRequest { +model CreateOrUpdateAgentRequest extends CreateAgentRequest { @path id: uuid; ...UpdateAgentRequest; diff --git a/typespec/chat/endpoints.tsp b/typespec/chat/endpoints.tsp index 232a44ab0..64eb6700d 100644 --- a/typespec/chat/endpoints.tsp +++ b/typespec/chat/endpoints.tsp @@ -19,8 +19,6 @@ interface Endpoints { @post @doc("Generate a response from the model") generate( - @header contentType: yaml | json; - @path @doc("The session ID") id: uuid; diff --git a/typespec/chat/models.tsp b/typespec/chat/models.tsp index a90ad47a5..f52dae04c 100644 --- a/typespec/chat/models.tsp +++ b/typespec/chat/models.tsp @@ -18,19 +18,6 @@ namespace Chat; // CHAT MODELS // -/** Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) */ -enum GenerationPreset { - problem_solving, - conversational, - fun, - prose, - creative, - business, - deterministic, - code, - multilingual, -} - /** * The reason the model stopped generating tokens. This will be `stop` * if the model hit a natural stop point or a provided stop sequence, @@ -47,14 +34,13 @@ enum FinishReason { /** Determines how the session accesses history and memories */ model MemoryAccessOptions { - /** Whether previous memories should be recalled or not (will be enabled in a future release) */ - @visibility("read") // DISABLED - recall: boolean = false; - - /** Whether this interaction should form new memories or not (will be enabled in a future release) */ + /** DISABLED: Whether this interaction should form new memories or not (will be enabled in a future release) */ @visibility("read") // DISABLED remember: boolean = false; + /** Whether previous memories and docs should be recalled or not */ + recall: boolean = true; + /** Whether this interaction should be stored in the session history or not */ save: boolean = true; } @@ -95,11 +81,6 @@ model CommonChatSettings { agent?: uuid; } -model GenerationPresetSettings { - /** Generation preset (one of: problem_solving, conversational, fun, prose, creative, business, deterministic, code, multilingual) */ - preset?: GenerationPreset; -} - model OpenAISettings { /** Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. */ @minValue(-2) @@ -133,16 +114,6 @@ model vLLMSettings { @maxValue(2) length_penalty?: float32; - /** What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. */ - @minValue(0) - @maxValue(5) - temperature?: float32; - - /** Defaults to 1 An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both. */ - @minValue(0) - @maxValue(1) - top_p?: float32; - /** Minimum probability compared to leading token to be considered */ @minValue(0) @maxValue(1) @@ -150,29 +121,27 @@ model vLLMSettings { } /** Default settings for the chat session (also used by the agent) */ -alias DefaultChatSettings = GenerationPresetSettings | OpenAISettings | vLLMSettings; +model DefaultChatSettings extends OpenAISettings { + ...vLLMSettings; +} -alias ChatSettings = ( - CommonChatSettings & GenerationPresetSettings -) | ( - CommonChatSettings & OpenAISettings -) | ( - CommonChatSettings & vLLMSettings -); +model ChatSettings extends DefaultChatSettings { + ...CommonChatSettings; +} /** Usage statistics for the completion request */ model CompetionUsage { /** Number of tokens in the generated completion */ @visibility("read") - completion_tokens: uint32; + completion_tokens?: uint32; /** Number of tokens in the prompt */ @visibility("read") - prompt_tokens: uint32; + prompt_tokens?: uint32; /** Total number of tokens used in the request (prompt + completion) */ @visibility("read") - total_tokens: uint32; + total_tokens?: uint32; } model ChatInputData { @@ -188,13 +157,10 @@ model ChatInputData { tool_choice?: ToolChoiceOption; } -alias ChatInput = ( - ChatInputData & MemoryAccessOptions & CommonChatSettings & GenerationPresetSettings -) | ( - ChatInputData & MemoryAccessOptions & CommonChatSettings & OpenAISettings -) | ( - ChatInputData & MemoryAccessOptions & CommonChatSettings & vLLMSettings -); +model ChatInput extends ChatInputData { + ...MemoryAccessOptions; + ...ChatSettings; +} model BaseTokenLogProb { token: string; @@ -202,7 +168,7 @@ model BaseTokenLogProb { /** The log probability of the token */ logprob: float32; - bytes: uint16[] | null; + bytes?: uint16[]; } model TokenLogProb extends BaseTokenLogProb { @@ -221,17 +187,17 @@ model BaseChatOutput { finish_reason: FinishReason; /** The log probabilities of tokens */ - logprobs: LogProbResponse | null; + logprobs?: LogProbResponse; } /** The output returned by the model. Note that, depending on the model provider, they might return more than one message. */ model SingleChatOutput extends BaseChatOutput { - message: ChatMLMessage; + message: InputChatMLMessage; } /** The output returned by the model. Note that, depending on the model provider, they might return more than one message. */ model MultipleChatOutput extends BaseChatOutput { - messages: ChatMLMessage[]; + messages: InputChatMLMessage[]; } alias ChatOutput = SingleChatOutput | MultipleChatOutput; @@ -239,12 +205,12 @@ alias ChatOutput = SingleChatOutput | MultipleChatOutput; /** Streaming chat completion output */ model ChatOutputChunk extends BaseChatOutput { /** The message generated by the model */ - delta: ChatMLMessage; + delta: InputChatMLMessage; } model BaseChatResponse { /** Usage statistics for the completion request */ - usage: CompetionUsage | null; + usage?: CompetionUsage; /** Background job IDs that may have been spawned from this interaction. */ jobs: uuid[]; @@ -267,7 +233,7 @@ model MessageChatResponse extends BaseChatResponse { @header contentType: json; /** The deltas generated by the model */ - choices: ChatOutputChunk[]; + choices: ChatOutput[]; } -alias ChatResponse = ChunkChatResponse | MessageChatResponse; \ No newline at end of file +alias ChatResponse = ChunkChatResponse | MessageChatResponse; diff --git a/typespec/common/interfaces.tsp b/typespec/common/interfaces.tsp index 56beb423a..b7a15bf25 100644 --- a/typespec/common/interfaces.tsp +++ b/typespec/common/interfaces.tsp @@ -18,7 +18,7 @@ interface LimitOffsetPagination< @get @doc(DocString) list(...PaginationOptions): { - results: Type[]; + items: Type[]; }; } @@ -28,7 +28,7 @@ interface CreateEndpoint< > { @post @doc(DocString) - create(@header contentType: yaml | json, ...CreateType): { + create(...CreateType): { @statusCode _: "201"; @body @@ -43,7 +43,7 @@ interface CreateOrUpdateEndpoint< > { @post @doc(DocString) - createOrUpdate(@header contentType: yaml | json, ...CreateOrUpdateType): { + createOrUpdate(...CreateOrUpdateType): { @statusCode _: "200"; @body @@ -59,8 +59,6 @@ interface UpdateEndpoint< @put @doc(DocString) update( - @header contentType: yaml | json, - @path @doc("ID of the resource") id: uuid, @@ -82,8 +80,6 @@ interface PatchEndpoint< @patch @doc(DocString) patch( - @header contentType: yaml | json, - @path @doc("ID of the resource") id: uuid, @@ -151,8 +147,6 @@ interface ChildCreateEndpoint< @post @doc(DocString) create( - @header contentType: yaml | json, - @path @doc("ID of parent resource") id: uuid, @@ -174,8 +168,6 @@ interface ChildCreateOrUpdateEndpoint< @post @doc(DocString) createOrUpdate( - @header contentType: yaml | json, - @path @doc("ID of parent resource") parent_id: uuid, diff --git a/typespec/common/scalars.tsp b/typespec/common/scalars.tsp index 177f2ccd6..79eda2d99 100644 --- a/typespec/common/scalars.tsp +++ b/typespec/common/scalars.tsp @@ -12,11 +12,13 @@ scalar uuid extends string; * See: https://unicode.org/reports/tr31/ * See: https://www.unicode.org/reports/tr39/#Identifier_Characters */ +@maxLength(120) @pattern("^[\\p{L}\\p{Nl}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]+[\\p{ID_Start}\\p{Mn}\\p{Mc}\\p{Nd}\\p{Pc}\\p{Pattern_Syntax}\\p{Pattern_White_Space}]*$") scalar identifierSafeUnicode extends string; /** Valid python identifier names */ @pattern("^[^\\W0-9]\\w*$") +@maxLength(40) scalar validPythonIdentifier extends string; /** Limit the number of results */ @@ -51,4 +53,7 @@ alias entrySource = "api_request" | "api_response" | "tool_response" | "internal scalar toolRef extends string; /** A simple python expression compatible with SimpleEval. */ -scalar PyExpression extends string; \ No newline at end of file +scalar PyExpression extends string; + +/** A valid jinja template. */ +scalar JinjaTemplate extends string; \ No newline at end of file diff --git a/typespec/docs/endpoints.tsp b/typespec/docs/endpoints.tsp index 940d040b5..3600319c1 100644 --- a/typespec/docs/endpoints.tsp +++ b/typespec/docs/endpoints.tsp @@ -18,14 +18,17 @@ namespace Docs; // interface UserEndpoints - extends ChildLimitOffsetPagination {} + extends ChildLimitOffsetPagination, + ChildDeleteEndpoint<"Delete a Doc for this User">, + ChildCreateEndpoint {} interface AgentEndpoints - extends ChildLimitOffsetPagination {} + extends ChildLimitOffsetPagination, + ChildDeleteEndpoint<"Delete a Doc for this Agent">, + ChildCreateEndpoint {} interface IndividualDocEndpoints - extends GetEndpoint, - DeleteEndpoint<"Delete an existing Doc by id"> {} + extends GetEndpoint {} interface SearchEndpoints { @doc(DocString) @@ -35,17 +38,12 @@ interface SearchEndpoints pure BM25; 1 => pure vector; */ - @minValue(0) - @maxValue(1) - alpha: float = 0.75; - - /** Whether to include the MMR algorithm in the search. Optimizes for diversity in search results. */ - mmr: boolean = false; + @minValue(1) + @maxValue(100) + limit: uint16 = 10; /** The language to be used for text-only search. Support for other languages coming soon. */ lang: "en-US" = "en-US"; } model VectorDocSearchRequest extends BaseDocSearchRequest { - /** Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. */ - vector: float[] | float[][]; + /** The confidence cutoff level */ + @minValue(0) + @maxValue(1) + confidence: float = 0.5; + + /** Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. */ + vector: float[]; text?: never; } model TextOnlyDocSearchRequest extends BaseDocSearchRequest { - /** Text or texts to use in the search. */ - text: string | string[]; + /** Text to use in the search. */ + text: string; vector?: never; } model HybridDocSearchRequest extends BaseDocSearchRequest { - /** Text or texts to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. */ - text: string | string[]; + /** The confidence cutoff level */ + @minValue(0) + @maxValue(1) + confidence: float = 0.5; - /** Vector or vectors to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. */ - vector: float[] | float[][]; + /** The weight to apply to BM25 vs Vector search results. 0 => pure BM25; 1 => pure vector; */ + @minValue(0) + @maxValue(1) + alpha: float = 0.75; + + /** Text to use in the search. In `hybrid` search mode, either `text` or both `text` and `vector` fields are required. */ + text: string; + + /** Vector to use in the search. Must be the same dimensions as the embedding model or else an error will be thrown. */ + vector: float[]; } -alias DocSearchRequest = VectorDocSearchRequest | TextOnlyDocSearchRequest | HybridDocSearchRequest; \ No newline at end of file +alias DocSearchRequest = VectorDocSearchRequest | TextOnlyDocSearchRequest | HybridDocSearchRequest; + +model DocSearchResponse { + /** The documents that were found */ + docs: DocReference[]; + + /** The time taken to search in seconds */ + @minValueExclusive(0) + time: float; +} \ No newline at end of file diff --git a/typespec/entries/endpoints.tsp b/typespec/entries/endpoints.tsp index bcb9459b6..3a4a0335b 100644 --- a/typespec/entries/endpoints.tsp +++ b/typespec/entries/endpoints.tsp @@ -23,9 +23,5 @@ interface Endpoints @path @doc("ID of parent") id: uuid, - - @query - @doc("Limit the number of items returned") - limit: limit = 100, ): History; } diff --git a/typespec/entries/models.tsp b/typespec/entries/models.tsp index 1a614d449..b254fa1d1 100644 --- a/typespec/entries/models.tsp +++ b/typespec/entries/models.tsp @@ -22,7 +22,7 @@ enum ImageDetail { /** ChatML role (system|assistant|user|function_call|function|function_response|auto) */ enum ChatMLRole { user, - agent, + assistant, system, function, function_response, @@ -30,23 +30,22 @@ enum ChatMLRole { auto, } -model ImageURL { +model ImageURL { /** Image URL or base64 data url (e.g. `data:image/jpeg;base64,`) */ - @format("uri") - url: string; + url: T; /** The detail level of the image */ detail: ImageDetail = ImageDetail.auto; } -model ChatMLTextContentPart { - text: string; +model ChatMLTextContentPart { + text: T; /** The type (fixed to 'text') */ type: "text" = "text"; } -model ChatMLImageContentPart { +model ChatMLImageContentPart { /** The image URL */ image_url: ImageURL; @@ -54,14 +53,14 @@ model ChatMLImageContentPart { type: "image_url" = "image_url"; } -alias ChatMLContentPart = ChatMLTextContentPart | ChatMLImageContentPart; +alias ChatMLContentPart = ChatMLTextContentPart | ChatMLImageContentPart; -model ChatMLMessage { +model ChatMLMessage { /** The role of the message */ role: ChatMLRole; /** The content parts of the message */ - content: string | string[] | ChatMLContentPart[]; + content: T | T[] | ChatMLContentPart[]; /** Name */ name?: string; @@ -79,11 +78,11 @@ model ChatMLMessage { } @withVisibility("create") -model InputChatMLMessage { - ...ChatMLMessage; +model InputChatMLMessage { + ...ChatMLMessage; } -alias EntryContent = ChatMLContentPart[] | Tool | ChosenToolCall | string | ToolResponse; +alias EntryContent = ChatMLContentPart[] | Tool | ChosenToolCall | string | ToolResponse; model BaseEntry { role: ChatMLRole; @@ -91,8 +90,8 @@ model BaseEntry { content: EntryContent | EntryContent[]; source: entrySource; - tokenizer?: string; - token_count?: uint16; + tokenizer: string; + token_count: uint16; /** This is the time that this event refers to. */ @minValue(0) @@ -112,9 +111,9 @@ model Relation { } model History { - entries: BaseEntry[]; + entries: Entry[]; relations: Relation[]; session_id: Session.id; ...HasCreatedAt; -} \ No newline at end of file +} diff --git a/typespec/executions/endpoints.tsp b/typespec/executions/endpoints.tsp index 46ec49286..9b5820d9c 100644 --- a/typespec/executions/endpoints.tsp +++ b/typespec/executions/endpoints.tsp @@ -14,28 +14,20 @@ namespace Executions; // interface Endpoints - extends GetEndpoint {} - -interface TaskEndpoints - extends ChildUpdateEndpoint< - UpdateExecutionRequest, - "Update an existing Execution" - >, - ChildCreateEndpoint< - CreateExecutionRequest, - "Create an execution for the given task" - >, - ChildLimitOffsetPagination { - @put + extends GetEndpoint, + UpdateEndpoint {} + +interface PublicEndpoints { + @post + @useAuth([]) @doc("Resume an execution with a task token") - resumeWithTaskToken( - @header contentType: yaml | json, + op resumeWithTaskToken( + @header contentType: json, - @path - @doc("ID of parent Task") - id: uuid, + // Uses a query parameter `task_token` to get the task token + // See `TaskTokenResumeExecutionRequest` for more details - @body + @bodyRoot @doc("Request to resume an execution with a task token") body: TaskTokenResumeExecutionRequest, ): { @@ -47,6 +39,16 @@ interface TaskEndpoints }; } +interface TaskEndpoints + extends ChildCreateEndpoint< + CreateExecutionRequest, + "Create an execution for the given task" + >, + ChildLimitOffsetPagination< + Execution, + "List executions of the given task" + > {} + interface TransitionEndpoints extends ChildLimitOffsetPagination< { diff --git a/typespec/executions/models.tsp b/typespec/executions/models.tsp index bbc34028c..ffaa10dc5 100644 --- a/typespec/executions/models.tsp +++ b/typespec/executions/models.tsp @@ -1,6 +1,10 @@ +import "@typespec/http"; + import "../common"; import "../tasks"; +using TypeSpec.Http; + using Common; using Tasks; @@ -71,7 +75,7 @@ model StopExecutionRequest extends UpdateExecutionRequest { model ResumeExecutionRequest extends UpdateExecutionRequest { status: "running" = "running"; - + /** The input to resume the execution with */ input?: Record; } @@ -80,6 +84,7 @@ model TaskTokenResumeExecutionRequest { status: "running" = "running"; /** A Task Token is a unique identifier for a specific Task Execution. */ + @query task_token: string; /** The input to resume the execution with */ @@ -103,7 +108,10 @@ alias TransitionType = ( | "cancelled" ); -alias TransitionTarget = [validPythonIdentifier, uint16]; +model TransitionTarget { + workflow: identifierSafeUnicode; + step: uint16; +} model Transition { @visibility("read") @@ -113,7 +121,7 @@ model Transition { execution_id: Execution.id; @visibility("read") - output: Record; + output: unknown; @visibility("read") current: TransitionTarget; diff --git a/typespec/jobs/models.tsp b/typespec/jobs/models.tsp index 07663d005..e8050b083 100644 --- a/typespec/jobs/models.tsp +++ b/typespec/jobs/models.tsp @@ -24,8 +24,7 @@ model JobStatus { ...HasTimestamps; /** Name of the job */ - @maxLength(120) - name: identifierSafeUnicode = ""; + name: identifierSafeUnicode = identifierSafeUnicode(""); /** Reason for the current state of the job */ reason: string = ""; diff --git a/typespec/main.tsp b/typespec/main.tsp index b6213871e..4cf61f6fa 100644 --- a/typespec/main.tsp +++ b/typespec/main.tsp @@ -105,7 +105,7 @@ namespace Api { interface TaskExecutionsRoute extends Executions.TaskEndpoints {} @route("/executions") - interface ExecutionsRoute extends Executions.Endpoints {} + interface ExecutionsRoute extends Executions.Endpoints, Executions.PublicEndpoints {} @route("/executions/{id}/transitions") interface ExecutionTransitionsRoute extends Executions.TransitionEndpoints {} diff --git a/typespec/package-lock.json b/typespec/package-lock.json index f535be674..90269602f 100644 --- a/typespec/package-lock.json +++ b/typespec/package-lock.json @@ -8,12 +8,12 @@ "name": "julep-typespec", "version": "0.3.0", "dependencies": { - "@typespec/compiler": "^0.58.1", - "@typespec/http": "^0.58.0", - "@typespec/openapi": "^0.58.0", - "@typespec/openapi3": "^0.58.0", - "@typespec/rest": "^0.58.0", - "@typespec/versioning": "^0.58.0" + "@typespec/compiler": "^0.59.1", + "@typespec/http": "^0.59.1", + "@typespec/openapi": "^0.59.0", + "@typespec/openapi3": "^0.59.1", + "@typespec/rest": "^0.59.1", + "@typespec/versioning": "^0.59.0" } }, "node_modules/@apidevtools/swagger-methods": { @@ -274,20 +274,20 @@ "license": "MIT" }, "node_modules/@typespec/compiler": { - "version": "0.58.1", - "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-0.58.1.tgz", - "integrity": "sha512-bVxxM35r40OtuL4+/9W/g1EevlnWnW6i151nsZAFOJj1xWHoE2G9zkx5/Feic8OlzArjhGGLJOLH3Ez1Wrw35A==", + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-0.59.1.tgz", + "integrity": "sha512-O2ljgr6YoFaIH6a8lWc90/czdv4B2X6N9wz4WsnQnVvgO0Tj0s+3xkvp4Tv59RKMhT0f3fK6dL8oEGO32FYk1A==", "license": "MIT", "dependencies": { "@babel/code-frame": "~7.24.7", - "ajv": "~8.16.0", + "ajv": "~8.17.1", "change-case": "~5.4.4", "globby": "~14.0.2", "mustache": "~4.2.0", "picocolors": "~1.0.1", - "prettier": "~3.3.2", + "prettier": "~3.3.3", "prompts": "~2.4.2", - "semver": "^7.6.2", + "semver": "^7.6.3", "temporal-polyfill": "^0.2.5", "vscode-languageserver": "~9.0.1", "vscode-languageserver-textdocument": "~1.0.11", @@ -303,34 +303,34 @@ } }, "node_modules/@typespec/http": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@typespec/http/-/http-0.58.0.tgz", - "integrity": "sha512-jQpkugg9AZVrNDMkDIgZRpIoRkkU2b0LtKWqMGg33MItYj9/DYSgDtY7xb7oCBppRtFFZ/h138HyhYl3zQxZRg==", + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/http/-/http-0.59.1.tgz", + "integrity": "sha512-Ai8oCAO+Bw1HMSZ9gOI5Od4fNn/ul4HrVtTB01xFuLK6FQj854pxhzao8ylPnr7gIRQ327FV12/QfXR87yCiYQ==", "license": "MIT", "engines": { "node": ">=18.0.0" }, "peerDependencies": { - "@typespec/compiler": "~0.58.0" + "@typespec/compiler": "~0.59.0" } }, "node_modules/@typespec/openapi": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-0.58.0.tgz", - "integrity": "sha512-gu6nXfmpfZrfq8Etpgl1dpMfsXii7EzQyhZgsPhIy7ZwV5bDmFk1/oyhTqIpWrnr4pD3r151T2BQjzJefjf15A==", + "version": "0.59.0", + "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-0.59.0.tgz", + "integrity": "sha512-do1Dm5w0MuK3994gYTBg6qMfgeIxmmsDqnz3zimYKMPpbnUBi4F6/o4iCfn0Fn9kaNl+H6UlOzZpsZW9xHui1Q==", "license": "MIT", "engines": { "node": ">=18.0.0" }, "peerDependencies": { - "@typespec/compiler": "~0.58.0", - "@typespec/http": "~0.58.0" + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.0" } }, "node_modules/@typespec/openapi3": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@typespec/openapi3/-/openapi3-0.58.0.tgz", - "integrity": "sha512-G9t9CWT9cN6ip39dLZaE6JdEDxGsFyOUxA2s6a087rweoTH85XzsFiQL7uiUD8vHhXyEo6tF6sy3LMZVN0BsoQ==", + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/openapi3/-/openapi3-0.59.1.tgz", + "integrity": "sha512-89VbUbkWKxeFgE0w0hpVyk1UZ6ZHRxOhcAHvF5MgxQxEhs2ALXKAqapWjFQsYrLBhAUoWzdPFrJJUMbwF9kX0Q==", "license": "MIT", "dependencies": { "@readme/openapi-parser": "~2.6.0", @@ -343,47 +343,47 @@ "node": ">=18.0.0" }, "peerDependencies": { - "@typespec/compiler": "~0.58.0", - "@typespec/http": "~0.58.0", - "@typespec/openapi": "~0.58.0", - "@typespec/versioning": "~0.58.0" + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.1", + "@typespec/openapi": "~0.59.0", + "@typespec/versioning": "~0.59.0" } }, "node_modules/@typespec/rest": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.58.0.tgz", - "integrity": "sha512-QBxkED0/KQKG22pwzis0n7BY+uLMSZZPSoVe/ESBFika9n5/yyeQ0l58xbFFwwfxAxe4xwuZ5PNwTdEXZbzr5g==", + "version": "0.59.1", + "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.59.1.tgz", + "integrity": "sha512-uKU431jBYL2tVQWG5THA75+OtXDa1e8cMAafYK/JJRRiVRd8D/Epd8fp07dzlB8tFGrhCaGlekRMqFPFrHh2/A==", "license": "MIT", "engines": { "node": ">=18.0.0" }, "peerDependencies": { - "@typespec/compiler": "~0.58.0", - "@typespec/http": "~0.58.0" + "@typespec/compiler": "~0.59.0", + "@typespec/http": "~0.59.1" } }, "node_modules/@typespec/versioning": { - "version": "0.58.0", - "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.58.0.tgz", - "integrity": "sha512-brnQQ3wKWh4AbgqmnVLj+8zyOaDk9VPWg4QBecdQxzz7PrSrlAzIzRfeIyr67+hwi/0SvkTAB6GNH7YYTypKGA==", + "version": "0.59.0", + "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.59.0.tgz", + "integrity": "sha512-aihO/ux0lLmsuYAdGVkiBflSudcZokYG42SELk1FtMFo609G3Pd7ep7hau6unBnMIceQZejB0ow5UGRupK4X5A==", "license": "MIT", "engines": { "node": ">=18.0.0" }, "peerDependencies": { - "@typespec/compiler": "~0.58.0" + "@typespec/compiler": "~0.59.0" } }, "node_modules/ajv": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.16.0.tgz", - "integrity": "sha512-F0twR8U1ZU67JIEtekUcLkXkoO5mMMmgGD8sK/xUFzJ805jxHQl92hImFAqqXMyMYjSPOyUPAwHYhB72g5sTXw==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.4.1" + "require-from-string": "^2.0.2" }, "funding": { "type": "github", @@ -553,6 +553,12 @@ "node": ">=8.6.0" } }, + "node_modules/fast-uri": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.1.tgz", + "integrity": "sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==", + "license": "MIT" + }, "node_modules/fastq": { "version": "1.17.1", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", @@ -838,15 +844,6 @@ "node": ">= 6" } }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -925,9 +922,9 @@ } }, "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -1031,15 +1028,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, "node_modules/vscode-jsonrpc": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", diff --git a/typespec/package.json b/typespec/package.json index 95c181471..a7b3d7ee4 100644 --- a/typespec/package.json +++ b/typespec/package.json @@ -3,12 +3,12 @@ "version": "0.3.0", "type": "module", "dependencies": { - "@typespec/compiler": "^0.58.1", - "@typespec/http": "^0.58.0", - "@typespec/openapi": "^0.58.0", - "@typespec/openapi3": "^0.58.0", - "@typespec/rest": "^0.58.0", - "@typespec/versioning": "^0.58.0" + "@typespec/compiler": "^0.59.1", + "@typespec/http": "^0.59.1", + "@typespec/openapi": "^0.59.0", + "@typespec/openapi3": "^0.59.1", + "@typespec/rest": "^0.59.1", + "@typespec/versioning": "^0.59.0" }, "private": true } diff --git a/typespec/sessions/models.tsp b/typespec/sessions/models.tsp index ed58f55c6..27c1179d9 100644 --- a/typespec/sessions/models.tsp +++ b/typespec/sessions/models.tsp @@ -54,7 +54,7 @@ model Session { summary: string | null = null; /** Render system and assistant message content as jinja templates */ - render_templates: boolean = false; + render_templates: boolean = true; /** Threshold value for the adaptive context functionality */ token_budget: uint16 | null = null; @@ -150,7 +150,7 @@ model CreateSessionRequest { } @withVisibility("create", "update") -model CreateOrUpdateSessionRequest { +model CreateOrUpdateSessionRequest extends CreateSessionRequest { @path id: uuid; diff --git a/typespec/tasks/endpoints.tsp b/typespec/tasks/endpoints.tsp index 73f50e457..2d9ec422b 100644 --- a/typespec/tasks/endpoints.tsp +++ b/typespec/tasks/endpoints.tsp @@ -1,7 +1,11 @@ +import "@typespec/http"; + import "../common"; import "../executions"; import "./models.tsp"; +using TypeSpec.Http; + using Common; using Executions; @@ -11,11 +15,25 @@ namespace Tasks; // TASK ENDPOINTS // -interface CreateOrUpdateEndpoints - extends ChildCreateOrUpdateEndpoint< - CreateOrUpdateTaskRequest, - "Create or update a task" - > {} +interface CreateOrUpdateEndpoints { + @post + @doc("Create or update a task") + createOrUpdate( + @header contentType: yaml | json, + + @path + @doc("ID of the agent") + parent_id: uuid, + + ...CreateOrUpdateTaskRequest, + ): { + @statusCode _: "201"; + + @body + @doc("Details of the task updated along with ID") + body: ResourceUpdatedResponse; + }; +} interface Endpoints extends ChildUpdateEndpoint< @@ -27,5 +45,22 @@ interface Endpoints "Update an existing task (merges with existing values)" >, ChildDeleteEndpoint<"Delete a task by its id">, - ChildLimitOffsetPagination, - ChildCreateEndpoint {} \ No newline at end of file + ChildLimitOffsetPagination { + @post + @doc("Create a new task") + create( + @header contentType: yaml | json, + + @path + @doc("ID of parent resource") + id: uuid, + + ...CreateTaskRequest, + ): { + @statusCode _: "200"; + + @body + @doc("Details of the task created") + body: ResourceCreatedResponse; + }; +} diff --git a/typespec/tasks/models.tsp b/typespec/tasks/models.tsp index ad29e73d5..749d47a11 100644 --- a/typespec/tasks/models.tsp +++ b/typespec/tasks/models.tsp @@ -1,17 +1,13 @@ import "@typespec/http"; -import "../agents"; import "../common"; -import "../chat"; -import "../entries"; import "../tools"; +import "./steps.tsp"; + using TypeSpec.Http; -using Agents; -using Chat; using Common; -using Entries; using Tools; namespace Tasks; @@ -20,105 +16,6 @@ namespace Tasks; // TASK MODELS // -alias WorkflowStepKind = - | /** A step that runs a tool */ - "tool_call" - | /** A step that runs a subworkflow */ - "yield" - | /** A step that runs a prompt */ - "prompt" - | /** A step that evaluates an expression */ - "evaluate" - | /** A step that runs a conditional */ - "if_else" - | /** A step that signals that it needs more input before resuming */ - "wait_for_input" - | /** Throw an error */ - "error"; - -model BaseWorkflowStep { - /** The kind of step */ - kind_: WorkflowStepKind; -} - -model ToolCallStep extends BaseWorkflowStep { - kind_: "tool_call" = "tool_call"; - - /** The tool to run */ - tool: toolRef; - - /** The input parameters for the tool */ - arguments: Record; -} - -/** An object where values are strings in the Common Expression Language that get evaluated before being passed downstream */ -alias ExpressionObject = Record; - -model YieldStep extends BaseWorkflowStep { - kind_: "yield" = "yield"; - - /** The subworkflow to run */ - workflow: string; - - /** The input parameters for the subworkflow */ - arguments: ExpressionObject; -} - -model PromptStep extends BaseWorkflowStep { - kind_: "prompt" = "prompt"; - - /** The prompt to run */ - prompt: string | InputChatMLMessage[]; - - /** Settings for the prompt */ - settings: ChatSettings; -} - -model EvaluateStep extends BaseWorkflowStep { - kind_: "evaluate" = "evaluate"; - - /** The expression to evaluate */ - evaluate: ExpressionObject; -} - -model ErrorWorkflowStep extends BaseWorkflowStep { - kind_: "error" = "error"; - - /** The error message */ - error: string; -} - -model WaitForInputStep extends BaseWorkflowStep { - kind_: "wait_for_input" = "wait_for_input"; - - /** Any additional info or data */ - info: string | Record; -} - -alias NonConditionalWorkflowStep = - | EvaluateStep - | ToolCallStep - | YieldStep - | PromptStep - | ErrorWorkflowStep - | WaitForInputStep; - -model IfElseWorkflowStep extends BaseWorkflowStep { - kind_: "if_else" = "if_else"; - - /** The condition to evaluate */ - `if`: PyExpression; - - /** The steps to run if the condition is true */ - then: NonConditionalWorkflowStep; - - /** The steps to run if the condition is false */ - `else`: NonConditionalWorkflowStep; -} - -alias WorkflowStep = NonConditionalWorkflowStep | IfElseWorkflowStep; -alias CreateWorkflowStep = WorkflowStep; - model Workflow { @key name: validPythonIdentifier; @@ -126,12 +23,10 @@ model Workflow { steps: WorkflowStep[]; } -model TaskTool { +model TaskTool extends CreateToolRequest { /** Read-only: Whether the tool was inherited or not. Only applies within tasks. */ @visibility("read") inherited?: boolean = false; - - ...CreateToolRequest; } /** Object describing a Task */ diff --git a/typespec/tasks/step_kind.tsp b/typespec/tasks/step_kind.tsp new file mode 100644 index 000000000..569c5737a --- /dev/null +++ b/typespec/tasks/step_kind.tsp @@ -0,0 +1,72 @@ +namespace Tasks; + +// +// STEP KINDS +// + +alias WorkflowStepKind = + //////////////////// + /// Common steps /// + //////////////////// + + | /** A step that runs a tool */ + "tool_call" + | /** A step that runs a prompt */ + "prompt" + | /** A step that evaluates an expression */ + "evaluate" + | /** A step that signals that it needs more input before resuming */ + "wait_for_input" + | /** log step */ + "log" + + //////////////////////// + /// Doc search steps /// + //////////////////////// + + | /** A step that can embed text */ + "embed" + | /** A step that can search for documents (in the agents doc store only) */ + "search" + + /////////////////////// + /// Key-value steps /// + /////////////////////// + + | /** set step */ + "set" + | /** get step */ + "get" + + /////////////////////// + /// Iteration steps /// + /////////////////////// + + | /** foreach step */ + "foreach" + | /** map_reduce step */ + "map_reduce" + | /** parallel step */ + "parallel" + + ///////////////////////// + /// Conditional steps /// + ///////////////////////// + + | /** switch step */ + "switch" + | /** A step that runs a conditional */ + "if_else" + + ////////////////////////// + /// Other control flow /// + ////////////////////////// + + | /** sleep step */ + "sleep" + | /** return step */ + "return" + | /** A step that runs a subworkflow */ + "yield" + | /** Throw an error */ + "error"; \ No newline at end of file diff --git a/typespec/tasks/steps.tsp b/typespec/tasks/steps.tsp new file mode 100644 index 000000000..ed630fa08 --- /dev/null +++ b/typespec/tasks/steps.tsp @@ -0,0 +1,381 @@ +import "@typespec/http"; + +import "../chat"; +import "../common"; +import "../docs"; +import "../entries"; + +import "./step_kind.tsp"; + +using TypeSpec.Http; + +using Chat; +using Common; +using Docs; +using Entries; + +namespace Tasks; + +// +// STEP DEFINITIONS +// + +/** A simple python expression evaluated at runtime that is expected to return type T. */ +alias TypedExpression = PyExpression; + +/** A python expression that takes an accumulator `results` and an input item `_` and reduces them. */ +alias ReduceExpression> = TypedExpression; + +/** A string->string object where the values are python expressions that get evaluated to give a final object. */ +alias ExpressionObject = Record>; + +/** Nested expression object. */ +alias NestedExpressionObject = Record | ExpressionObject>; + +@discriminator("kind_") +model BaseWorkflowStep { + /** The kind of step */ + @visibility("read") + kind_: T; +} + +alias MappableWorkflowStep = + | EvaluateStep + | ToolCallStep + | PromptStep + | GetStep + | SetStep + | LogStep + | EmbedStep + | SearchStep; + +alias NonConditionalWorkflowStep = + | MappableWorkflowStep + | ReturnStep + | SleepStep + | ErrorWorkflowStep + | YieldStep + | WaitForInputStep; + +alias ConditionalStep = IfElseWorkflowStep | SwitchStep; +alias IterationStep = ForeachStep | ParallelStep | MapReduceStep; +alias WorkflowStep = NonConditionalWorkflowStep | ConditionalStep | IterationStep; + +alias CreateWorkflowStep = WorkflowStep; + +//////////////////// +/// Common steps /// +//////////////////// + +model ToolCallStep extends BaseWorkflowStep<"tool_call"> { + @visibility("read") + kind_: "tool_call" = "tool_call"; + + ...ToolCallStepDef; +} + +model ToolCallStepDef { + /** The tool to run */ + tool: toolRef; + + /** The input parameters for the tool (defaults to last step output) */ + arguments: ExpressionObject | "_" = "_"; +} + +model PromptStep extends BaseWorkflowStep<"prompt"> { + @visibility("read") + kind_: "prompt" = "prompt"; + + ...PromptStepDef; +} + +model PromptStepDef { + /** The prompt to run */ + prompt: JinjaTemplate | InputChatMLMessage[]; + + /** Settings for the prompt */ + settings?: ChatSettings; +} + +model EvaluateStep extends BaseWorkflowStep<"evaluate"> { + @visibility("read") + kind_: "evaluate" = "evaluate"; + + ...EvaluateStepDef; +} + +model EvaluateStepDef { + /** The expression to evaluate */ + evaluate: ExpressionObject; +} + +model WaitForInputStep extends BaseWorkflowStep<"wait_for_input"> { + @visibility("read") + kind_: "wait_for_input" = "wait_for_input"; + + ...WaitForInputStepDef; +} + +model WaitForInputInfo { + /** Any additional info or data */ + info: ExpressionObject; +} + +model WaitForInputStepDef { + /** Any additional info or data */ + wait_for_input: WaitForInputInfo; +} + +model LogStep extends BaseWorkflowStep<"log"> { + @visibility("read") + kind_: "log" = "log"; + + ...LogStepDef; +} + +model LogStepDef { + /** The value to log */ + log: TypedExpression; +} + +//////////////////////// +/// Doc search steps /// +//////////////////////// + +model EmbedStep extends BaseWorkflowStep<"embed"> { + @visibility("read") + kind_: "embed" = "embed"; + + ...EmbedStepDef; +} + +model EmbedStepDef { + /** The text to embed */ + embed: EmbedQueryRequest; +} + +model SearchStep extends BaseWorkflowStep<"search"> { + @visibility("read") + kind_: "search" = "search"; + + ...SearchStepDef; +} + +model SearchStepDef { + /** The search query */ + search: DocSearchRequest; +} + +/////////////////////// +/// Key-value steps /// +/////////////////////// + +model GetStep extends BaseWorkflowStep<"get"> { + @visibility("read") + kind_: "get" = "get"; + + ...GetStepDef; +} + +model GetStepDef { + /** The key to get */ + get: string; +} + +model SetKey { + /** The key to set */ + key: string; + + /** The value to set */ + value: TypedExpression; +} + +model SetStep extends BaseWorkflowStep<"set"> { + @visibility("read") + kind_: "set" = "set"; + + ...SetStepDef; +} + +model SetStepDef { + /** The value to set */ + set: SetKey; +} + +/////////////////////// +/// Iteration steps /// +/////////////////////// + +model ParallelStep extends BaseWorkflowStep<"parallel"> { + @visibility("read") + kind_: "parallel" = "parallel"; + + ...ParallelStepDef; +} + +model ParallelStepDef { + /** The steps to run in parallel. Max concurrency will depend on the platform. */ + @maxItems(100) + parallel: MappableWorkflowStep[]; +} + +model ForeachDo { + /** The variable to iterate over. + * VALIDATION: Should NOT return more than 1000 elements. */ + in: TypedExpression>; + + /** The steps to run for each iteration */ + do: MappableWorkflowStep; +} + +model ForeachStep extends BaseWorkflowStep<"foreach"> { + @visibility("read") + kind_: "foreach" = "foreach"; + + ...ForeachStepDef; +} + +model ForeachStepDef { + /** The steps to run for each iteration */ + foreach: ForeachDo; +} + +model MapReduceStep> extends BaseWorkflowStep<"map_reduce"> { + @visibility("read") + kind_: "map_reduce" = "map_reduce"; + + /** The variable to iterate over */ + over: TypedExpression>; + + /** The steps to run for each iteration */ + map: MappableWorkflowStep; + + /** The expression to reduce the results. + * If not provided, the results are collected and returned as a list. + * A special parameter named `results` is the accumulator and `_` is the current value. */ + reduce?: ReduceExpression; + + initial?: unknown = #[]; +} + +///////////////////////// +/// Conditional steps /// +///////////////////////// + +model IfElseWorkflowStep extends BaseWorkflowStep<"if_else"> { + @visibility("read") + kind_: "if_else" = "if_else"; + + ...IfElseWorkflowStepDef; +} + +model IfElseWorkflowStepDef { + /** The condition to evaluate */ + `if`: TypedExpression; + + /** The steps to run if the condition is true */ + then: NonConditionalWorkflowStep; + + /** The steps to run if the condition is false */ + `else`: NonConditionalWorkflowStep; +} + +model CaseThen { + /** The condition to evaluate */ + case: TypedExpression | "_"; // To support '_' as a value + + /** The steps to run if the condition is true */ + then: NonConditionalWorkflowStep; +} + +model SwitchStep extends BaseWorkflowStep<"switch"> { + @visibility("read") + kind_: "switch" = "switch"; + + ...SwitchStepDef; +} + +model SwitchStepDef { + /** The cond tree */ + @minItems(1) + switch: CaseThen[]; +} + +////////////////////////// +/// Other control flow /// +////////////////////////// + +model YieldStep extends BaseWorkflowStep<"yield"> { + @visibility("read") + kind_: "yield" = "yield"; + + ...YieldStepDef; +} + +model YieldStepDef { + /** The subworkflow to run. + * VALIDATION: Should resolve to a defined subworkflow. + */ + workflow: string; + + /** The input parameters for the subworkflow (defaults to last step output) */ + arguments: ExpressionObject | "_" = "_"; +} + +model ErrorWorkflowStep extends BaseWorkflowStep<"error"> { + @visibility("read") + kind_: "error" = "error"; + + ...ErrorWorkflowStepDef; +} + +model ErrorWorkflowStepDef { + /** The error message */ + error: string; +} + +model SleepFor { + /** The number of seconds to sleep for */ + @minValue(0) + @maxValue(60) + seconds: uint16 = 0; + + /** The number of minutes to sleep for */ + @minValue(0) + @maxValue(60) + minutes: uint16 = 0; + + /** The number of hours to sleep for */ + @minValue(0) + @maxValue(24) + hours: uint16 = 0; + + /** The number of days to sleep for */ + @minValue(0) + @maxValue(30) + days: uint16 = 0; +} + +model SleepStep extends BaseWorkflowStep<"sleep"> { + @visibility("read") + kind_: "sleep" = "sleep"; + + ...SleepStepDef; +} + +model SleepStepDef { + /** The duration to sleep for (max 31 days) */ + sleep: SleepFor; +} + +model ReturnStep extends BaseWorkflowStep<"return"> { + @visibility("read") + kind_: "return" = "return"; + + ...ReturnStepDef; +} + +model ReturnStepDef { + /** The value to return */ + `return`: ExpressionObject; +} diff --git a/typespec/tools/models.tsp b/typespec/tools/models.tsp index 509520304..1938611c9 100644 --- a/typespec/tools/models.tsp +++ b/typespec/tools/models.tsp @@ -32,13 +32,13 @@ alias ToolChoiceOption = "auto" | "none" | NamedToolChoice; /** Function definition */ model FunctionDef { /** DO NOT USE: This will be overriden by the tool name. Here only for compatibility reasons. */ - name?: validPythonIdentifier = "overriden"; + name?: null = null; /** Description of the function */ description?: identifierSafeUnicode; /** The parameters the function accepts */ - parameters: FunctionParameters; + parameters?: FunctionParameters; } diff --git a/typespec/users/models.tsp b/typespec/users/models.tsp index 3bcd691ae..c21b1b03c 100644 --- a/typespec/users/models.tsp +++ b/typespec/users/models.tsp @@ -18,8 +18,7 @@ model User { ...HasTimestamps; /** Name of the user */ - @maxLength(120) - name: identifierSafeUnicode = ""; + name: identifierSafeUnicode = identifierSafeUnicode(""); /** About the user */ about: string = ""; @@ -35,12 +34,9 @@ model UpdateUserRequest { model PatchUserRequest is UpdateUserRequest {} /** Payload for creating a user (and associated documents) */ -model CreateUserRequest { - ...UpdateUserRequest; -} +model CreateUserRequest is UpdateUserRequest {} -model CreateOrUpdateUserRequest { +model CreateOrUpdateUserRequest extends CreateUserRequest { @path id: uuid; - ...UpdateUserRequest; } \ No newline at end of file