diff --git a/python/langsmith/client.py b/python/langsmith/client.py index f2d84a70b..12d0fe10b 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -58,6 +58,7 @@ from urllib import parse as urllib_parse import requests +from deprecated import deprecated from requests import adapters as requests_adapters from requests_toolbelt import ( # type: ignore[import-untyped] multipart as rqtb_multipart, @@ -3896,9 +3897,9 @@ def create_example_from_run( def _prepare_multipart_data( self, examples: Union[ - List[ls_schemas.ExampleUploadWithAttachments] + List[ls_schemas.ExampleCreate] | List[ls_schemas.ExampleUpsertWithAttachments] - | List[ls_schemas.ExampleUpdateWithAttachments], + | List[ls_schemas.ExampleUpdate], ], include_dataset_id: bool = False, dangerously_allow_filesystem: bool = False, @@ -3915,28 +3916,50 @@ def _prepare_multipart_data( for example in examples: if ( - not isinstance(example, ls_schemas.ExampleUploadWithAttachments) + not isinstance(example, ls_schemas.ExampleCreate) and not isinstance(example, ls_schemas.ExampleUpsertWithAttachments) - and not isinstance(example, ls_schemas.ExampleUpdateWithAttachments) + and not isinstance(example, ls_schemas.ExampleUpdate) ): raise ValueError( - "The examples must be of type ExampleUploadWithAttachments" + "The examples must be of type ExampleCreate" " or ExampleUpsertWithAttachments" - " or ExampleUpdateWithAttachments" + " or ExampleUpdate" ) if example.id is not None: example_id = str(example.id) else: example_id = str(uuid.uuid4()) - if isinstance(example, ls_schemas.ExampleUpdateWithAttachments): + if isinstance(example, ls_schemas.ExampleUpdate): created_at = None else: created_at = example.created_at + if isinstance(example, ls_schemas.ExampleCreate): + use_source_run_io = example.use_source_run_io + use_source_run_attachments = example.use_source_run_attachments + source_run_id = example.source_run_id + else: + use_source_run_io, use_source_run_attachments, source_run_id = ( + None, + None, + None, + ) + example_body = { **({"dataset_id": dataset_id} if include_dataset_id else {}), **({"created_at": created_at} if created_at is not None else {}), + **( + {"use_source_run_io": use_source_run_io} + if use_source_run_io + else {} + ), + **( + {"use_source_run_attachments": use_source_run_attachments} + if use_source_run_attachments + else {} + ), + **({"source_run_id": source_run_id} if source_run_id else {}), } if example.metadata is not None: example_body["metadata"] = example.metadata @@ -3956,19 +3979,20 @@ def _prepare_multipart_data( ) ) - inputsb = _dumps_json(example.inputs) + if example.inputs: + inputsb = _dumps_json(example.inputs) - parts.append( - ( - f"{example_id}.inputs", + parts.append( ( - None, - inputsb, - "application/json", - {}, - ), + f"{example_id}.inputs", + ( + None, + inputsb, + "application/json", + {}, + ), + ) ) - ) if example.outputs: outputsb = _dumps_json(example.outputs) @@ -4023,7 +4047,7 @@ def _prepare_multipart_data( ) if ( - isinstance(example, ls_schemas.ExampleUpdateWithAttachments) + isinstance(example, ls_schemas.ExampleUpdate) and example.attachments_operations ): attachments_operationsb = _dumps_json(example.attachments_operations) @@ -4047,18 +4071,36 @@ def _prepare_multipart_data( return encoder, data, opened_files_dict + @deprecated( + version="0.1.0", + reason="This method is deprecated. 
Use `update_examples` instead.", + ) def update_examples_multipart( self, *, dataset_id: ID_TYPE, - updates: Optional[List[ls_schemas.ExampleUpdateWithAttachments]] = None, + updates: Optional[List[ls_schemas.ExampleUpdate]] = None, + dangerously_allow_filesystem: bool = False, + ) -> ls_schemas.UpsertExamplesResponse: + """Update examples using multipart.""" + return self._update_examples_multipart( + dataset_id=dataset_id, + updates=updates, + dangerously_allow_filesystem=dangerously_allow_filesystem, + ) + + def _update_examples_multipart( + self, + *, + dataset_id: ID_TYPE, + updates: Optional[List[ls_schemas.ExampleUpdate]] = None, dangerously_allow_filesystem: bool = False, ) -> ls_schemas.UpsertExamplesResponse: """Update examples using multipart. Args: dataset_id (Union[UUID, str]): The ID of the dataset to update. - updates (Optional[List[ExampleUpdateWithAttachments]]): The updates to apply to the examples. + updates (Optional[List[ExampleUpdate]]): The updates to apply to the examples. Raises: ValueError: If the multipart examples endpoint is not enabled. @@ -4095,18 +4137,36 @@ def update_examples_multipart( _close_files(list(opened_files_dict.values())) return response.json() + @deprecated( + reason="This method is deprecated. Please use the `create_examples` method instead." + ) def upload_examples_multipart( self, *, dataset_id: ID_TYPE, - uploads: Optional[List[ls_schemas.ExampleUploadWithAttachments]] = None, + uploads: Optional[List[ls_schemas.ExampleCreate]] = None, + dangerously_allow_filesystem: bool = False, + ) -> ls_schemas.UpsertExamplesResponse: + """Upload examples using multipart.""" + return self._upload_examples_multipart( + dataset_id=dataset_id, + uploads=uploads, + dangerously_allow_filesystem=dangerously_allow_filesystem, + ) + + def _upload_examples_multipart( + self, + *, + dataset_id: ID_TYPE, + uploads: Optional[List[ls_schemas.ExampleCreate]] = None, dangerously_allow_filesystem: bool = False, ) -> ls_schemas.UpsertExamplesResponse: """Upload examples using multipart. Args: dataset_id (Union[UUID, str]): The ID of the dataset to upload to. - uploads (Optional[List[ExampleUploadWithAttachments]]): The examples to upload. + uploads (Optional[List[ExampleCreate]]): The examples to upload. + dangerously_allow_filesystem (bool): Whether to allow uploading files from the filesystem. Returns: ls_schemas.UpsertExamplesResponse: The count and ids of the successfully uploaded examples @@ -4145,6 +4205,10 @@ def upload_examples_multipart( _close_files(list(opened_files_dict.values())) return response.json() + @deprecated( + version="0.1.0", + reason="This method is deprecated. Use `create_examples` instead.", + ) def upsert_examples_multipart( self, *, @@ -4154,7 +4218,7 @@ def upsert_examples_multipart( """Upsert examples. .. deprecated:: 0.1.0 - This method is deprecated. Use :func:`langsmith.upload_examples_multipart` instead. + This method is deprecated. Use :func:`langsmith.create_examples` instead. 
""" # noqa: E501 if not (self.info.instance_flags or {}).get( "examples_multipart_enabled", False @@ -4188,40 +4252,29 @@ def upsert_examples_multipart( _close_files(list(opened_files_dict.values())) return response.json() + @ls_utils.xor_args(("dataset_id", "dataset_name")) def create_examples( self, *, - inputs: Sequence[Mapping[str, Any]], - outputs: Optional[Sequence[Optional[Mapping[str, Any]]]] = None, - metadata: Optional[Sequence[Optional[Mapping[str, Any]]]] = None, - splits: Optional[Sequence[Optional[str | List[str]]]] = None, - source_run_ids: Optional[Sequence[Optional[ID_TYPE]]] = None, - ids: Optional[Sequence[Optional[ID_TYPE]]] = None, - dataset_id: Optional[ID_TYPE] = None, dataset_name: Optional[str] = None, + dataset_id: Optional[ID_TYPE] = None, + uploads: Optional[List[ls_schemas.ExampleCreate]] = None, + dangerously_allow_filesystem: bool = False, **kwargs: Any, ) -> None: """Create examples in a dataset. Args: - inputs (Sequence[Mapping[str, Any]]): - The input values for the examples. - outputs (Optional[Sequence[Optional[Mapping[str, Any]]]]): - The output values for the examples. - metadata (Optional[Sequence[Optional[Mapping[str, Any]]]]): - The metadata for the examples. - splits (Optional[Sequence[Optional[str | List[str]]]]): - The splits for the examples, which are divisions - of your dataset such as 'train', 'test', or 'validation'. - source_run_ids (Optional[Sequence[Optional[Union[UUID, str]]]]): - The IDs of the source runs associated with the examples. - ids (Optional[Sequence[Union[UUID, str]]]): - The IDs of the examples. - dataset_id (Optional[Union[UUID, str]]): - The ID of the dataset to create the examples in. dataset_name (Optional[str]): The name of the dataset to create the examples in. - **kwargs: Any: Additional keyword arguments are ignored. + dataset_id (Optional[Union[UUID, str]]): + The ID of the dataset to create the examples in. + uploads (Optional[List[ExampleCreate]]): + The examples to create. + dangerously_allow_filesystem (bool): + Whether to allow uploading files from the filesystem. + **kwargs: (Any): Kwargs for backwards compatibility of old `create_eaxmples`. + Do not pass if using uploads. Raises: ValueError: If neither dataset_id nor dataset_name is provided. 
@@ -4229,59 +4282,72 @@ def create_examples(
         Returns:
             None
         """
+        if kwargs and uploads:
+            raise ValueError("When passing kwargs, you must not pass uploads")
+        elif kwargs and not kwargs.get("inputs"):
+            raise ValueError("When passing kwargs, you must pass inputs")
+        elif kwargs:
+            # Read rather than pop "inputs" so kwargs stays truthy and the
+            # legacy path below is still taken.
+            inputs = kwargs["inputs"]
+
         if dataset_id is None and dataset_name is None:
             raise ValueError("Either dataset_id or dataset_name must be provided.")
         if dataset_id is None:
             dataset_id = self.read_dataset(dataset_name=dataset_name).id
 
-        sequence_args = {
-            "outputs": outputs,
-            "metadata": metadata,
-            "splits": splits,
-            "ids": ids,
-            "source_run_ids": source_run_ids,
-        }
+        if not kwargs:
+            self._upload_examples_multipart(
+                dataset_id=dataset_id,
+                uploads=uploads,
+                dangerously_allow_filesystem=dangerously_allow_filesystem,
+            )
+            return
+
+        # Inputs are required in the legacy path, so validate lengths against them
         input_len = len(inputs)
-        for arg_name, arg_value in sequence_args.items():
+        for arg_name, arg_value in kwargs.items():
             if arg_value is not None and len(arg_value) != input_len:
                 raise ValueError(
                     f"Length of {arg_name} ({len(arg_value)}) does not match"
                     f" length of inputs ({input_len})"
                 )
 
         examples = [
-            {
-                "inputs": in_,
-                "outputs": out_,
-                "dataset_id": dataset_id,
-                "metadata": metadata_,
-                "split": split_,
-                "id": id_ or str(uuid.uuid4()),
-                "source_run_id": source_run_id_,
-            }
-            for in_, out_, metadata_, split_, id_, source_run_id_ in zip(
+            ls_schemas.ExampleCreate(
+                **{
+                    "inputs": in_,
+                    "outputs": out_,
+                    "metadata": metadata_,
+                    "split": split_,
+                    "id": id_ or str(uuid.uuid4()),
+                    "source_run_id": source_run_id_,
+                    "attachments": attachments_,
+                    "use_source_run_io": use_source_run_io_,
+                    "use_source_run_attachments": use_source_run_attachments_,
+                }
+            )
+            for in_, out_, metadata_, split_, id_, source_run_id_, attachments_, use_source_run_io_, use_source_run_attachments_ in zip(
                 inputs,
-                outputs or [None] * len(inputs),
-                metadata or [None] * len(inputs),
-                splits or [None] * len(inputs),
-                ids or [None] * len(inputs),
-                source_run_ids or [None] * len(inputs),
+                kwargs.get("outputs") or [None] * len(inputs),
+                kwargs.get("metadata") or [None] * len(inputs),
+                kwargs.get("splits") or [None] * len(inputs),
+                kwargs.get("ids") or [None] * len(inputs),
+                kwargs.get("source_run_ids") or [None] * len(inputs),
+                kwargs.get("attachments") or [None] * len(inputs),
+                kwargs.get("use_source_run_io") or [False] * len(inputs),
+                kwargs.get("use_source_run_attachments") or [[]] * len(inputs),
             )
         ]
 
-        response = self.request_with_retries(
-            "POST",
-            "/examples/bulk",
-            headers={**self._headers, "Content-Type": "application/json"},
-            data=_dumps_json(examples),
+        self._upload_examples_multipart(
+            dataset_id=dataset_id,
+            uploads=examples,
+            dangerously_allow_filesystem=dangerously_allow_filesystem,
         )
-        ls_utils.raise_for_status_with_text(response)
 
     @ls_utils.xor_args(("dataset_id", "dataset_name"))
     def create_example(
         self,
-        inputs: Mapping[str, Any],
+        inputs: Optional[Mapping[str, Any]] = None,
         dataset_id: Optional[ID_TYPE] = None,
         dataset_name: Optional[str] = None,
         created_at: Optional[datetime.datetime] = None,
@@ -4290,6 +4356,9 @@
         split: Optional[str | List[str]] = None,
         example_id: Optional[ID_TYPE] = None,
         source_run_id: Optional[ID_TYPE] = None,
+        use_source_run_io: bool = False,
+        use_source_run_attachments: Optional[List[str]] = None,
+        attachments: Optional[ls_schemas.Attachments] = None,
     ) -> ls_schemas.Example:
         """Create a dataset example in the LangSmith API.
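Likewise, a sketch of `create_example` with the new source-run and attachment parameters; the dataset name, run ID, and attachment bytes are placeholders, and the `{name: (mime_type, bytes)}` attachment shape is assumed from the SDK's `Attachments` type:

```python
import uuid

from langsmith import Client

client = Client()  # assumes LANGSMITH_API_KEY is set in the environment

# Copy inputs/outputs (and attachments) from an existing run; `inputs` may be
# omitted because use_source_run_io=True.
client.create_example(
    dataset_name="my-dataset",   # hypothetical dataset
    source_run_id=uuid.uuid4(),  # placeholder for a real run ID
    use_source_run_io=True,
)

# Or attach binary content directly per attachment name.
client.create_example(
    dataset_name="my-dataset",
    inputs={"question": "Describe the image."},
    attachments={"image": ("image/png", b"<png bytes>")},
)
```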
@@ -4318,37 +4387,44 @@ def create_example( example will be created. source_run_id (Optional[Union[UUID, str]]): The ID of the source run associated with this example. + use_source_run_io (bool): + Whether to use the inputs, outputs, and attachments from the source run. + use_source_run_attachments (Optional[List[str]]): + Which attachments to use from the source run. If use_source_run_io + is True, all attachments will be used regardless of this param. + attachments (Optional[Attachments]): + The attachments for the example. Returns: Example: The created example. """ + if inputs is None and not use_source_run_io: + raise ValueError("Must provide either inputs or use_source_run_io") + if dataset_id is None: dataset_id = self.read_dataset(dataset_name=dataset_name).id - data = { - "inputs": inputs, - "outputs": outputs, - "dataset_id": dataset_id, - "metadata": metadata, - "split": split, - "source_run_id": source_run_id, - } - if created_at: - data["created_at"] = created_at.isoformat() - data["id"] = example_id or str(uuid.uuid4()) - response = self.request_with_retries( - "POST", - "/examples", - headers={**self._headers, "Content-Type": "application/json"}, - data=_dumps_json({k: v for k, v in data.items() if v is not None}), + data = ls_schemas.ExampleCreate( + **{ + "inputs": inputs, + "outputs": outputs, + "metadata": metadata, + "split": split, + "source_run_id": source_run_id, + "use_source_run_io": use_source_run_io, + "use_source_run_attachments": use_source_run_attachments, + "attachments": attachments, + } ) - ls_utils.raise_for_status_with_text(response) - result = response.json() - return ls_schemas.Example( - **result, - _host_url=self._host_url, - _tenant_id=self._get_optional_tenant_id(), + if created_at: + data.created_at = created_at + data.id = ( + (uuid.UUID(example_id) if isinstance(example_id, str) else example_id) + if example_id + else uuid.uuid4() ) + self._upload_examples_multipart(dataset_id=dataset_id, uploads=[data]) + return self.read_example(example_id=data.id) def read_example( self, example_id: ID_TYPE, *, as_of: Optional[datetime.datetime] = None @@ -4657,6 +4733,7 @@ def update_example( split: Optional[str | List[str]] = None, dataset_id: Optional[ID_TYPE] = None, attachments_operations: Optional[ls_schemas.AttachmentsOperations] = None, + attachments: Optional[ls_schemas.Attachments] = None, ) -> Dict[str, Any]: """Update a specific example. @@ -4676,6 +4753,8 @@ def update_example( The ID of the dataset to update. attachments_operations (Optional[AttachmentsOperations]): The attachments operations to perform. + attachments (Optional[Attachments]): + The attachments to add to the example. Returns: Dict[str, Any]: The updated example. @@ -4687,117 +4766,153 @@ def update_example( raise ValueError( "Your LangSmith version does not allow using the attachment operations, please update to the latest version." 
)
-        example = dict(
+        example_dict = dict(
             inputs=inputs,
             outputs=outputs,
-            dataset_id=dataset_id,
+            id=example_id,
             metadata=metadata,
             split=split,
             attachments_operations=attachments_operations,
+            attachments=attachments,
         )
-        example = {k: v for k, v in example.items() if v is not None}
-        response = self.request_with_retries(
-            "PATCH",
-            f"/examples/{_as_uuid(example_id, 'example_id')}",
-            headers={**self._headers, "Content-Type": "application/json"},
-            data=_dumps_json({k: v for k, v in example.items() if v is not None}),
+        example = ls_schemas.ExampleUpdate(
+            **{k: v for k, v in example_dict.items() if v is not None}
         )
-        ls_utils.raise_for_status_with_text(response)
-        return response.json()
+
+        if dataset_id is None:
+            dataset_id = self.read_example(example_id).dataset_id
+        return dict(
+            self._update_examples_multipart(dataset_id=dataset_id, updates=[example])
+        )
 
     def update_examples(
         self,
         *,
-        example_ids: Sequence[ID_TYPE],
-        inputs: Optional[Sequence[Optional[Dict[str, Any]]]] = None,
-        outputs: Optional[Sequence[Optional[Mapping[str, Any]]]] = None,
-        metadata: Optional[Sequence[Optional[Dict]]] = None,
-        splits: Optional[Sequence[Optional[str | List[str]]]] = None,
-        dataset_ids: Optional[Sequence[Optional[ID_TYPE]]] = None,
-        attachments_operations: Optional[
-            Sequence[Optional[ls_schemas.AttachmentsOperations]]
-        ] = None,
+        dataset_id: ID_TYPE | None = None,
+        updates: Optional[List[ls_schemas.ExampleUpdate]] = None,
+        dangerously_allow_filesystem: bool = False,
+        **kwargs: Any,
     ) -> Dict[str, Any]:
         """Update multiple examples.
 
         Args:
-            example_ids (Sequence[Union[UUID, str]]):
-                The IDs of the examples to update.
-            inputs (Optional[Sequence[Optional[Dict[str, Any]]]):
-                The input values for the examples.
-            outputs (Optional[Sequence[Optional[Mapping[str, Any]]]]):
-                The output values for the examples.
-            metadata (Optional[Sequence[Optional[Mapping[str, Any]]]]):
-                The metadata for the examples.
-            splits (Optional[Sequence[Optional[str | List[str]]]]):
-                The splits for the examples, which are divisions
-                of your dataset such as 'train', 'test', or 'validation'.
-            dataset_ids (Optional[Sequence[Optional[Union[UUID, str]]]]):
-                The IDs of the datasets to move the examples to.
-            attachments_operations (Optional[Sequence[Optional[ls_schemas.AttachmentsOperations]]):
-                The operations to perform on the attachments.
+            dataset_id (Optional[Union[UUID, str]]):
+                The ID of the dataset to update.
+            updates (Optional[List[ExampleUpdate]]):
+                The updates to apply to the examples.
+            dangerously_allow_filesystem (bool, default=False):
+                Whether to allow using filesystem paths as attachments.
+            **kwargs (Any): Kwargs for backwards compatibility with the old
+                `update_examples` signature. Do not pass if using updates.
 
         Returns:
             Dict[str, Any]: The response from the server (specifies the number of examples updated).
         """
-        if attachments_operations is not None:
+        if kwargs and any([dataset_id, updates]):
+            raise ValueError(
+                "When passing kwargs, you must not pass dataset_id or updates"
+            )
+        elif kwargs and not kwargs.get("example_ids"):
+            raise ValueError("When passing kwargs, you must pass example_ids")
+        elif kwargs:
+            example_ids = kwargs.pop("example_ids")
+        elif updates is None:
+            raise ValueError("When not passing kwargs, you must pass updates")
+        elif len(updates) == 0:
+            return {"message": "0 examples updated", "count": 0, "example_ids": []}
+        else:
+            if not dataset_id and not updates[0].dataset_id:
+                raise ValueError(
+                    "When not passing kwargs, you must pass dataset_id as a param or in the first update object"
+                )
+            if not dataset_id:
+                # Naively assume all updates target the dataset of the first example
+                dataset_id = updates[0].dataset_id
+
+            if not dataset_id:
+                raise ValueError("Must pass a non-null dataset_id")
+
+            response = self._update_examples_multipart(
+                dataset_id=dataset_id,
+                updates=updates,
+                dangerously_allow_filesystem=dangerously_allow_filesystem,
+            )
+            return {
+                "message": f"{response.get('count', 0)} examples updated",
+                **response,
+            }
+
+        if (
+            kwargs.get("attachments_operations") is not None
+            or kwargs.get("attachments") is not None
+        ):
             if not (self.info.instance_flags or {}).get(
                 "dataset_examples_multipart_enabled", False
             ):
                 raise ValueError(
                     "Your LangSmith version does not allow using the attachment operations, please update to the latest version."
                 )
 
-        sequence_args = {
-            "inputs": inputs,
-            "outputs": outputs,
-            "metadata": metadata,
-            "splits": splits,
-            "dataset_ids": dataset_ids,
-            "attachments_operations": attachments_operations,
-        }
-        # Since inputs are required, we will check against them
+        # Example IDs are required in the legacy path, so validate lengths against them
         examples_len = len(example_ids)
-        for arg_name, arg_value in sequence_args.items():
+        for arg_name, arg_value in kwargs.items():
             if arg_value is not None and len(arg_value) != examples_len:
                 raise ValueError(
                     f"Length of {arg_name} ({len(arg_value)}) does not match"
                     f" length of examples ({examples_len})"
                 )
 
         examples = [
-            {
-                "id": id_,
-                "inputs": in_,
-                "outputs": out_,
-                "dataset_id": dataset_id_,
-                "metadata": metadata_,
-                "split": split_,
-                "attachments_operations": attachments_operations_,
-            }
-            for id_, in_, out_, metadata_, split_, dataset_id_, attachments_operations_ in zip(
+            ls_schemas.ExampleUpdate(
+                **{
+                    "id": id_,
+                    "inputs": in_,
+                    "outputs": out_,
+                    "dataset_id": dataset_id_,
+                    "metadata": metadata_,
+                    "split": split_,
+                    "attachments": attachments_,
+                    "attachments_operations": attachments_operations_,
+                }
+            )
+            for id_, in_, out_, metadata_, split_, dataset_id_, attachments_, attachments_operations_ in zip(
                 example_ids,
-                inputs or [None] * len(example_ids),
-                outputs or [None] * len(example_ids),
-                metadata or [None] * len(example_ids),
-                splits or [None] * len(example_ids),
-                dataset_ids or [None] * len(example_ids),
-                attachments_operations or [None] * len(example_ids),
+                kwargs.get("inputs", [None] * len(example_ids)),
+                kwargs.get("outputs", [None] * len(example_ids)),
+                kwargs.get("metadata", [None] * len(example_ids)),
+                kwargs.get("splits", [None] * len(example_ids)),
+                kwargs.get("dataset_ids", [None] * len(example_ids)),
+                kwargs.get("attachments", [None] * len(example_ids)),
+                kwargs.get("attachments_operations", [None] * len(example_ids)),
            )
         ]
-        response = self.request_with_retries(
-            "PATCH",
-            "/examples/bulk",
-            headers={**self._headers, "Content-Type": "application/json"},
data=( - _dumps_json( - [ - {k: v for k, v in example.items() if v is not None} - for example in examples - ] - ) - ), - ) - ls_utils.raise_for_status_with_text(response) - return response.json() + if "dataset_ids" not in kwargs: + # get dataset_id of first example, assume it works for all + dataset_id = self.read_example(example_ids[0]).dataset_id + response = self._update_examples_multipart( + dataset_id=dataset_id, + updates=examples, + dangerously_allow_filesystem=dangerously_allow_filesystem, + ) + else: + if len(set(kwargs["dataset_ids"])) > 1: + raise ValueError("Dataset IDs must be the same for all examples") + dataset_id = kwargs["dataset_ids"][0] + if not dataset_id: + raise ValueError("dataset_ids cannot be set to None") + response = self._update_examples_multipart( + dataset_id=dataset_id, + updates=examples, + dangerously_allow_filesystem=dangerously_allow_filesystem, + ) + + return {"message": f"{response.get('count', 0)} examples updated", **response} def delete_example(self, example_id: ID_TYPE) -> None: """Delete an example by ID. diff --git a/python/langsmith/evaluation/_runner.py b/python/langsmith/evaluation/_runner.py index a464f9cf2..3de814a8c 100644 --- a/python/langsmith/evaluation/_runner.py +++ b/python/langsmith/evaluation/_runner.py @@ -2258,7 +2258,7 @@ def _flatten_experiment_results( ): return [ { - **{f"inputs.{k}": v for k, v in x["example"].inputs.items()}, + **{f"inputs.{k}": v for k, v in (x["example"].inputs or {}).items()}, **{f"outputs.{k}": v for k, v in (x["run"].outputs or {}).items()}, "error": x["run"].error, **( diff --git a/python/langsmith/schemas.py b/python/langsmith/schemas.py index 7555a4b79..c2da6f6e4 100644 --- a/python/langsmith/schemas.py +++ b/python/langsmith/schemas.py @@ -91,7 +91,7 @@ class ExampleBase(BaseModel): """Example base model.""" dataset_id: UUID - inputs: Dict[str, Any] = Field(default_factory=dict) + inputs: Optional[Dict[str, Any]] = Field(default=None) outputs: Optional[Dict[str, Any]] = Field(default=None) metadata: Optional[Dict[str, Any]] = Field(default=None) @@ -102,27 +102,29 @@ class Config: arbitrary_types_allowed = True -class ExampleCreate(ExampleBase): - """Example create model.""" - - id: Optional[UUID] - created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) - split: Optional[Union[str, List[str]]] = None - - -class ExampleUploadWithAttachments(BaseModel): +class ExampleCreate(BaseModel): """Example upload with attachments.""" id: Optional[UUID] created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) - inputs: Dict[str, Any] = Field(default_factory=dict) + inputs: Optional[Dict[str, Any]] = Field(default=None) outputs: Optional[Dict[str, Any]] = Field(default=None) metadata: Optional[Dict[str, Any]] = Field(default=None) split: Optional[Union[str, List[str]]] = None attachments: Optional[Attachments] = None + use_source_run_io: bool = False + use_source_run_attachments: Optional[List[str]] = None + source_run_id: Optional[UUID] = None + + def __init__(self, **data): + """Initialize from dict.""" + super().__init__(**data) + +ExampleUploadWithAttachments = ExampleCreate -class ExampleUpsertWithAttachments(ExampleUploadWithAttachments): + +class ExampleUpsertWithAttachments(ExampleCreate): """Example create with attachments.""" dataset_id: UUID @@ -197,31 +199,28 @@ class AttachmentsOperations(BaseModel): class ExampleUpdate(BaseModel): - """Update class for Example.""" + """Example update with attachments.""" + id: UUID dataset_id: Optional[UUID] = None - 
inputs: Optional[Dict[str, Any]] = None - outputs: Optional[Dict[str, Any]] = None - attachments_operations: Optional[AttachmentsOperations] = None - metadata: Optional[Dict[str, Any]] = None + inputs: Optional[Dict[str, Any]] = Field(default=None) + outputs: Optional[Dict[str, Any]] = Field(default=None) + metadata: Optional[Dict[str, Any]] = Field(default=None) split: Optional[Union[str, List[str]]] = None + attachments: Optional[Attachments] = None + attachments_operations: Optional[AttachmentsOperations] = None class Config: """Configuration class for the schema.""" frozen = True + def __init__(self, **data): + """Initialize from dict.""" + super().__init__(**data) -class ExampleUpdateWithAttachments(ExampleUpdate): - """Example update with attachments.""" - id: UUID - inputs: Dict[str, Any] = Field(default_factory=dict) - outputs: Optional[Dict[str, Any]] = Field(default=None) - metadata: Optional[Dict[str, Any]] = Field(default=None) - split: Optional[Union[str, List[str]]] = None - attachments: Optional[Attachments] = None - attachments_operations: Optional[AttachmentsOperations] = None +ExampleUpdateWithAttachments = ExampleUpdate class DataType(str, Enum): diff --git a/python/poetry.lock b/python/poetry.lock index be599152c..1289f6511 100644 --- a/python/poetry.lock +++ b/python/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,6 +18,7 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main", "dev", "lint"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -39,6 +41,7 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, @@ -58,6 +61,7 @@ version = "24.10.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, @@ -104,6 +108,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main", "dev", "lint"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -115,6 +120,8 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -194,6 +201,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -295,6 +303,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -309,10 +318,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "lint", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "sys_platform == \"win32\"", dev = "sys_platform == \"win32\" or platform_system == \"Windows\"", lint = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -320,6 +331,7 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -397,6 +409,7 @@ version = "0.6.7" description = "Easily serialize dataclasses to and from JSON." 
optional = false python-versions = "<4.0,>=3.7" +groups = ["dev"] files = [ {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, @@ -406,12 +419,31 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" +[[package]] +name = "deprecated" +version = "1.2.18" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] +files = [ + {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, + {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] + [[package]] name = "distro" version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["lint"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -423,6 +455,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "lint", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -437,6 +471,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -451,6 +486,7 @@ version = "0.115.8" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, @@ -471,6 +507,7 @@ version = "1.5.1" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, @@ -485,6 +522,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "lint"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -496,6 +534,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -517,6 +556,7 @@ version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint"] files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -542,6 +582,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev", "lint"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -556,6 +597,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -567,6 +609,7 @@ version = "0.8.2" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" +groups = ["lint"] files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -652,6 +695,8 @@ version = "0.1.0rc5" description = "" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"langsmith-pyo3\"" files = [ {file = "langsmith_pyo3-0.1.0rc5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:670e5707f09e756e3b71f12a46f546826893158c2dde3250e212234e15ac8c70"}, {file = "langsmith_pyo3-0.1.0rc5-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:e0d2261c3f9ba07594c7450187604c4c4b1e10e3b3f4a4eb39587a495d6945e3"}, @@ -691,6 +736,8 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -715,6 +762,7 @@ version = "3.26.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, @@ -734,6 +782,8 @@ version = "0.1.2" description = "Markdown URL utilities" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -745,6 +795,7 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -849,6 +900,7 @@ version = "1.2.1" description = "Parser for multipart/form-data" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "multipart-1.2.1-py3-none-any.whl", hash = "sha256:c03dc203bc2e67f6b46a599467ae0d87cf71d7530504b2c1ff4a9ea21d8b8c8c"}, {file = "multipart-1.2.1.tar.gz", hash = "sha256:829b909b67bc1ad1c6d4488fcdc6391c2847842b08323addf5200db88dbe9480"}, @@ -864,6 +916,7 @@ version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, @@ -923,6 +976,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -934,6 +988,7 @@ version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -988,6 +1043,7 @@ version = "1.61.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" +groups = ["lint"] files = [ {file = "openai-1.61.0-py3-none-any.whl", hash = "sha256:e8c512c0743accbdbe77f3429a1490d862f8352045de8dc81969301eb4a4f666"}, {file = "openai-1.61.0.tar.gz", hash = "sha256:216f325a24ed8578e929b0f1b3fb2052165f3b04b0461818adaa51aa29c71f8a"}, @@ -1013,6 +1069,8 @@ version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -1101,6 +1159,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -1112,6 +1171,7 @@ version = "2.2.2.240807" description = "Type annotations for pandas" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pandas_stubs-2.2.2.240807-py3-none-any.whl", hash = "sha256:893919ad82be4275f0d07bb47a95d08bae580d3fdea308a7acfcb3f02e76186e"}, {file = "pandas_stubs-2.2.2.240807.tar.gz", hash = "sha256:64a559725a57a449f46225fbafc422520b7410bff9252b661a225b5559192a93"}, @@ -1127,6 +1187,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1138,6 +1199,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -1154,6 +1216,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1169,6 +1232,7 @@ version = "0.2.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, @@ -1260,6 +1324,7 @@ version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["dev"] files = [ {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, @@ -1288,6 +1353,7 @@ version = "0.3.14" description = "Sampling profiler for Python programs" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, @@ -1304,6 +1370,8 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1315,6 +1383,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1335,6 +1404,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1447,6 +1517,8 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -1461,6 +1533,7 @@ version = "2.8.1" description = "Python module to run and analyze benchmarks" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pyperf-2.8.1-py3-none-any.whl", hash = "sha256:12a974a800a96568575be51d229b88e6b14197d02440afd98e908d80a42a1a44"}, {file = "pyperf-2.8.1.tar.gz", hash = "sha256:ef103e21a4d04999315003026a2d659c48a7cfce5e1440f03d6e72591400713a"}, @@ -1478,6 +1551,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "test"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -1500,6 +1574,7 @@ version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, @@ -1518,6 +1593,7 @@ version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, @@ -1536,6 +1612,7 @@ version = "14.0" description = "pytest plugin to re-run tests to eliminate flaky failures" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-rerunfailures-14.0.tar.gz", hash = "sha256:4a400bcbcd3c7a4ad151ab8afac123d90eca3abe27f98725dc4d9702887d2e92"}, {file = "pytest_rerunfailures-14.0-py3-none-any.whl", hash = "sha256:4197bdd2eaeffdbf50b5ea6e7236f47ff0e44d1def8dae08e409f536d84e7b32"}, @@ -1551,6 +1628,7 @@ version = "0.7.0" description = "Pytest Plugin to disable socket calls during tests" optional = false python-versions = ">=3.8,<4.0" +groups = ["dev", "test"] files = [ {file = "pytest_socket-0.7.0-py3-none-any.whl", hash = "sha256:7e0f4642177d55d317bbd58fc68c6bd9048d6eadb2d46a89307fa9221336ce45"}, {file = "pytest_socket-0.7.0.tar.gz", hash = "sha256:71ab048cbbcb085c15a4423b73b619a8b35d6a307f46f78ea46be51b1b7e11b3"}, @@ -1565,6 +1643,7 @@ version = "0.11.0" description = "unittest subTest() support and subtests fixture" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-subtests-0.11.0.tar.gz", hash = "sha256:51865c88457545f51fb72011942f0a3c6901ee9e24cbfb6d1b9dc1348bafbe37"}, {file = "pytest_subtests-0.11.0-py3-none-any.whl", hash = "sha256:453389984952eec85ab0ce0c4f026337153df79587048271c7fd0f49119c07e4"}, @@ -1580,6 +1659,7 @@ version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" +groups = ["dev"] files = [ {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, @@ -1595,6 +1675,7 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -1615,6 +1696,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1629,6 +1711,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -1691,6 +1774,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -1712,6 +1796,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -1726,6 +1811,8 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = true python-versions = ">=3.8.0" +groups = ["main"] +markers = "extra == \"pytest\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -1745,6 +1832,7 @@ version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, @@ -1772,6 +1860,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -1783,6 +1872,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "lint"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -1794,6 +1884,7 @@ version = "0.45.3" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, @@ -1812,6 +1903,7 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "test"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -1846,6 +1938,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +markers = {main = "python_version < \"3.11\"", dev = "python_full_version <= \"3.11.0a6\"", test = "python_version < \"3.11\""} [[package]] name = "tqdm" @@ -1853,6 +1946,7 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["lint"] files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -1868,12 +1962,25 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "types-deprecated" +version = "1.2.15.20241117" +description = "Typing stubs for Deprecated" +optional = false +python-versions = ">=3.8" +groups = ["typing"] +files = [ + {file = "types-Deprecated-1.2.15.20241117.tar.gz", hash = "sha256:924002c8b7fddec51ba4949788a702411a2e3636cd9b2a33abd8ee119701d77e"}, + {file = "types_Deprecated-1.2.15.20241117-py3-none-any.whl", hash = "sha256:a0cc5e39f769fc54089fd8e005416b55d74aa03f6964d2ed1a0b0b2e28751884"}, +] + [[package]] name = "types-psutil" version = "5.9.5.20240516" description = "Typing stubs for psutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-psutil-5.9.5.20240516.tar.gz", hash = "sha256:bb296f59fc56458891d0feb1994717e548a1bcf89936a2877df8792b822b4696"}, {file = "types_psutil-5.9.5.20240516-py3-none-any.whl", hash = "sha256:83146ded949a10167d9895e567b3b71e53ebc5e23fd8363eab62b3c76cce7b89"}, @@ -1885,6 +1992,7 @@ version = "2025.1.0.20250204" description = "Typing stubs for pytz" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "types_pytz-2025.1.0.20250204-py3-none-any.whl", hash = "sha256:32ca4a35430e8b94f6603b35beb7f56c32260ddddd4f4bb305fdf8f92358b87e"}, {file = "types_pytz-2025.1.0.20250204.tar.gz", hash = "sha256:00f750132769f1c65a4f7240bc84f13985b4da774bd17dfbe5d9cd442746bd49"}, @@ -1896,6 +2004,7 @@ version = "6.0.12.20241230" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -1907,6 +2016,8 @@ version = "2.31.0.6" description = "Typing stubs for requests" 
optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\"" files = [ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, @@ -1921,6 +2032,8 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.10\"" files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -1935,6 +2048,7 @@ version = "4.67.0.20241221" description = "Typing stubs for tqdm" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_tqdm-4.67.0.20241221-py3-none-any.whl", hash = "sha256:a1f1c9cda5c2d8482d2c73957a5398bfdedda10f6bc7b3b4e812d5c910486d29"}, {file = "types_tqdm-4.67.0.20241221.tar.gz", hash = "sha256:e56046631056922385abe89aeb18af5611f471eadd7918a0ad7f34d84cd4c8cc"}, @@ -1949,6 +2063,8 @@ version = "1.26.25.14" description = "Typing stubs for urllib3" optional = false python-versions = "*" +groups = ["dev"] +markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\"" files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -1960,6 +2076,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "lint"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -1971,6 +2088,7 @@ version = "0.9.0" description = "Runtime inspection utilities for typing module." optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -1986,6 +2104,8 @@ version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\"" files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, @@ -2002,6 +2122,8 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] +markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.10\"" files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2019,6 +2141,7 @@ version = "0.29.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"}, {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"}, @@ -2038,6 +2161,7 @@ version = "6.0.2" description = "Automatically mock your HTTP interactions to simplify and speed up testing" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "vcrpy-6.0.2-py2.py3-none-any.whl", hash = "sha256:40370223861181bc76a5e5d4b743a95058bb1ad516c3c08570316ab592f56cad"}, {file = "vcrpy-6.0.2.tar.gz", hash = "sha256:88e13d9111846745898411dbc74a75ce85870af96dd320d75f1ee33158addc09"}, @@ -2061,6 +2185,7 @@ version = "6.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, @@ -2103,6 +2228,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -2191,6 +2317,7 @@ version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -2287,6 +2414,7 @@ version = "0.23.0" description = "Zstandard bindings for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, @@ -2399,6 +2527,6 @@ pytest = ["pytest", "rich"] vcr = [] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.9,<4.0" -content-hash = "dba91f69a082b69ba78b241563849629fc80ffc37934d00f7a365f5e5f2cfe56" +content-hash = "a358a032ba5bed139f9fc95a7c3dbd12c638e068c834dd1d83dc7b4b8085b805" diff --git a/python/pyproject.toml b/python/pyproject.toml index 6e61d67e3..9d7329ba5 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -37,6 +37,7 @@ langsmith-pyo3 = { version = "^0.1.0rc2", optional = true } zstandard = "^0.23.0" rich = {version = "^13.9.4", optional = true} pytest = {version = ">=7.0.0", optional = true} +deprecated = "^1.2.18" [tool.poetry.group.dev.dependencies] pytest = "^7.3.1" @@ -75,6 +76,10 @@ openai = "^1.10" [tool.poetry.group.test.dependencies] pytest-socket = "^0.7.0" + +[tool.poetry.group.typing.dependencies] +types-deprecated = "^1.2.15.20241117" + [tool.poetry.extras] vcr = ["vcrpy"] langsmith_pyo3 = ["langsmith-pyo3"] diff --git a/python/tests/integration_tests/test_client.py b/python/tests/integration_tests/test_client.py index 63b46c680..3b2117868 100644 --- a/python/tests/integration_tests/test_client.py +++ b/python/tests/integration_tests/test_client.py @@ -28,12 +28,13 @@ AttachmentsOperations, DataType, Example, - ExampleUpdateWithAttachments, - ExampleUploadWithAttachments, + ExampleCreate, + ExampleUpdate, ExampleUpsertWithAttachments, Run, ) from langsmith.utils import ( + LangSmithAPIError, LangSmithConnectionError, LangSmithError, LangSmithNotFoundError, @@ -438,7 +439,7 @@ def test_upload_examples_multipart(langchain_client: Client): # Test example with all fields example_id = uuid4() - example_1 = ExampleUploadWithAttachments( + example_1 = ExampleCreate( id=example_id, inputs={"text": "hello world"}, attachments={ @@ -447,12 +448,12 @@ def test_upload_examples_multipart(langchain_client: Client): ) # Test example with minimum required fields - example_2 = ExampleUploadWithAttachments( + example_2 = ExampleCreate( inputs={"text": "minimal example"}, ) # Test example with outputs and multiple attachments - example_3 = ExampleUploadWithAttachments( + example_3 = ExampleCreate( inputs={"text": "example with outputs"}, outputs={"response": "test response"}, attachments={ @@ -501,7 +502,7 @@ 
def test_upload_examples_multipart(langchain_client: Client): langchain_client.upload_examples_multipart( dataset_id=fake_id, uploads=[ - ExampleUploadWithAttachments( + ExampleCreate( inputs={"text": "should fail"}, ) ], @@ -1533,7 +1534,7 @@ def test_list_examples_attachments_keys(langchain_client: Client) -> None: langchain_client.upload_examples_multipart( dataset_id=dataset.id, uploads=[ - ExampleUploadWithAttachments( + ExampleCreate( inputs={"text": "hello world"}, outputs={"response": "hi there"}, attachments={ @@ -1572,7 +1573,7 @@ def test_mime_type_is_propogated(langchain_client: Client) -> None: langchain_client.upload_examples_multipart( dataset_id=dataset.id, uploads=[ - ExampleUploadWithAttachments( + ExampleCreate( inputs={"text": "hello world"}, outputs={"response": "hi there"}, attachments={ @@ -1601,7 +1602,7 @@ def test_evaluate_mime_type_is_propogated(langchain_client: Client) -> None: langchain_client.upload_examples_multipart( dataset_id=dataset.id, uploads=[ - ExampleUploadWithAttachments( + ExampleCreate( inputs={"text": "hello world"}, outputs={"response": "hi there"}, attachments={ @@ -1640,7 +1641,7 @@ async def test_aevaluate_mime_type_is_propogated(langchain_client: Client) -> No langchain_client.upload_examples_multipart( dataset_id=dataset.id, uploads=[ - ExampleUploadWithAttachments( + ExampleCreate( inputs={"text": "hello world"}, outputs={"response": "hi there"}, attachments={ @@ -1687,7 +1688,7 @@ def test_evaluate_with_attachments_multiple_evaluators( ) # 2. Create example with attachments - example = ExampleUploadWithAttachments( + example = ExampleCreate( inputs={"question": "What is shown in the image?"}, outputs={"answer": "test image"}, attachments={ @@ -1758,7 +1759,7 @@ def test_evaluate_with_attachments(langchain_client: Client) -> None: ) # 2. 
Create example with attachments
-    example = ExampleUploadWithAttachments(
+    example = ExampleCreate(
         inputs={"question": "What is shown in the image?"},
         outputs={"answer": "test image"},
         attachments={
@@ -1812,7 +1813,7 @@ def test_evaluate_with_attachments_not_in_target(langchain_client: Client) -> No
         data_type=DataType.kv,
     )
 
-    example = ExampleUploadWithAttachments(
+    example = ExampleCreate(
         inputs={"question": "What is shown in the image?"},
         outputs={"answer": "test image"},
         attachments={
@@ -1879,7 +1880,7 @@ def test_evaluate_with_no_attachments(langchain_client: Client) -> None:
     )
 
     # Verify we can create example the new way without attachments
-    example = ExampleUploadWithAttachments(
+    example = ExampleCreate(
         inputs={"question": "What is 3+1?"},
         outputs={"answer": "4"},
     )
@@ -1919,7 +1920,7 @@ async def test_aevaluate_with_attachments(langchain_client: Client) -> None:
     )
 
     examples = [
-        ExampleUploadWithAttachments(
+        ExampleCreate(
             inputs={"question": "What is shown in the image?", "index": i},
             outputs={"answer": "test image"},
             attachments={
@@ -1994,7 +1995,7 @@ async def test_aevaluate_with_attachments_not_in_target(
         data_type=DataType.kv,
     )
 
-    example = ExampleUploadWithAttachments(
+    example = ExampleCreate(
         inputs={"question": "What is shown in the image?"},
         outputs={"answer": "test image"},
         attachments={
@@ -2049,7 +2050,7 @@ async def test_aevaluate_with_no_attachments(langchain_client: Client) -> None:
     )
 
     # Verify we can create example the new way without attachments
-    example = ExampleUploadWithAttachments(
+    example = ExampleCreate(
         inputs={"question": "What is 3+1?"},
         outputs={"answer": "4"},
     )
@@ -2122,6 +2123,534 @@ def test_examples_length_validation(langchain_client: Client) -> None:
     langchain_client.delete_dataset(dataset_id=dataset.id)
 
 
+def test_new_create_example(langchain_client: Client) -> None:
+    """Test create_example works with attachments."""
+    dataset_name = "__test_new_create_example" + uuid4().hex[:4]
+    dataset = langchain_client.create_dataset(dataset_name=dataset_name)
+
+    example_id = uuid4()
+    langchain_client.create_example(
+        dataset_name=dataset_name,
+        example_id=example_id,
+        inputs={"query": "What's in this image?"},
+        outputs={"answer": "A test image"},
+        attachments={
+            "image1": ("image/png", b"fake image data 1"),
+            "image2": ("image/png", b"fake image data 2"),
+        },
+    )
+
+    retrieved_example = langchain_client.read_example(example_id=example_id)
+
+    assert retrieved_example.id == example_id
+    assert retrieved_example.dataset_id == dataset.id
+    assert retrieved_example.inputs == {"query": "What's in this image?"}
+    assert retrieved_example.outputs == {"answer": "A test image"}
+    assert list(retrieved_example.attachments.keys()) == ["image1", "image2"]
+
+    # Clean up
+    langchain_client.delete_dataset(dataset_id=dataset.id)
+
+
+def test_new_create_examples(langchain_client: Client) -> None:
+    """Test create_examples works with multipart style input."""
+    dataset_name = "__test_new_create_examples" + uuid4().hex[:4]
+    dataset = langchain_client.create_dataset(dataset_name=dataset_name)
+
+    example_id = uuid4()
+    example = ExampleCreate(
+        id=example_id,
+        inputs={"query": "What's in this image?"},
+        outputs={"answer": "A test image"},
+        attachments={
+            "image1": ("image/png", b"fake image data 1"),
+            "image2": ("image/png", b"fake image data 2"),
+        },
+    )
+
+    # Use new way of passing example
+    langchain_client.create_examples(dataset_name=dataset_name, uploads=[example])
+
+    retrieved_example =
langchain_client.read_example(example_id=example_id) + examples_in_dataset = langchain_client.list_examples(dataset_id=dataset.id) + assert len(list(examples_in_dataset)) == 1 + + assert retrieved_example.id == example_id + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == example.inputs + assert retrieved_example.outputs == example.outputs + assert retrieved_example.attachments.keys() == example.attachments.keys() + + # Use old way of passing example + example_id2 = uuid4() + langchain_client.create_examples( + dataset_name=dataset_name, + ids=[example_id2], + inputs=[{"query": "What's not in this image?"}], + outputs=[{"answer": "A real image"}], + attachments=[ + { + "image1": ("image/png", b"fake image data 1"), + "image2": ("image/png", b"fake image data 2"), + } + ], + ) + + retrieved_example = langchain_client.read_example(example_id=example_id2) + examples_in_dataset = langchain_client.list_examples(dataset_id=dataset.id) + assert len(list(examples_in_dataset)) == 2 + + assert retrieved_example.id == example_id2 + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == {"query": "What's not in this image?"} + assert retrieved_example.outputs == {"answer": "A real image"} + assert retrieved_example.attachments.keys() == example.attachments.keys() + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +def test_new_update_examples(langchain_client: Client) -> None: + """Test update_examples works with multipart style input.""" + dataset_name = "__test_update_examples_output" + uuid4().hex[:4] + dataset = langchain_client.create_dataset(dataset_name=dataset_name) + + example_id = uuid4() + example = ExampleCreate( + id=example_id, + inputs={"query": "What's in this image?"}, + outputs={"answer": "A test image"}, + attachments={ + "image1": ("image/png", b"fake image data 1"), + "image2": ("image/png", b"fake image data 2"), + }, + ) + + # Create some valid examples for testing update + langchain_client.create_examples(dataset_name=dataset_name, uploads=[example]) + + example_update = ExampleUpdate( + id=example_id, + inputs={"query": "What's not in this image?"}, + outputs={"answer": "A real image"}, + attachments={ + "image3": ("image/png", b"fake image data 3"), + }, + ) + + langchain_client.update_examples(dataset_id=dataset.id, updates=[example_update]) + + retrieved_example = langchain_client.read_example(example_id=example_id) + + assert retrieved_example.id == example_id + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == example_update.inputs + assert retrieved_example.outputs == example_update.outputs + assert retrieved_example.attachments.keys() == example_update.attachments.keys() + + langchain_client.update_examples( + dataset_ids=[dataset.id], + example_ids=[example_id], + inputs=[{"query": "What's not in this image?"}], + outputs=[{"answer": "A real image"}], + attachments=[ + { + "image4": ("image/png", b"fake image data 4"), + } + ], + ) + + retrieved_example = langchain_client.read_example(example_id=example_id) + + assert retrieved_example.id == example_id + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == example_update.inputs + assert retrieved_example.outputs == example_update.outputs + assert list(retrieved_example.attachments.keys()) == ["image4"] + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +def test_update_examples_multiple_datasets(langchain_client: Client) -> None: + 
"""Test update_examples does not work with multiple datasets.""" + dataset_name1 = "__test_update_examples_output" + uuid4().hex[:4] + dataset_name2 = "__test_update_examples_output" + uuid4().hex[:4] + dataset1 = langchain_client.create_dataset(dataset_name=dataset_name1) + dataset2 = langchain_client.create_dataset(dataset_name=dataset_name2) + + example1_id = uuid4() + example2_id = uuid4() + example_1 = ExampleCreate( + id=example1_id, + inputs={"query": "What's in this image?"}, + outputs={"answer": "A test image"}, + attachments={ + "image1": ("image/png", b"fake image data 1"), + "image2": ("image/png", b"fake image data 2"), + }, + ) + example_2 = ExampleCreate( + id=example2_id, + inputs={"query": "What's in this image?"}, + outputs={"answer": "A test image"}, + attachments={ + "image1": ("image/png", b"fake image data 1"), + "image2": ("image/png", b"fake image data 2"), + }, + ) + + # Create some valid examples for testing update + langchain_client.create_examples(dataset_name=dataset_name1, uploads=[example_1]) + + langchain_client.create_examples(dataset_name=dataset_name2, uploads=[example_2]) + + example_update_1 = ExampleUpdate( + id=example1_id, + inputs={"query": "What's not in this image?"}, + outputs={"answer": "A real image"}, + attachments={ + "image3": ("image/png", b"fake image data 1"), + }, + ) + + example_update_2 = ExampleUpdate( + id=example2_id, + inputs={"query": "What's not in this image?"}, + outputs={"answer": "A real image"}, + attachments={ + "image3": ("image/png", b"fake image data 1"), + }, + ) + + with pytest.raises(LangSmithAPIError, match="Dataset ID mismatch"): + langchain_client.update_examples( + dataset_id=dataset1.id, updates=[example_update_1, example_update_2] + ) + + with pytest.raises(LangSmithAPIError, match="Dataset ID mismatch"): + langchain_client.update_examples( + example_ids=[example1_id, example2_id], + inputs=[example_update_1.inputs, example_update_2.inputs], + outputs=[example_update_1.outputs, example_update_2.outputs], + ) + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset1.id) + langchain_client.delete_dataset(dataset_id=dataset2.id) + + +@pytest.mark.xfail(reason="Need to wait for backend changes to go endpoint") +def test_use_source_run_io(langchain_client: Client) -> None: + dataset_name = "__test_use_source_run_io" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + + run_id = uuid4() + langchain_client.create_run( + name="foo", + run_type="llm", + inputs={"foo": "bar"}, + outputs={"foo": "bar"}, + attachments={ + "test_file": ("text/plain", b"test content"), + }, + id=run_id, + ) + + retrieved_example = langchain_client.create_example( + use_source_run_io=True, source_run_id=run_id, dataset_id=dataset.id + ) + + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == {"foo": "bar"} + assert retrieved_example.outputs == {"foo": "bar"} + assert list(retrieved_example.attachments.keys()) == ["test_file"] + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +@pytest.mark.xfail(reason="Need to wait for backend changes to go endpoint") +def test_use_source_run_attachments(langchain_client: Client) -> None: + dataset_name = "__test_use_source_run_attachments" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + + run_id = uuid4() + 
langchain_client.create_run( + name="foo", + run_type="llm", + inputs={"foo": "bar"}, + outputs={"foo": "bar"}, + attachments={ + "test_file": ("text/plain", b"test content"), + }, + id=run_id, + ) + + retrieved_example = langchain_client.create_example( + use_source_run_io=True, + use_source_run_attachments=[], + source_run_id=run_id, + dataset_id=dataset.id, + inputs={"bar": "baz"}, + outputs={"bar": "baz"}, + attachments={ + "test_file2": ("text/plain", b"test content"), + }, + ) + + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == {"bar": "baz"} + assert retrieved_example.outputs == {"bar": "baz"} + # source run attachments should override manually passed ones + assert list(retrieved_example.attachments.keys()) == ["test_file"] + + example_id = uuid4() + example = ExampleCreate( + id=example_id, + use_source_run_io=True, + source_run_id=run_id, + attachments={ + "test_file2": ("text/plain", b"test content"), + }, + inputs={"bar": "baz"}, + outputs={"bar": "baz"}, + ) + + langchain_client.create_examples(dataset_id=dataset.id, uploads=[example]) + + retrieved_example = langchain_client.read_example(example_id=example_id) + + assert retrieved_example.dataset_id == dataset.id + assert retrieved_example.inputs == {"foo": "bar"} + assert retrieved_example.outputs == {"foo": "bar"} + # source run attachments should override manually passed ones + assert list(retrieved_example.attachments.keys()) == ["test_file"] + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +def test_create_examples_xor_dataset_args(langchain_client: Client) -> None: + """Test create_examples fails if both dataset_name and dataset_id are provided.""" + dataset_name = "__test_create_examples_xor_dataset_args" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + + with pytest.raises( + ValueError, + match="Exactly one argument in each of the following " + "groups must be defined: dataset_id, dataset_name", + ): + langchain_client.create_examples( + dataset_name=dataset_name, dataset_id=dataset.id + ) + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +def test_must_pass_uploads_or_inputs(langchain_client: Client) -> None: + """Test create_examples fails if no uploads or inputs are provided.""" + dataset_name = "__test_must_pass_uploads_or_inputs" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + + with pytest.raises(ValueError, match="When passing kwargs, you must pass inputs"): + langchain_client.create_examples(dataset_id=dataset.id, outputs={"foo": "bar"}) + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +def test_update_examples_errors(langchain_client: Client) -> None: + """Test update_examples fails in a number of cases.""" + dataset_name = "__test_update_examples_errors" + uuid4().hex[:4] + dataset2_name = "__test_update_examples_errors" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + dataset2 = langchain_client.create_dataset( + dataset_name=dataset2_name, + description="Test dataset for creating dataset with description", + ) + + # Create example to update + example_id = uuid4() + langchain_client.create_example( + example_id=example_id, + inputs={"foo": 
"bar"}, + outputs={"foo": "bar"}, + dataset_id=dataset.id, + ) + example_id2 = uuid4() + langchain_client.create_example( + example_id=example_id2, + inputs={"foo": "bar"}, + outputs={"foo": "bar"}, + dataset_id=dataset2.id, + ) + + # Update example + with pytest.raises( + ValueError, match="When passing kwargs, you must pass example_ids" + ): + langchain_client.update_examples( + outputs=[{"bar": "baz"}], + ) + + with pytest.raises( + ValueError, + match="When passing kwargs, you must not pass dataset_id, or updates", + ): + langchain_client.update_examples( + outputs=[{"bar": "baz"}], + example_ids=[example_id], + dataset_id=dataset.id, + ) + + with pytest.raises( + ValueError, match="When not passing kwargs, you must pass dataset_id" + ): + langchain_client.update_examples( + updates=[ExampleUpdate(id=example_id, outputs={"bar": "baz"})], + ) + + with pytest.raises( + ValueError, match="Dataset IDs must be the same for all examples" + ): + langchain_client.update_examples( + dataset_ids=[dataset.id, uuid4()], + outputs=[{"bar": "baz"}, {"bar": "baz"}], + example_ids=[example_id, uuid4()], + ) + + with pytest.raises(ValueError, match="dataset_ids cannot be set to None"): + langchain_client.update_examples( + dataset_ids=[None], + outputs=[{"bar": "baz"}], + example_ids=[example_id], + ) + + with pytest.raises(LangSmithAPIError): + langchain_client.update_examples( + updates=[ + ExampleUpdate( + id=example_id, outputs={"bar": "baz"}, dataset_id=dataset.id + ), + ExampleUpdate( + id=example_id2, outputs={"bar": "baz"}, dataset_id=dataset2.id + ), + ], + ) + + retrieved_example = langchain_client.read_example(example_id=example_id) + # Assert update failed due to differing datasets + assert retrieved_example.outputs == {"foo": "bar"} + + langchain_client.update_examples( + updates=[ + ExampleUpdate(id=example_id, outputs={"bar": "baz"}, dataset_id=dataset.id) + ], + ) + retrieved_example = langchain_client.read_example(example_id=example_id) + # Assert update was successful + assert retrieved_example.outputs == {"bar": "baz"} + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +def test_create_examples_errors(langchain_client: Client) -> None: + """Test create_examples fails in a number of cases.""" + dataset_name = "__test_create_examples_errors" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + + with pytest.raises( + ValueError, match="When passing kwargs, you must not pass uploads" + ): + langchain_client.create_examples( + dataset_id=dataset.id, outputs={"foo": "bar"}, uploads=[ExampleCreate()] + ) + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + +@pytest.mark.xfail(reason="Need to wait for backend changes to go endpoint") +def test_use_source_run_io_multiple_examples(langchain_client: Client) -> None: + dataset_name = "__test_use_source_run_io" + uuid4().hex[:4] + dataset = langchain_client.create_dataset( + dataset_name=dataset_name, + description="Test dataset for creating dataset with description", + ) + + run_id = uuid4() + langchain_client.create_run( + name="foo", + run_type="llm", + inputs={"foo": "bar"}, + outputs={"foo": "bar"}, + attachments={ + "test_file": ("text/plain", b"test content"), + "real_file": ("text/plain", b"real content"), + }, + id=run_id, + ) + + example_ids = [uuid4(), uuid4(), uuid4()] + langchain_client.create_examples( + ids=example_ids, + inputs=[{"bar": "baz"}, {"bar": "baz"}, {"bar": 
"baz"}], + outputs=[{"bar": "baz"}, {"bar": "baz"}, {"bar": "baz"}], + attachments=[ + {"test_file2": ("text/plain", b"test content")}, + {"test_file2": ("text/plain", b"test content")}, + {"test_file2": ("text/plain", b"test content")}, + ], + use_source_run_io=[True, False, True], + use_source_run_attachments=[[], ["test_file"], ["test_file"]], + source_run_ids=[run_id, run_id, run_id], + dataset_id=dataset.id, + ) + + example_1 = langchain_client.read_example(example_id=example_ids[0]) + example_2 = langchain_client.read_example(example_id=example_ids[1]) + example_3 = langchain_client.read_example(example_id=example_ids[2]) + + assert example_1.dataset_id == dataset.id + assert example_1.inputs == {"foo": "bar"} + assert example_1.outputs == {"foo": "bar"} + assert sorted(example_1.attachments.keys()) == ["real_file", "test_file"] + + assert example_2.dataset_id == dataset.id + assert example_2.inputs == {"bar": "baz"} + assert example_2.outputs == {"bar": "baz"} + assert sorted(example_2.attachments.keys()) == ["test_file"] + + assert example_3.dataset_id == dataset.id + assert example_3.inputs == {"foo": "bar"} + assert example_3.outputs == {"foo": "bar"} + assert sorted(example_3.attachments.keys()) == ["real_file", "test_file"] + + # Clean up + langchain_client.delete_dataset(dataset_id=dataset.id) + + def test_update_example_with_attachments_operations(langchain_client: Client) -> None: """Test updating an example with attachment operations.""" dataset_name = "__test_update_example_attachments" + uuid4().hex[:4] @@ -2131,7 +2660,7 @@ def test_update_example_with_attachments_operations(langchain_client: Client) -> ) example_id = uuid4() # Create example with attachments - example = ExampleUploadWithAttachments( + example = ExampleCreate( id=example_id, inputs={"query": "What's in this image?"}, outputs={"answer": "A test image"}, @@ -2140,7 +2669,7 @@ def test_update_example_with_attachments_operations(langchain_client: Client) -> "image2": ("image/png", b"fake image data 2"), }, ) - langchain_client.upload_examples_multipart(dataset_id=dataset.id, uploads=[example]) + langchain_client.create_examples(dataset_id=dataset.id, uploads=[example]) # Update example with attachment operations to rename and retain attachments attachments_operations = AttachmentsOperations( @@ -2171,6 +2700,24 @@ def test_update_example_with_attachments_operations(langchain_client: Client) -> == b"fake image data 1" ) + langchain_client.update_example( + dataset_id=dataset.id, + example_id=example_id, + attachments={"image3": ("image/png", b"fake image data 3")}, + ) + + # Verify the update + retrieved_example = langchain_client.read_example( + example_id=example_id, + ) + + # Check that only the renamed attachment exists + assert len(retrieved_example.attachments) == 1 + assert "image3" in retrieved_example.attachments + assert ( + retrieved_example.attachments["image3"]["reader"].read() == b"fake image data 3" + ) + # Clean up langchain_client.delete_dataset(dataset_id=dataset.id) @@ -2187,7 +2734,7 @@ def test_bulk_update_examples_with_attachments_operations( example_id1, example_id2 = uuid4(), uuid4() # Create two examples with attachments - example1 = ExampleUploadWithAttachments( + example1 = ExampleCreate( id=example_id1, inputs={"query": "What's in this image?"}, outputs={"answer": "A test image 1"}, @@ -2196,7 +2743,7 @@ def test_bulk_update_examples_with_attachments_operations( "extra": ("text/plain", b"extra data"), }, ) - example2 = ExampleUploadWithAttachments( + example2 = ExampleCreate( 
id=example_id2, inputs={"query": "What's in this image?"}, outputs={"answer": "A test image 2"}, @@ -2273,7 +2820,7 @@ def test_examples_multipart_attachment_path(langchain_client: Client) -> None: file_path = Path(__file__).parent / "test_data/parrot-icon.png" example_id = uuid4() - example = ExampleUploadWithAttachments( + example = ExampleCreate( id=example_id, inputs={"text": "hello world"}, attachments={ @@ -2334,7 +2881,7 @@ def test_examples_multipart_attachment_path(langchain_client: Client) -> None: == (Path(__file__).parent / "test_data/parrot-icon.png").read_bytes() ) - example_update = ExampleUpdateWithAttachments( + example_update = ExampleUpdate( id=example_id, attachments={ "new_file1": ( @@ -2362,7 +2909,7 @@ def test_examples_multipart_attachment_path(langchain_client: Client) -> None: assert retrieved.attachments["new_file1"]["reader"].read() == file_path.read_bytes() assert retrieved.attachments["new_file2"]["reader"].read() == file_path.read_bytes() - example_wrong_path = ExampleUploadWithAttachments( + example_wrong_path = ExampleCreate( id=example_id, inputs={"text": "hello world"}, attachments={ @@ -2399,7 +2946,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: example_ids = [uuid4() for _ in range(2)] # First create some examples with attachments - example_1 = ExampleUploadWithAttachments( + example_1 = ExampleCreate( id=example_ids[0], inputs={"text": "hello world"}, attachments={ @@ -2408,7 +2955,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: }, ) - example_2 = ExampleUploadWithAttachments( + example_2 = ExampleCreate( id=example_ids[1], inputs={"text": "second example"}, attachments={ @@ -2423,7 +2970,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: assert created_examples["count"] == 2 # Now create update operations - update_1 = ExampleUpdateWithAttachments( + update_1 = ExampleUpdate( id=example_ids[0], inputs={"text": "updated hello world"}, attachments={ @@ -2434,7 +2981,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: ), ) - update_2 = ExampleUpdateWithAttachments( + update_2 = ExampleUpdate( id=example_ids[1], inputs={"text": "updated second example"}, attachments={ @@ -2490,7 +3037,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: response = langchain_client.update_examples_multipart( dataset_id=dataset.id, updates=[ - ExampleUpdateWithAttachments( + ExampleUpdate( id=uuid4(), inputs={"text": "should fail"}, ) @@ -2502,7 +3049,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: response = langchain_client.update_examples_multipart( dataset_id=dataset.id, updates=[ - ExampleUpdateWithAttachments( + ExampleUpdate( id=example_ids[0], attachments={ "renamed_file1": ("text/plain", b"new content 1"), @@ -2525,7 +3072,7 @@ def test_update_examples_multipart(langchain_client: Client) -> None: response = langchain_client.update_examples_multipart( dataset_id=dataset.id, updates=[ - ExampleUpdateWithAttachments( + ExampleUpdate( id=example_ids[0], attachments={ "foo": ("text/plain", b"new content 1"), @@ -2553,7 +3100,7 @@ async def test_aevaluate_max_concurrency(langchain_client: Client) -> None: ) examples = [ - ExampleUploadWithAttachments( + ExampleCreate( inputs={"query": "What's in this image?"}, outputs={"answer": "A test image 1"}, )
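
The new tests above all drive the same calling pattern, condensed below as a rough usage sketch. It assumes a reachable LangSmith backend with the multipart examples endpoint enabled and LANGSMITH_API_KEY set; the dataset name and payloads are illustrative, and the imports mirror the test module.

    from uuid import uuid4

    from langsmith import Client
    from langsmith.schemas import ExampleCreate, ExampleUpdate

    client = Client()
    dataset = client.create_dataset(dataset_name="__usage_sketch_" + uuid4().hex[:4])

    example_id = uuid4()
    client.create_examples(
        dataset_id=dataset.id,
        uploads=[
            ExampleCreate(
                id=example_id,
                inputs={"query": "What's in this image?"},
                outputs={"answer": "A test image"},
                # Attachments are {name: (mime_type, raw_bytes)} pairs.
                attachments={"image1": ("image/png", b"fake image data 1")},
            )
        ],
    )

    client.update_examples(
        dataset_id=dataset.id,
        updates=[
            ExampleUpdate(
                id=example_id,
                outputs={"answer": "A real image"},
                # Per test_new_update_examples, attachments passed on update
                # replace the previously stored set.
                attachments={"image2": ("image/png", b"fake image data 2")},
            )
        ],
    )

    assert client.read_example(example_id=example_id).outputs == {"answer": "A real image"}
    client.delete_dataset(dataset_id=dataset.id)

The same ExampleCreate objects also accept use_source_run_io, use_source_run_attachments, and source_run_id (exercised by the xfail tests above), which promote a traced run's inputs, outputs, and attachments into the new example once the backend endpoint supports them.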