anthropic, mistral: return model_name in response metadata (#30048)
Took a "census" of models supported by init_chat_model-- of those that
return model names in response metadata, these were the only two that
had it keyed under `"model"` instead of `"model_name"`.
ccurme authored Feb 28, 2025
1 parent 9e6ffd1 commit f8ed500
Showing 4 changed files with 18 additions and 1 deletion.
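
In practice this means chat responses from both providers now surface the served model under the standardized `"model_name"` key. A minimal usage sketch, assuming valid Anthropic credentials (the model ID shown is illustrative):

```python
from langchain_anthropic import ChatAnthropic

# Model ID is illustrative; any supported Anthropic model works.
llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

response = llm.invoke("Hello!")

# After this commit, the server-reported model name is normalized
# under "model_name" in the response metadata.
print(response.response_metadata["model_name"])
```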
libs/partners/anthropic/langchain_anthropic/chat_models.py (7 additions, 0 deletions)
@@ -900,6 +900,8 @@ def _format_output(self, data: Any, **kwargs: Any) -> ChatResult:
         llm_output = {
             k: v for k, v in data_dict.items() if k not in ("content", "role", "type")
         }
+        if "model" in llm_output and "model_name" not in llm_output:
+            llm_output["model_name"] = llm_output["model"]
         if (
             len(content) == 1
             and content[0]["type"] == "text"
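
The guard above only backfills `"model_name"` when the provider payload carried a `"model"` key and nothing upstream set `"model_name"` already. A standalone sketch of that behavior, using a made-up payload for illustration:

```python
# Hypothetical payload shape; keys and values are illustrative only.
llm_output = {"model": "claude-3-5-sonnet-20240620", "stop_reason": "end_turn"}

if "model" in llm_output and "model_name" not in llm_output:
    llm_output["model_name"] = llm_output["model"]

# Both keys now resolve to the same value; the original key is preserved.
assert llm_output["model_name"] == llm_output["model"]
```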
@@ -1445,9 +1447,14 @@ def _make_message_chunk_from_anthropic_event(
     # See https://github.com/anthropics/anthropic-sdk-python/blob/main/src/anthropic/lib/streaming/_messages.py  # noqa: E501
     if event.type == "message_start" and stream_usage:
         usage_metadata = _create_usage_metadata(event.message.usage)
+        if hasattr(event.message, "model"):
+            response_metadata = {"model_name": event.message.model}
+        else:
+            response_metadata = {}
         message_chunk = AIMessageChunk(
             content="" if coerce_content_to_string else [],
             usage_metadata=usage_metadata,
+            response_metadata=response_metadata,
         )
     elif (
         event.type == "content_block_start"
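
Because only the `message_start` event carries the model name, a single chunk per stream should have it populated. A usage sketch, assuming Anthropic credentials (model ID illustrative):

```python
from langchain_anthropic import ChatAnthropic

llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

for chunk in llm.stream("Hello!"):
    # Only the chunk built from the message_start event populates this,
    # so guard the lookup rather than assuming the key exists.
    if "model_name" in chunk.response_metadata:
        print(chunk.response_metadata["model_name"])
```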
Anthropic chat model integration tests (5 additions, 0 deletions)
@@ -35,6 +35,7 @@ def test_stream() -> None:
     full: Optional[BaseMessageChunk] = None
     chunks_with_input_token_counts = 0
     chunks_with_output_token_counts = 0
+    chunks_with_model_name = 0
     for token in llm.stream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
         full = token if full is None else full + token
@@ -44,12 +45,14 @@ def test_stream() -> None:
                 chunks_with_input_token_counts += 1
             elif token.usage_metadata.get("output_tokens"):
                 chunks_with_output_token_counts += 1
+        chunks_with_model_name += int("model_name" in token.response_metadata)
     if chunks_with_input_token_counts != 1 or chunks_with_output_token_counts != 1:
         raise AssertionError(
             "Expected exactly one chunk with input or output token counts. "
             "AIMessageChunk aggregation adds counts. Check that "
             "this is behaving properly."
         )
+    assert chunks_with_model_name == 1
@@ -62,6 +65,7 @@ def test_stream() -> None:
     )
     assert "stop_reason" in full.response_metadata
     assert "stop_sequence" in full.response_metadata
+    assert "model_name" in full.response_metadata


 async def test_astream() -> None:
@@ -219,6 +223,7 @@ async def test_ainvoke() -> None:

     result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
     assert isinstance(result.content, str)
+    assert "model_name" in result.response_metadata


 def test_invoke() -> None:
libs/partners/mistralai/langchain_mistralai/chat_models.py (5 additions, 1 deletion)
@@ -579,7 +579,11 @@ def _create_chat_result(self, response: Dict) -> ChatResult:
             )
             generations.append(gen)

-        llm_output = {"token_usage": token_usage, "model": self.model}
+        llm_output = {
+            "token_usage": token_usage,
+            "model_name": self.model,
+            "model": self.model,  # Backwards compatibility
+        }
         return ChatResult(generations=generations, llm_output=llm_output)

     def _create_message_dicts(
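
With this change, `ChatMistralAI` responses expose the standardized key alongside the legacy one. A minimal sketch, assuming a Mistral API key is configured (model ID illustrative):

```python
from langchain_mistralai import ChatMistralAI

llm = ChatMistralAI(model="mistral-large-latest")

result = llm.invoke("Hello!")

print(result.response_metadata["model_name"])  # standardized key
print(result.response_metadata["model"])  # legacy key, kept for compatibility
```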
MistralAI chat model integration tests (1 addition, 0 deletions)
@@ -87,6 +87,7 @@ async def test_ainvoke() -> None:

     result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
     assert isinstance(result.content, str)
+    assert "model_name" in result.response_metadata


 def test_invoke() -> None:
