Adding InferenceClient.get_recommended_model #1770

Merged · 9 commits · Oct 23, 2023
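
In brief: this PR promotes the private `_get_recommended_model` helper to a public `InferenceClient.get_recommended_model` static method (mirrored on the generated `AsyncInferenceClient`), and moves the "using recommended model" log message out of `_common.py` into `_resolve_url`. A minimal usage sketch, assuming a huggingface_hub build with this PR applied (the printed model is illustrative and may change as the recommendation list is updated):

```python
from huggingface_hub import InferenceClient

# Static method: callable without instantiating a client or passing a token.
model = InferenceClient.get_recommended_model("translation")
print(model)  # "t5-small" at the time this PR was merged

# A task with no recommendation raises ValueError rather than returning None.
```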
23 changes: 23 additions & 0 deletions src/huggingface_hub/inference/_client.py
@@ -1870,6 +1870,11 @@ def _resolve_url(self, model: Optional[str] = None, task: Optional[str] = None)
"You must specify at least a model (repo_id or URL) or a task, either when instantiating"
" `InferenceClient` or when making a request."
)
model = _get_recommended_model(task)
logger.info(
    f"Using recommended model {model} for task {task}. Note that it is"
    f" encouraged to explicitly set `model='{model}'` as the recommended"
    " models list might get updated without prior notice."
)

# Compute InferenceAPI url
@@ -1881,6 +1886,24 @@ def _resolve_url(self, model: Optional[str] = None, task: Optional[str] = None)
else f"{INFERENCE_ENDPOINT}/models/{model}"
)
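
For context (not part of the diff): with this change, a client that resolves a URL without an explicit model falls back to the task's recommendation and logs the message above. A minimal sketch, assuming a build with this PR applied; `set_verbosity_info` is used only to surface the INFO-level log:

```python
from huggingface_hub import InferenceClient
from huggingface_hub.utils import logging

logging.set_verbosity_info()  # surface huggingface_hub's INFO-level logs

client = InferenceClient()  # no model pinned at construction
# translation() is called without model=..., so _resolve_url() falls back to
# the task's recommended model and emits the log added in this hunk.
translated = client.translation("Hello world")
print(translated)
```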

@staticmethod
def get_recommended_model(task: str) -> str:
"""
Get the model Hugging Face recommends for the input task.

Args:
task (`str`):
The task for which to get Hugging Face's recommended model.
All available tasks can be found [here](https://huggingface.co/tasks).

Returns:
`str`: Name of the model recommended for the input task.

Raises:
`ValueError`: If Hugging Face has no recommendation for the input task.
"""
return _get_recommended_model(task)

def get_model_status(self, model: Optional[str] = None) -> ModelStatus:
"""
Get the status of a model hosted on the Inference API.
4 changes: 0 additions & 4 deletions src/huggingface_hub/inference/_common.py
@@ -147,10 +147,6 @@ def _get_recommended_model(task: str) -> str:
f"Task {task} has no recommended task. Please specify a model explicitly. Visit"
" https://huggingface.co/tasks for more info."
)
logger.info(
f"Using recommended model {model} for task {task}. Note that it is encouraged to explicitly set"
f" `model='{model}'` as the recommended models list might get updated without prior notice."
)
return model
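
For reference, after this deletion `_get_recommended_model` reduces to a pure lookup that either returns a model id or raises. A simplified, self-contained sketch of the resulting shape; `fetch_recommended_models` is a hypothetical stand-in for the module's real task-to-model lookup, which this diff does not show:

```python
from typing import Dict

def fetch_recommended_models() -> Dict[str, str]:
    # Hypothetical stand-in; values taken from the tests added in this PR.
    return {"feature-extraction": "facebook/bart-base", "translation": "t5-small"}

def _get_recommended_model(task: str) -> str:
    model = fetch_recommended_models().get(task)
    if model is None:
        raise ValueError(
            f"Task {task} has no recommended model. Please specify a model explicitly."
            " Visit https://huggingface.co/tasks for more info."
        )
    return model
```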


23 changes: 23 additions & 0 deletions src/huggingface_hub/inference/_generated/_async_client.py
@@ -1898,6 +1898,11 @@ def _resolve_url(self, model: Optional[str] = None, task: Optional[str] = None)
"You must specify at least a model (repo_id or URL) or a task, either when instantiating"
" `InferenceClient` or when making a request."
)
model = _get_recommended_model(task)
logger.info(
    f"Using recommended model {model} for task {task}. Note that it is"
    f" encouraged to explicitly set `model='{model}'` as the recommended"
    " models list might get updated without prior notice."
)

# Compute InferenceAPI url
@@ -1909,6 +1914,24 @@ def _resolve_url(self, model: Optional[str] = None, task: Optional[str] = None)
else f"{INFERENCE_ENDPOINT}/models/{model}"
)

@staticmethod
def get_recommended_model(task: str) -> str:
"""
Get the model Hugging Face recommends for the input task.

Args:
task (`str`):
The task for which to get Hugging Face's recommended model.
All available tasks can be found [here](https://huggingface.co/tasks).

Returns:
`str`: Name of the model recommended for the input task.

Raises:
`ValueError`: If Hugging Face has no recommendation for the input task.
"""
return _get_recommended_model(task)

async def get_model_status(self, model: Optional[str] = None) -> ModelStatus:
"""
Get the status of a model hosted on the Inference API.
8 changes: 8 additions & 0 deletions tests/test_inference_client.py
@@ -155,6 +155,14 @@ def test_fill_mask(self) -> None:
{"score", "sequence", "token", "token_str"},
)

def test_get_recommended_model_has_recommendation(self) -> None:
assert self.client.get_recommended_model("feature-extraction") == "facebook/bart-base"
assert self.client.get_recommended_model("translation") == "t5-small"

def test_get_recommended_model_no_recommendation(self) -> None:
with pytest.raises(ValueError):
self.client.get_recommended_model("text-generation")
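
Note: these assertions pin snapshots of the live recommendation list ("facebook/bart-base" for feature-extraction, "t5-small" for translation). As the log message added in _client.py warns, that list may be updated without prior notice, so these tests will need updating whenever the recommendations change.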

def test_image_classification(self) -> None:
output = self.client.image_classification(self.image_file)
self.assertIsInstance(output, list)