Skip to content

Commit

Permalink
pplx - fix supports tool choice openai param (#8496)
Browse files Browse the repository at this point in the history
  • Loading branch information
ishaan-jaff authored Feb 13, 2025
1 parent faee508 commit ca7c5e8
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 81 deletions.
20 changes: 20 additions & 0 deletions litellm/llms/perplexity/chat/transformation.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,23 @@ def _get_openai_compatible_provider_info(
or get_secret_str("PERPLEXITY_API_KEY")
)
return api_base, dynamic_api_key

def get_supported_openai_params(self, model: str) -> list:
    """
    Return the OpenAI-compatible request params Perplexity accepts.

    Perplexity supports only a subset of OpenAI params.
    Ref: https://docs.perplexity.ai/api-reference/chat-completions
    E.g. Perplexity does not support tools, tool_choice, function_call,
    functions, etc.

    Args:
        model: Model name (unused; Perplexity's supported params do not
            vary by model in this mapping).

    Returns:
        List of supported OpenAI parameter names.
    """
    return [
        "frequency_penalty",
        "max_tokens",
        "max_completion_tokens",
        "presence_penalty",
        "response_format",
        "stream",
        "temperature",
        # BUG FIX: original had `"top_p" "max_retries"` (missing comma),
        # which Python concatenates into the single bogus entry
        # "top_pmax_retries", silently dropping both params.
        "top_p",
        "max_retries",
        "extra_headers",
    ]
60 changes: 20 additions & 40 deletions litellm/model_prices_and_context_window_backup.json
Original file line number Diff line number Diff line change
Expand Up @@ -8108,8 +8108,7 @@
"input_cost_per_token": 0.00000035,
"output_cost_per_token": 0.00000140,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/codellama-70b-instruct": {
"max_tokens": 16384,
Expand All @@ -8118,8 +8117,7 @@
"input_cost_per_token": 0.00000070,
"output_cost_per_token": 0.00000280,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/llama-3.1-70b-instruct": {
"max_tokens": 131072,
Expand All @@ -8128,8 +8126,7 @@
"input_cost_per_token": 0.000001,
"output_cost_per_token": 0.000001,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/llama-3.1-8b-instruct": {
"max_tokens": 131072,
Expand All @@ -8138,8 +8135,7 @@
"input_cost_per_token": 0.0000002,
"output_cost_per_token": 0.0000002,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/llama-3.1-sonar-huge-128k-online": {
"max_tokens": 127072,
Expand All @@ -8149,8 +8145,7 @@
"output_cost_per_token": 0.000005,
"litellm_provider": "perplexity",
"mode": "chat",
"deprecation_date": "2025-02-22",
"supports_tool_choice": true
"deprecation_date": "2025-02-22"
},
"perplexity/llama-3.1-sonar-large-128k-online": {
"max_tokens": 127072,
Expand All @@ -8160,8 +8155,7 @@
"output_cost_per_token": 0.000001,
"litellm_provider": "perplexity",
"mode": "chat",
"deprecation_date": "2025-02-22",
"supports_tool_choice": true
"deprecation_date": "2025-02-22"
},
"perplexity/llama-3.1-sonar-large-128k-chat": {
"max_tokens": 131072,
Expand All @@ -8171,8 +8165,7 @@
"output_cost_per_token": 0.000001,
"litellm_provider": "perplexity",
"mode": "chat",
"deprecation_date": "2025-02-22",
"supports_tool_choice": true
"deprecation_date": "2025-02-22"
},
"perplexity/llama-3.1-sonar-small-128k-chat": {
"max_tokens": 131072,
Expand All @@ -8182,8 +8175,7 @@
"output_cost_per_token": 0.0000002,
"litellm_provider": "perplexity",
"mode": "chat",
"deprecation_date": "2025-02-22",
"supports_tool_choice": true
"deprecation_date": "2025-02-22"
},
"perplexity/llama-3.1-sonar-small-128k-online": {
"max_tokens": 127072,
Expand All @@ -8193,8 +8185,7 @@
"output_cost_per_token": 0.0000002,
"litellm_provider": "perplexity",
"mode": "chat" ,
"deprecation_date": "2025-02-22",
"supports_tool_choice": true
"deprecation_date": "2025-02-22"
},
"perplexity/sonar": {
"max_tokens": 127072,
Expand All @@ -8221,8 +8212,7 @@
"input_cost_per_token": 0.00000007,
"output_cost_per_token": 0.00000028,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/pplx-70b-chat": {
"max_tokens": 4096,
Expand All @@ -8231,8 +8221,7 @@
"input_cost_per_token": 0.00000070,
"output_cost_per_token": 0.00000280,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/pplx-7b-online": {
"max_tokens": 4096,
Expand All @@ -8242,8 +8231,7 @@
"output_cost_per_token": 0.00000028,
"input_cost_per_request": 0.005,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/pplx-70b-online": {
"max_tokens": 4096,
Expand All @@ -8253,8 +8241,7 @@
"output_cost_per_token": 0.00000280,
"input_cost_per_request": 0.005,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/llama-2-70b-chat": {
"max_tokens": 4096,
Expand All @@ -8263,8 +8250,7 @@
"input_cost_per_token": 0.00000070,
"output_cost_per_token": 0.00000280,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/mistral-7b-instruct": {
"max_tokens": 4096,
Expand All @@ -8273,8 +8259,7 @@
"input_cost_per_token": 0.00000007,
"output_cost_per_token": 0.00000028,
"litellm_provider": "perplexity",
"mode": "chat" ,
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/mixtral-8x7b-instruct": {
"max_tokens": 4096,
Expand All @@ -8283,8 +8268,7 @@
"input_cost_per_token": 0.00000007,
"output_cost_per_token": 0.00000028,
"litellm_provider": "perplexity",
"mode": "chat",
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/sonar-small-chat": {
"max_tokens": 16384,
Expand All @@ -8293,8 +8277,7 @@
"input_cost_per_token": 0.00000007,
"output_cost_per_token": 0.00000028,
"litellm_provider": "perplexity",
"mode": "chat",
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/sonar-small-online": {
"max_tokens": 12000,
Expand All @@ -8304,8 +8287,7 @@
"output_cost_per_token": 0.00000028,
"input_cost_per_request": 0.005,
"litellm_provider": "perplexity",
"mode": "chat",
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/sonar-medium-chat": {
"max_tokens": 16384,
Expand All @@ -8314,8 +8296,7 @@
"input_cost_per_token": 0.0000006,
"output_cost_per_token": 0.0000018,
"litellm_provider": "perplexity",
"mode": "chat",
"supports_tool_choice": true
"mode": "chat"
},
"perplexity/sonar-medium-online": {
"max_tokens": 12000,
Expand All @@ -8325,8 +8306,7 @@
"output_cost_per_token": 0.0000018,
"input_cost_per_request": 0.005,
"litellm_provider": "perplexity",
"mode": "chat",
"supports_tool_choice": true
"mode": "chat"
},
"fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": {
"max_tokens": 16384,
Expand Down
Loading

0 comments on commit ca7c5e8

Please sign in to comment.