Skip to content

Commit

Permalink
fix: remove the experimental_mode parameter from completion resources
Browse files Browse the repository at this point in the history
  • Loading branch information
asafgardin committed Dec 26, 2023
1 parent c55cbee commit e316760
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 8 deletions.
5 changes: 0 additions & 5 deletions ai21/clients/studio/resources/studio_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,13 @@ def create(
top_p: Optional[float] = 1,
top_k_return: Optional[int] = 0,
custom_model: Optional[str] = None,
experimental_mode: bool = False,
stop_sequences: Optional[List[str]] = None,
frequency_penalty: Optional[Dict[str, Any]] = None,
presence_penalty: Optional[Dict[str, Any]] = None,
count_penalty: Optional[Dict[str, Any]] = None,
epoch: Optional[int] = None,
**kwargs,
) -> CompletionsResponse:
if experimental_mode:
model = f"experimental/{model}"

url = f"{self._client.get_base_url()}/{model}"

if custom_model is not None:
Expand All @@ -45,7 +41,6 @@ def create(
top_p=top_p,
top_k_return=top_k_return,
custom_model=custom_model,
experimental_mode=experimental_mode,
stop_sequences=stop_sequences,
frequency_penalty=frequency_penalty,
presence_penalty=presence_penalty,
Expand Down
3 changes: 0 additions & 3 deletions ai21/resources/bases/completion_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ def create(
top_p=1,
top_k_return=0,
custom_model: Optional[str] = None,
experimental_mode: bool = False,
stop_sequences: Optional[List[str]] = (),
frequency_penalty: Optional[Dict[str, Any]] = {},
presence_penalty: Optional[Dict[str, Any]] = {},
Expand All @@ -44,7 +43,6 @@ def _create_body(
top_p: Optional[int],
top_k_return: Optional[int],
custom_model: Optional[str],
experimental_mode: bool,
stop_sequences: Optional[List[str]],
frequency_penalty: Optional[Dict[str, Any]],
presence_penalty: Optional[Dict[str, Any]],
Expand All @@ -54,7 +52,6 @@ def _create_body(
return {
"model": model,
"customModel": custom_model,
"experimentalModel": experimental_mode,
"prompt": prompt,
"maxTokens": max_tokens,
"numResults": num_results,
Expand Down

0 comments on commit e316760

Please sign in to comment.