feat(api): update via SDK Studio (#225)
stainless-app[bot] authored and stainless-bot committed Aug 7, 2024
1 parent 521e7b8 commit 43d5616
Showing 3 changed files with 33 additions and 34 deletions.
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 22
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-34edf740524e434708905ba916368bd4b1b335aa95cc8c26883f25d3dfbdd221.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-9cff8ea13f14bd0899df69243fe78b4f88d4d0172263aa260af1ea66a7d0484e.yml
49 changes: 24 additions & 25 deletions src/resources/prompts.ts
@@ -47,10 +47,9 @@ export class Prompts extends APIResource {
   }

   /**
-   * Fetches the configured model parameters and messages rendered with the provided
-   * variables mapped to the set LLM provider. This endpoint abstracts the need to
-   * handle mapping between different providers, while still allowing direct calls to
-   * the providers.
+   * Fetches the model configuration parameters for a specified prompt, including
+   * penalty settings, response format, and the model messages rendered with the
+   * given variables mapped to the set LLM provider.
    */
   getParameters(
     id: string,
@@ -539,9 +538,14 @@ export namespace PromptConfiguration {
     maxTokens: number | null;

     /**
-     * The name of the model for the provider.
+     * Example: "gpt-3.5-turbo"
      */
-    name: string;
+    modelName: string;

+    /**
+     * The provider of the provided model.
+     */
+    modelProvider: 'ANTHROPIC' | 'OPENAI';
+
     parallelToolCalls: boolean;

@@ -550,11 +554,6 @@
      */
     presencePenalty: number;

-    /**
-     * The LLM model provider.
-     */
-    provider: 'ANTHROPIC' | 'OPENAI';
-
     /**
      * Example: PromptResponseFormat.TEXT
      */
@@ -717,9 +716,14 @@ export namespace PromptCreateParams {
     maxTokens: number | null;

     /**
-     * The name of the model for the provider.
+     * Example: "gpt-3.5-turbo"
      */
-    name: string;
+    modelName: string;

+    /**
+     * The provider of the provided model.
+     */
+    modelProvider: 'ANTHROPIC' | 'OPENAI';
+
     parallelToolCalls: boolean;

@@ -728,11 +732,6 @@
      */
     presencePenalty: number;

-    /**
-     * The LLM model provider.
-     */
-    provider: 'ANTHROPIC' | 'OPENAI';
-
     /**
      * Example: PromptResponseFormat.TEXT
      */
@@ -867,9 +866,14 @@ export namespace PromptUpdateParams {
     maxTokens: number | null;

     /**
-     * The name of the model for the provider.
+     * Example: "gpt-3.5-turbo"
      */
-    name: string;
+    modelName: string;

+    /**
+     * The provider of the provided model.
+     */
+    modelProvider: 'ANTHROPIC' | 'OPENAI';
+
     parallelToolCalls: boolean;

@@ -878,11 +882,6 @@
      */
     presencePenalty: number;

-    /**
-     * The LLM model provider.
-     */
-    provider: 'ANTHROPIC' | 'OPENAI';
-
     /**
      * Example: PromptResponseFormat.TEXT
      */
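
For SDK consumers, the substance of this diff is a field rename inside the parameters object: name becomes modelName and provider becomes modelProvider, and the change repeats identically in PromptConfiguration, PromptCreateParams, and PromptUpdateParams. A minimal sketch of the renamed shape follows; the local ModelParameters type name, the responseFormat union, and the example values are assumptions for illustration, not part of the generated SDK.

// Sketch only: a local stand-in for the generated parameters type.
// The two renamed fields follow the diff above; the rest is assumed for illustration.
type ModelParameters = {
  maxTokens: number | null;
  modelName: string; // was `name` before this commit, e.g. "gpt-3.5-turbo"
  modelProvider: 'ANTHROPIC' | 'OPENAI'; // was `provider` before this commit
  parallelToolCalls: boolean;
  presencePenalty: number;
  responseFormat: 'JSON' | 'TEXT'; // assumed union, based on the PromptResponseFormat.TEXT example and the 'JSON' literal used in the tests below
  temperature: number;
  topP: number;
};

const exampleParameters: ModelParameters = {
  maxTokens: null,
  modelName: 'gpt-3.5-turbo',
  modelProvider: 'OPENAI',
  parallelToolCalls: false,
  presencePenalty: 0,
  responseFormat: 'JSON',
  temperature: 0,
  topP: 1,
};

console.log(exampleParameters.modelProvider, exampleParameters.modelName);
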
16 changes: 8 additions & 8 deletions tests/api-resources/prompts.test.ts
@@ -42,8 +42,8 @@ describe('resource prompts', () => {
       ],
       name: 'name',
       parameters: {
-        provider: 'ANTHROPIC',
-        name: 'name',
+        modelProvider: 'ANTHROPIC',
+        modelName: 'modelName',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
@@ -100,8 +100,8 @@
       ],
       name: 'name',
       parameters: {
-        provider: 'ANTHROPIC',
-        name: 'name',
+        modelProvider: 'ANTHROPIC',
+        modelName: 'modelName',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
@@ -151,8 +151,8 @@
       ],
       name: 'name',
       parameters: {
-        provider: 'ANTHROPIC',
-        name: 'name',
+        modelProvider: 'ANTHROPIC',
+        modelName: 'modelName',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
@@ -209,8 +209,8 @@
       ],
       name: 'name',
       parameters: {
-        provider: 'ANTHROPIC',
-        name: 'name',
+        modelProvider: 'ANTHROPIC',
+        modelName: 'modelName',
         responseFormat: 'JSON',
         temperature: 0,
         topP: 0,
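
Callers that build prompt payloads by hand need the same rename when they pick up this version. Below is a hedged before/after sketch of a create call; the PromptFoundry default export, the @prompt-foundry/typescript-sdk import path, the apiKey constructor option, and every payload field outside parameters are assumptions for illustration rather than something this commit establishes.

// Migration sketch under the assumptions stated above; only the renames
// inside `parameters` (provider -> modelProvider, name -> modelName) are
// confirmed by this commit.
import PromptFoundry from '@prompt-foundry/typescript-sdk';

const client = new PromptFoundry({ apiKey: process.env.PROMPT_FOUNDRY_API_KEY ?? '' });

async function createExamplePrompt() {
  return client.prompts.create({
    name: 'example-prompt', // hypothetical prompt name; unrelated to the renamed parameters field
    messages: [], // trimmed for brevity; see the test payloads above for a fuller shape
    parameters: {
      // Before this commit:
      //   provider: 'OPENAI',
      //   name: 'gpt-3.5-turbo',
      modelProvider: 'OPENAI',
      modelName: 'gpt-3.5-turbo',
      responseFormat: 'JSON',
      temperature: 0,
      topP: 1,
      maxTokens: null,
      parallelToolCalls: false,
      presencePenalty: 0,
    },
  });
}

createExamplePrompt().then((prompt) => console.log(prompt));
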
