diff --git a/README.md b/README.md
index 7688d31..ff943b8 100644
--- a/README.md
+++ b/README.md
@@ -31,6 +31,7 @@ USAGE
 * [`heroku ai:models:create MODEL_NAME`](#heroku-aimodelscreate-model_name)
 * [`heroku ai:models:destroy MODELRESOURCE`](#heroku-aimodelsdestroy-modelresource)
 * [`heroku ai:models:detach MODEL_RESOURCE`](#heroku-aimodelsdetach-model_resource)
+* [`heroku ai:models:info [MODELRESOURCE]`](#heroku-aimodelsinfo-modelresource)
 * [`heroku ai:models:list`](#heroku-aimodelslist)
 
 ## `heroku ai:docs`
@@ -175,6 +176,32 @@ EXAMPLES
 
 _See code: [dist/commands/ai/models/detach.ts](https://github.com/heroku/heroku-cli-plugin-integration/blob/v0.0.0/dist/commands/ai/models/detach.ts)_
 
+## `heroku ai:models:info [MODELRESOURCE]`
+
+get the current status of all the AI model resources attached to your app or a specific resource
+
+```
+USAGE
+  $ heroku ai:models:info [MODELRESOURCE] -a <value> [-r <value>]
+
+ARGUMENTS
+  MODELRESOURCE  The resource ID or alias of the model resource to check.
+
+FLAGS
+  -a, --app=<value>     (required) app to run command against
+  -r, --remote=<value>  git remote of app to use
+
+DESCRIPTION
+  get the current status of all the AI model resources attached to your app or a specific resource
+
+EXAMPLES
+  $ heroku ai:models:info claude-3-5-sonnet-acute-04281 --app example-app
+
+  $ heroku ai:models:info --app example-app
+```
+
+_See code: [dist/commands/ai/models/info.ts](https://github.com/heroku/heroku-cli-plugin-integration/blob/v0.0.0/dist/commands/ai/models/info.ts)_
+
 ## `heroku ai:models:list`
 
 list available AI models to provision access to
diff --git a/package.json b/package.json
index b4bf6d3..43c29dd 100644
--- a/package.json
+++ b/package.json
@@ -71,6 +71,7 @@
     "lint": "eslint . --ext .ts --config .eslintrc.json",
     "posttest": "yarn lint",
     "test": "nyc mocha --forbid-only",
+    "test:local": "nyc mocha",
     "version": "oclif readme && git add README.md"
   }
 }
diff --git a/src/commands/ai/models/info.ts b/src/commands/ai/models/info.ts
new file mode 100644
index 0000000..964cf51
--- /dev/null
+++ b/src/commands/ai/models/info.ts
@@ -0,0 +1,105 @@
+import color from '@heroku-cli/color'
+import {flags} from '@heroku-cli/command'
+import {Args, ux} from '@oclif/core'
+import Command from '../../../lib/base'
+import {ModelResource} from '../../../lib/ai/types'
+import appAddons from '../../../lib/ai/models/app_addons'
+import * as Heroku from '@heroku-cli/schema'
+
+export default class Info extends Command {
+  static description = 'get the current status of all the AI model resources attached to your app or a specific resource'
+  static examples = [
+    'heroku ai:models:info claude-3-5-sonnet-acute-04281 --app example-app',
+    'heroku ai:models:info --app example-app',
+  ]
+
+  static flags = {
+    app: flags.app({required: true}),
+    remote: flags.remote(),
+  }
+
+  static args = {
+    modelResource: Args.string({description: 'The resource ID or alias of the model resource to check.'}),
+  }
+
+  public async run(): Promise<void> {
+    const {args, flags} = await this.parse(Info)
+    const {app} = flags
+    const {modelResource} = args
+    const synthesizedModels: Array<ModelResource> = []
+    let listOfProvisionedModels: Array<ModelResource> = []
+
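+    // Look up a single model resource's status. A 404 is surfaced as a
+    // warning with a hint to list the app's resources, rather than an error.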
+    const modelInfo = async () => {
+      const modelInfoResponse = await this.herokuAI.get<ModelResource>(`/models/${this.apiModelId}`, {
+        headers: {authorization: `Bearer ${this.apiKey}`},
+      })
+        .catch(error => {
+          if (error.statusCode === 404) {
+            ux.warn(`We can’t find a model resource called ${color.yellow(modelResource)}.\nRun ${color.cmd('heroku ai:models:info -a <app>')} to see a list of model resources.`)
+          } else {
+            throw error
+          }
+        })
+
+      return modelInfoResponse
+    }
+
+    const getModelDetails = async (collectedModels: Array<Record<string, string | undefined>> | string) => {
+      if (typeof collectedModels === 'string') {
+        const modelResource = collectedModels
+        await this.configureHerokuAIClient(modelResource, app)
+
+        const {body: currentModelResource} = await modelInfo() || {body: null}
+        synthesizedModels.push(currentModelResource!)
+      } else {
+        for (const addonModel of collectedModels) {
+          await this.configureHerokuAIClient(addonModel.modelResource, app)
+
+          const {body: currentModelResource} = await modelInfo() || {body: null}
+          synthesizedModels.push(currentModelResource!)
+        }
+      }
+
+      return synthesizedModels
+    }
+
+    if (modelResource) {
+      listOfProvisionedModels = await getModelDetails(modelResource)
+    } else {
+      const provisionedModelsInfo: Record<string, string | undefined>[] = []
+      const inferenceRegex = /inference/
+      const addonsResponse = await appAddons(this.config, app)
+
+      for (const addonInfo of addonsResponse as Array<Heroku.AddOn>) {
+        const addonType = addonInfo.addon_service?.name || ''
+        const isModelAddon = inferenceRegex.test(addonType)
+
+        if (isModelAddon) {
+          provisionedModelsInfo.push({
+            addonName: addonInfo.addon_service?.name,
+            modelResource: addonInfo.name,
+            modelId: addonInfo.addon_service?.id,
+          })
+        }
+      }
+
+      listOfProvisionedModels = await getModelDetails(provisionedModelsInfo)
+    }
+
+    this.displayModelResource(listOfProvisionedModels)
+  }
+
+  displayModelResource(modelResources: ModelResource[]) {
+    for (const modelResource of modelResources) {
+      ux.log()
+      ux.styledHeader(modelResource.model_id)
+      ux.styledObject({
+        'Base Model ID': modelResource.model_id,
+        Ready: modelResource.ready,
+        'Tokens In': modelResource.tokens_in,
+        'Tokens Out': modelResource.tokens_out,
+        'Avg Performance': modelResource.avg_performance,
+      })
+    }
+  }
+}
diff --git a/src/lib/ai/models/app_addons.ts b/src/lib/ai/models/app_addons.ts
new file mode 100644
index 0000000..bdad9ae
--- /dev/null
+++ b/src/lib/ai/models/app_addons.ts
@@ -0,0 +1,17 @@
+import {Config} from '@oclif/core'
+import {APIClient} from '@heroku-cli/command'
+import * as Heroku from '@heroku-cli/schema'
+
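+/**
+ * List every add-on attached to the given app, expanding each add-on's plan
+ * so callers can identify Managed Inference add-ons by service name.
+ */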
+export default async function (config: Config, app: string) {
+  const herokuClient = new APIClient(config)
+
+  const {body: response} = await herokuClient.get<Heroku.AddOn[]>(`/apps/${app}/addons`, {
+    headers: {'Accept-Expansion': 'plan'},
+  }).catch(error => {
+    const error_ = error.body && error.body.message ? new Error(`The add-ons were unable to be retrieved: ${error.body.message}.`) : new Error(`The add-ons were unable to be retrieved: ${error}.`)
+    throw error_
+  })
+
+  return response
+}
diff --git a/src/lib/ai/types.ts b/src/lib/ai/types.ts
index da19f90..ae6fb8e 100644
--- a/src/lib/ai/types.ts
+++ b/src/lib/ai/types.ts
@@ -42,7 +42,8 @@ export type ModelInfo = {
  * Object schema for Model Status endpoint responses.
  */
 export type ModelResource = {
-  plan: ModelName
+  model_id: ModelName
+  ready: string
   created: string
   tokens_in: string
   tokens_out?: string
diff --git a/src/lib/base.ts b/src/lib/base.ts
index 74261f2..d39f88a 100644
--- a/src/lib/base.ts
+++ b/src/lib/base.ts
@@ -186,8 +186,9 @@ export default abstract class extends Command {
     }
 
     // 5. If we resolved for an add-on, check that it's a Managed Inference add-on or throw a NotFound error.
-    if (resolvedAddon && resolvedAddon.addon_service.name !== this.addonServiceSlug)
+    if (resolvedAddon && resolvedAddon.addon_service.name !== this.addonServiceSlug) {
       throw new NotFound(addonIdentifier, appIdentifier)
+    }
 
     // 6. If we resolved for an add-on but not for an attachment yet, try to resolve the attachment
     if (resolvedAddon && !resolvedAttachment) {
diff --git a/test/commands/ai/models/info.test.ts b/test/commands/ai/models/info.test.ts
new file mode 100644
index 0000000..fd9ab2a
--- /dev/null
+++ b/test/commands/ai/models/info.test.ts
@@ -0,0 +1,171 @@
+import {expect} from 'chai'
+import {stdout, stderr} from 'stdout-stderr'
+import Cmd from '../../../../src/commands/ai/models/info'
+import {runCommand} from '../../../run-command'
+import {modelResource, addon1Attachment1, addon1, mockAPIErrors} from '../../../helpers/fixtures'
+import nock from 'nock'
+import heredoc from 'tsheredoc'
+import stripAnsi from '../../../helpers/strip-ansi'
+import {CLIError} from '@oclif/core/lib/errors'
+
+describe('ai:models:info', function () {
+  const {env} = process
+  let api: nock.Scope
+  let herokuAI: nock.Scope
+
+  context('when provisioned model name is provided and is found', function () {
+    beforeEach(function () {
+      process.env = {}
+      api = nock('https://api.heroku.com:443')
+      herokuAI = nock('https://inference.heroku.com')
+    })
+
+    afterEach(function () {
+      process.env = env
+      nock.cleanAll()
+    })
+
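+    // The happy path mocks the full resolution chain: resolve the add-on,
+    // list its attachments, read the app's config vars, then fetch the model
+    // status from the inference API.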
+    it('shows info for a model resource', async function () {
+      api
+        .post('/actions/addons/resolve',
+          {addon: addon1.name, app: addon1Attachment1.app?.name})
+        .reply(200, [addon1])
+        .get(`/addons/${addon1.id}/addon-attachments`)
+        .reply(200, [addon1Attachment1])
+        .get(`/apps/${addon1Attachment1.app?.id}/config-vars`)
+        .reply(200, {
+          INFERENCE_KEY: 's3cr3t_k3y',
+          INFERENCE_MODEL_ID: 'claude-3-haiku',
+          INFERENCE_URL: 'inference.heroku.com',
+        })
+      herokuAI
+        .get('/models/claude-3-haiku')
+        .reply(200, modelResource)
+
+      await runCommand(Cmd, [
+        'inference-regular-74659',
+        '--app',
+        'app1',
+      ])
+
+      expect(stripAnsi(stdout.output)).to.equal(heredoc`
+
+        === claude-3-haiku
+
+        Avg Performance: latency 0.4sec, 28 tokens/sec
+        Base Model ID: claude-3-haiku
+        Ready: Yes
+        Tokens In: 0 tokens this period
+        Tokens Out: 0 tokens this period
+      `)
+
+      expect(stderr.output).to.eq('')
+    })
+  })
+
+  context('when provisioned model name is not provided', function () {
+    // eslint-disable-next-line mocha/no-setup-in-describe
+    const multipleAddons = Array.from({length: 2}).fill(addon1)
+
+    beforeEach(function () {
+      process.env = {}
+      api = nock('https://api.heroku.com:443')
+      herokuAI = nock('https://inference.heroku.com')
+    })
+
+    afterEach(function () {
+      process.env = env
+      nock.cleanAll()
+    })
+
+    it('shows info for all model resources on specified app', async function () {
+      api
+        .post('/actions/addons/resolve',
+          {addon: addon1.name, app: addon1Attachment1.app?.name})
+        .reply(200, [addon1])
+        .get(`/addons/${addon1.id}/addon-attachments`)
+        .reply(200, [addon1Attachment1])
+        .get(`/apps/${addon1Attachment1.app?.id}/config-vars`)
+        .reply(200, {
+          INFERENCE_KEY: 's3cr3t_k3y',
+          INFERENCE_MODEL_ID: 'claude-3-haiku',
+          INFERENCE_URL: 'inference.heroku.com',
+        })
+      herokuAI
+        .get('/models/claude-3-haiku')
+        .reply(200, modelResource)
+      api
+        .get(`/apps/${addon1.app?.name}/addons`)
+        .reply(200, multipleAddons)
+        .post('/actions/addons/resolve',
+          {addon: addon1.name, app: addon1Attachment1.app?.name})
+        .reply(200, [addon1])
+        .get(`/addons/${addon1.id}/addon-attachments`)
+        .reply(200, [addon1Attachment1])
+        .get(`/apps/${addon1Attachment1.app?.id}/config-vars`)
+        .reply(200, {
+          INFERENCE_KEY: 's3cr3t_k3y',
+          INFERENCE_MODEL_ID: 'claude-3-haiku',
+          INFERENCE_URL: 'inference.heroku.com',
+        })
+      herokuAI
+        .get('/models/claude-3-haiku')
+        .reply(200, modelResource)
+
+      await runCommand(Cmd, [
+        '--app',
+        'app1',
+      ])
+
+      expect(stdout.output).to.equal(heredoc`
+
+        === claude-3-haiku
+
+        Avg Performance: latency 0.4sec, 28 tokens/sec
+        Base Model ID: claude-3-haiku
+        Ready: Yes
+        Tokens In: 0 tokens this period
+        Tokens Out: 0 tokens this period
+
+        === claude-3-haiku
+
+        Avg Performance: latency 0.4sec, 28 tokens/sec
+        Base Model ID: claude-3-haiku
+        Ready: Yes
+        Tokens In: 0 tokens this period
+        Tokens Out: 0 tokens this period
+      `)
+    })
+  })
+
+  context('when provisioned model name is incorrect', function () {
+    const incorrectModelName = 'inference-regular-WRONG'
+
+    beforeEach(function () {
+      process.env = {}
+      api = nock('https://api.heroku.com:443')
+    })
+
+    afterEach(function () {
+      process.env = env
+      nock.cleanAll()
+    })
+
+    it('shows an error message', async function () {
+      api
+        .post('/actions/addons/resolve',
+          {addon: incorrectModelName, app: addon1Attachment1.app?.name})
+        .reply(404, mockAPIErrors.modelsInfoErrorResponse)
+
+      try {
+        await runCommand(Cmd, [
+          incorrectModelName,
+          '--app',
+          'app1',
+        ])
+      } catch (error) {
+        const {message} = error as CLIError
+        expect(stripAnsi(message)).contains(mockAPIErrors.modelsInfoErrorResponse.message)
+      }
+    })
+  })
+})
diff --git a/test/helpers/fixtures.ts b/test/helpers/fixtures.ts
index 3d5a178..87ed68d 100644
--- a/test/helpers/fixtures.ts
+++ b/test/helpers/fixtures.ts
@@ -1,4 +1,5 @@
 import * as Heroku from '@heroku-cli/schema'
+import {ModelResource} from '../../src/lib/ai/types'
 
 export const availableModels = [
   {
@@ -46,6 +47,19 @@ export const mockAPIErrors = {
     id: 'error',
     message: 'Example API Error',
   },
+  modelsInfoErrorResponse: {
+    id: 'error',
+    message: 'Example API Error',
+  },
 }
+
+export const modelResource: ModelResource = {
+  model_id: 'claude-3-haiku',
+  ready: 'Yes',
+  created: '2023-01-21T13:02:37.320+00:00',
+  tokens_in: '0 tokens this period',
+  tokens_out: '0 tokens this period',
+  avg_performance: 'latency 0.4sec, 28 tokens/sec',
+}
 
 export const addon1: Heroku.AddOn = {