diff --git a/test/commands/ai/models/call.test.ts b/test/commands/ai/models/call.test.ts
index b531fd6..22ea035 100644
--- a/test/commands/ai/models/call.test.ts
+++ b/test/commands/ai/models/call.test.ts
@@ -30,7 +30,7 @@ describe('ai:models:call', function () {
     process.env = {}
     sandbox = sinon.createSandbox()
     api = nock('https://api.heroku.com')
-    defaultInferenceApi = nock('https://inference.heroku.com')
+    defaultInferenceApi = nock('https://us.inference.heroku.com')
       .get('/available-models')
       .reply(200, availableModels)
   })
diff --git a/test/commands/ai/models/info.test.ts b/test/commands/ai/models/info.test.ts
index fd9ab2a..2e89fb1 100644
--- a/test/commands/ai/models/info.test.ts
+++ b/test/commands/ai/models/info.test.ts
@@ -17,7 +17,7 @@ describe('ai:models:info', function () {
   beforeEach(function () {
     process.env = {}
     api = nock('https://api.heroku.com:443')
-    herokuAI = nock('https://inference.heroku.com')
+    herokuAI = nock('https://us.inference.heroku.com')
   })

   afterEach(function () {
@@ -36,7 +36,7 @@ describe('ai:models:info', function () {
        .reply(200, {
          INFERENCE_KEY: 's3cr3t_k3y',
          INFERENCE_MODEL_ID: 'claude-3-haiku',
-          INFERENCE_URL: 'inference.heroku.com',
+          INFERENCE_URL: 'us.inference.heroku.com',
        })
      herokuAI
        .get('/models/claude-3-haiku')
@@ -88,7 +88,7 @@ describe('ai:models:info', function () {
        .reply(200, {
          INFERENCE_KEY: 's3cr3t_k3y',
          INFERENCE_MODEL_ID: 'claude-3-haiku',
-          INFERENCE_URL: 'inference.heroku.com',
+          INFERENCE_URL: 'us.inference.heroku.com',
        })
      herokuAI
        .get('/models/claude-3-haiku')
@@ -105,7 +105,7 @@ describe('ai:models:info', function () {
        .reply(200, {
          INFERENCE_KEY: 's3cr3t_k3y',
          INFERENCE_MODEL_ID: 'claude-3-haiku',
-          INFERENCE_URL: 'inference.heroku.com',
+          INFERENCE_URL: 'us.inference.heroku.com',
        })
      herokuAI
        .get('/models/claude-3-haiku')
diff --git a/test/commands/ai/models/list.test.ts b/test/commands/ai/models/list.test.ts
index 8b70820..1e9a5ea 100644
--- a/test/commands/ai/models/list.test.ts
+++ b/test/commands/ai/models/list.test.ts
@@ -13,7 +13,7 @@ describe('ai:models:list', function () {

   beforeEach(function () {
     process.env = {}
-    herokuAI = nock('https://inference.heroku.com')
+    herokuAI = nock('https://us.inference.heroku.com')
   })

   afterEach(function () {
diff --git a/test/lib/base.test.ts b/test/lib/base.test.ts
index 7a2b0b8..68a801c 100644
--- a/test/lib/base.test.ts
+++ b/test/lib/base.test.ts
@@ -84,7 +84,7 @@ describe('attempt a request using the Heroku AI client', function () {

   context('when the command doesn’t require a resource name', function () {
     it('makes a request to the default host', async function () {
-      const defaultApiHost = nock('https://inference.heroku.com')
+      const defaultApiHost = nock('https://us.inference.heroku.com')
        .get('/models')
        .reply(200, [])
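
For reference, a minimal standalone sketch of the nock pattern these tests rely on, assuming mocha, chai, and nock are available; the `availableModels` fixture shape and the test names here are hypothetical placeholders, not taken from the repo. The point is only that nock registers an interceptor for the new `https://us.inference.heroku.com` default host and answers any HTTPS request to that origin from the stub.

```ts
import {expect} from 'chai'
import nock from 'nock'
import https from 'node:https'

// Hypothetical fixture shape; the real fixture lives elsewhere in the test suite.
const availableModels = [{model_id: 'claude-3-haiku', type: ['text-to-text']}]

describe('default inference host stub (sketch)', function () {
  let defaultInferenceApi: nock.Scope

  beforeEach(function () {
    // Intercept the US-region default host used by these tests.
    defaultInferenceApi = nock('https://us.inference.heroku.com')
      .get('/available-models')
      .reply(200, availableModels)
  })

  afterEach(function () {
    defaultInferenceApi.done() // assert the interceptor was actually hit
    nock.cleanAll()
  })

  it('serves the mocked model list from the stubbed host', function (done) {
    // nock intercepts requests made through Node's https module.
    https.get('https://us.inference.heroku.com/available-models', res => {
      let body = ''
      res.on('data', chunk => {
        body += chunk
      })
      res.on('end', () => {
        expect(JSON.parse(body)).to.deep.equal(availableModels)
        done()
      })
    })
  })
})
```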