= {};
- backendClient: BackendClient;
+ backendClient: AxiosBackendClient;
connectedAccounts: ConnectedAccounts;
apps: Apps;
actions: Actions;
@@ -60,9 +60,9 @@ export class ComposioToolSet {
post: TPostProcessor[];
schema: TSchemaProcessor[];
} = {
- pre: [fileInputProcessor],
- post: [fileResponseProcessor],
- schema: [fileSchemaProcessor],
+ pre: [FILE_INPUT_PROCESSOR],
+ post: [FILE_DOWNLOADABLE_PROCESSOR],
+ schema: [FILE_SCHEMA_PROCESSOR],
};
private userDefinedProcessors: {
@@ -169,7 +169,7 @@ export class ComposioToolSet {
}
}
- const apps = await this.client.actions.list({
+ const appActions = await this.client.actions.list({
apps: parsedFilters.apps?.join(","),
tags: parsedFilters.tags?.join(","),
useCase: parsedFilters.useCase,
@@ -191,7 +191,10 @@ export class ComposioToolSet {
);
});
- const toolsActions = [...(apps?.items || []), ...toolsWithCustomActions];
+ const toolsActions = [
+ ...(appActions?.items || []),
+ ...toolsWithCustomActions,
+ ];
const allSchemaProcessor = [
...this.internalProcessors.schema,
@@ -199,17 +202,20 @@ export class ComposioToolSet {
? [this.userDefinedProcessors.schema]
: []),
];
-
- return toolsActions.map((tool) => {
+ const processedTools = [];
+ // Iterate over the tools and process them
+ for (const tool of toolsActions) {
let schema = tool as RawActionData;
- allSchemaProcessor.forEach((processor) => {
- schema = processor({
+ // Process the schema with all the processors
+ for (const processor of allSchemaProcessor) {
+ schema = await processor({
actionName: schema?.name,
toolSchema: schema,
});
- });
- return schema;
- });
+ }
+ processedTools.push(schema);
+ }
+ return processedTools;
}
async createAction>(
@@ -265,7 +271,7 @@ export class ComposioToolSet {
];
for (const processor of allInputProcessor) {
- params = processor({
+ params = await processor({
params: params,
actionName: action,
});
@@ -324,9 +330,10 @@ export class ComposioToolSet {
: []),
];
- let dataToReturn = { ...data };
+ // Deep copy via JSON round-trip so processors cannot mutate the original response data
+ let dataToReturn = JSON.parse(JSON.stringify(data));
for (const processor of allOutputProcessor) {
- dataToReturn = processor({
+ dataToReturn = await processor({
actionName: meta.action,
toolResponse: dataToReturn,
});
diff --git a/js/src/sdk/client/schemas.gen.ts b/js/src/sdk/client/schemas.gen.ts
index d59ef218ecb..d65c91adc21 100644
--- a/js/src/sdk/client/schemas.gen.ts
+++ b/js/src/sdk/client/schemas.gen.ts
@@ -1492,6 +1492,38 @@ export const $AppQueryDTO = {
type: "object",
} as const;
+export const $TestConnector = {
+ properties: {
+ id: {
+ type: "string",
+ description: "The id of the test connector",
+ },
+ name: {
+ type: "string",
+ description: "The name of the test connector",
+ },
+ authScheme: {
+ enum: [
+ "OAUTH2",
+ "OAUTH1",
+ "OAUTH1A",
+ "API_KEY",
+ "BASIC",
+ "BEARER_TOKEN",
+ "GOOGLE_SERVICE_ACCOUNT",
+ "NO_AUTH",
+ "BASIC_WITH_JWT",
+ "COMPOSIO_LINK",
+ "CALCOM_AUTH",
+ ],
+ type: "string",
+ description: "The auth scheme of the test connector",
+ },
+ },
+ type: "object",
+ required: ["id", "name", "authScheme"],
+} as const;
+
export const $AppInfoResponseDto = {
properties: {
appId: {
@@ -1534,6 +1566,14 @@ export const $AppInfoResponseDto = {
auth_schemes: {
description: "The authentication schemes of the app",
},
+ testConnectors: {
+ items: {
+ type: "object",
+ },
+ type: "array",
+ $ref: "#/components/schemas/TestConnector",
+ description: "The test connectors of the app",
+ },
enabled: {
type: "boolean",
description: "Indicates if the app is enabled",
@@ -1746,6 +1786,13 @@ export const $GetConnectorInfoResDTO = {
description:
"When true, indicates that this connector uses Composio's built-in authentication handling rather than custom authentication logic.",
},
+ limitedActions: {
+ items: {
+ type: "string",
+ },
+ type: "array",
+ description: "Array of action strings that this connector is limited to.",
+ },
},
type: "object",
required: [
@@ -1756,6 +1803,7 @@ export const $GetConnectorInfoResDTO = {
"logo",
"appName",
"useComposioAuth",
+ "limitedActions",
],
} as const;
@@ -1824,6 +1872,14 @@ export const $CreateConnectorPayloadDTO = {
description:
"When set to true, creates a new integration even if one already exists for the given app. This is useful when you need multiple integrations with the same service.",
},
+ limitedActions: {
+ items: {
+ type: "string",
+ },
+ type: "array",
+ description:
+ "List of actions to limit the connector to. If not provided, all actions will be enabled.",
+ },
},
type: "object",
required: ["name"],
@@ -1836,6 +1892,14 @@ export const $PatchConnectorReqDTO = {
description:
"Authentication configuration for the connector. This object contains the necessary credentials and settings required to authenticate with the external service. You can get the required configuration fields from the `GET /api/v1/connectors/{connectorId}/config` endpoint.",
},
+ limitedActions: {
+ items: {
+ type: "string",
+ },
+ type: "array",
+ description:
+ "A list of actions that are limited or restricted for the connector. This can be used to specify which actions the connector is allowed or not allowed to perform. The list of possible actions can be found in the API documentation.",
+ },
enabled: {
type: "boolean",
description:
@@ -4725,6 +4789,66 @@ export const $ActionsQueryV2DTO = {
type: "object",
} as const;
+export const $FileInfoDTO = {
+ properties: {
+ app: {
+ type: "string",
+ description: "Name of the app where this file belongs to.",
+ },
+ action: {
+ type: "string",
+ description: "Name of the action where this file belongs to.",
+ },
+ filename: {
+ type: "string",
+ description: "Name of the original file.",
+ },
+ mimetype: {
+ type: "string",
+ description: "Mime type of the original file.",
+ },
+ md5: {
+ type: "string",
+ description: "MD5 of a file.",
+ },
+ },
+ type: "object",
+ required: ["app", "action", "filename", "mimetype", "md5"],
+} as const;
+
+export const $GetFilesResponseDTO = {
+ properties: {
+ items: {
+ $ref: "#/components/schemas/FileInfoDTO",
+ items: {
+ type: "object",
+ },
+ type: "array",
+ },
+ },
+ type: "object",
+ required: ["items"],
+} as const;
+
+export const $CreateUploadURLResponseDTO = {
+ properties: {
+ id: {
+ type: "string",
+ description: "ID of the file",
+ },
+ url: {
+ type: "string",
+ description: "Onetime upload URL",
+ },
+ key: {
+ type: "string",
+ description: "S3 upload location",
+ },
+ },
+ type: "object",
+ required: ["id", "url", "key"],
+} as const;
+
export const $TimePeriodReqDTO = {
properties: {
lastTimePeriod: {
@@ -5165,7 +5289,8 @@ export const $ComposioCreateConfigDTO = {
},
useComposioAuth: {
type: "boolean",
- description: "Whether to use Composio authentication",
+ description:
+ "Whether to use Composio authentication, default to true if no auth config is passed. Throws error we're not able to create integration.",
},
authScheme: {
type: "string",
@@ -5190,7 +5315,6 @@ export const $ComposioCreateConfigDTO = {
},
},
type: "object",
- required: ["authScheme"],
} as const;
export const $ConnectorCreateReqDTO = {
diff --git a/js/src/sdk/client/services.gen.ts b/js/src/sdk/client/services.gen.ts
index ffff0e27844..945e1cdcc4f 100644
--- a/js/src/sdk/client/services.gen.ts
+++ b/js/src/sdk/client/services.gen.ts
@@ -6,6 +6,8 @@ import {
type Options,
} from "@hey-api/client-axios";
import type {
+ ActionsControllerV2ListUserFilesError,
+ ActionsControllerV2ListUserFilesResponse,
AddProjectData,
AddProjectError,
AddProjectResponse,
@@ -18,6 +20,9 @@ import type {
CreateConnectorV2Data,
CreateConnectorV2Error,
CreateConnectorV2Response,
+ CreateFileUploadUrlData,
+ CreateFileUploadUrlError,
+ CreateFileUploadUrlResponse,
CreateProjectData,
CreateProjectError,
CreateProjectResponse,
@@ -700,6 +705,39 @@ export class ActionsService {
url: "/api/v2/actions/search/advanced",
});
}
+
+ /**
+ * List user files
+ */
+ public static v2ListUserFiles(
+ options?: Options
+ ) {
+ return (options?.client ?? client).get<
+ ActionsControllerV2ListUserFilesResponse,
+ ActionsControllerV2ListUserFilesError,
+ ThrowOnError
+ >({
+ ...options,
+ url: "/api/v2/actions/files/list",
+ });
+ }
+
+ /**
+ * Create file upload url
+ * Create file upload URL for action execution.
+ */
+ public static createFileUploadUrl(
+ options: Options
+ ) {
+ return (options?.client ?? client).post<
+ CreateFileUploadUrlResponse,
+ CreateFileUploadUrlError,
+ ThrowOnError
+ >({
+ ...options,
+ url: "/api/v2/actions/files/upload/{fileType}",
+ });
+ }
}
export class ConnectionsService {
diff --git a/js/src/sdk/client/types.gen.ts b/js/src/sdk/client/types.gen.ts
index d97865d6d0d..4e6fa40aef4 100644
--- a/js/src/sdk/client/types.gen.ts
+++ b/js/src/sdk/client/types.gen.ts
@@ -1068,6 +1068,48 @@ export type includeLocal = "true" | "false";
*/
export type sortBy = "alphabet" | "usage" | "no_sort";
+export type TestConnector = {
+ /**
+ * The id of the test connector
+ */
+ id: string;
+ /**
+ * The name of the test connector
+ */
+ name: string;
+ /**
+ * The auth scheme of the test connector
+ */
+ authScheme:
+ | "OAUTH2"
+ | "OAUTH1"
+ | "OAUTH1A"
+ | "API_KEY"
+ | "BASIC"
+ | "BEARER_TOKEN"
+ | "GOOGLE_SERVICE_ACCOUNT"
+ | "NO_AUTH"
+ | "BASIC_WITH_JWT"
+ | "COMPOSIO_LINK"
+ | "CALCOM_AUTH";
+};
+
+/**
+ * The auth scheme of the test connector
+ */
+export type authScheme =
+ | "OAUTH2"
+ | "OAUTH1"
+ | "OAUTH1A"
+ | "API_KEY"
+ | "BASIC"
+ | "BEARER_TOKEN"
+ | "GOOGLE_SERVICE_ACCOUNT"
+ | "NO_AUTH"
+ | "BASIC_WITH_JWT"
+ | "COMPOSIO_LINK"
+ | "CALCOM_AUTH";
+
export type AppInfoResponseDto = {
/**
* Unique identifier (UUID) for the app
@@ -1101,6 +1143,10 @@ export type AppInfoResponseDto = {
* The authentication schemes of the app
*/
auth_schemes?: unknown;
+ /**
+ * The test connectors of the app
+ */
+ testConnectors?: TestConnector;
/**
* Indicates if the app is enabled
*/
@@ -1234,9 +1280,9 @@ export type GetConnectorInfoResDTO = {
*/
useComposioAuth: boolean;
/**
- * List of actions that are limited to this connector, trying to execute any other action apart from these will throw an unauthorized error
+ * Array of action strings that this connector is limited to.
*/
- limitedActions: string[];
+ limitedActions: Array;
};
/**
@@ -1282,6 +1328,10 @@ export type CreateConnectorPayloadDTO = {
* When set to true, creates a new integration even if one already exists for the given app. This is useful when you need multiple integrations with the same service.
*/
forceNewIntegration?: boolean;
+ /**
+ * List of actions to limit the connector to. If not provided, all actions will be enabled.
+ */
+ limitedActions?: Array;
};
export type PatchConnectorReqDTO = {
@@ -1291,6 +1341,10 @@ export type PatchConnectorReqDTO = {
authConfig?: {
[key: string]: unknown;
};
+ /**
+ * A list of actions that are limited or restricted for the connector. This can be used to specify which actions the connector is allowed or not allowed to perform. The list of possible actions can be found in the API documentation.
+ */
+ limitedActions?: Array;
/**
* Flag to indicate if the connector is enabled. When set to false, the connector will not process any requests. You can toggle this value to temporarily disable the connector without deleting it. Default value can be found in the `GET /api/v1/connectors/{connectorId}` endpoint response.
*/
@@ -1663,10 +1717,6 @@ export type Parameter = {
value: string;
};
-/**
- * The location of the parameter. Can be 'query' or 'header'.
- */
-
export type Data = {
/**
* First field of the data object.
@@ -3251,6 +3301,48 @@ export type ActionsQueryV2DTO = {
sortBy?: "alphabet" | "usage" | "no_sort";
};
+export type FileInfoDTO = {
+ /**
+ * Name of the app where this file belongs to.
+ */
+ app: string;
+ /**
+ * Name of the action where this file belongs to.
+ */
+ action: string;
+ /**
+ * Name of the original file.
+ */
+ filename: string;
+ /**
+ * Mime type of the original file.
+ */
+ mimetype: string;
+ /**
+ * MD5 of a file.
+ */
+ md5: string;
+};
+
+export type GetFilesResponseDTO = {
+ items: FileInfoDTO;
+};
+
+export type CreateUploadURLResponseDTO = {
+ /**
+ * ID of the file
+ */
+ id: string;
+ /**
+ * Onetime upload URL
+ */
+ url: string;
+ /**
+ * S3 upload location
+ */
+ key: string;
+};
+
export type TimePeriodReqDTO = {
/**
* Time period to get the data for
@@ -3529,22 +3621,6 @@ export type ComposioSearchConfigDTO = {
| "CALCOM_AUTH";
};
-/**
- * Authentication scheme to use
- */
-export type authScheme =
- | "OAUTH2"
- | "OAUTH1"
- | "OAUTH1A"
- | "API_KEY"
- | "BASIC"
- | "BEARER_TOKEN"
- | "GOOGLE_SERVICE_ACCOUNT"
- | "NO_AUTH"
- | "BASIC_WITH_JWT"
- | "COMPOSIO_LINK"
- | "CALCOM_AUTH";
-
export type ConnectorSearchFilterDTOV2 = {
/**
* Filter options for the connector
@@ -3582,13 +3658,13 @@ export type ComposioCreateConfigDTO = {
*/
name?: string;
/**
- * Whether to use Composio authentication
+ * Whether to use Composio authentication, default to true if no auth config is passed. Throws error we're not able to create integration.
*/
useComposioAuth?: boolean;
/**
* Authentication scheme to use
*/
- authScheme:
+ authScheme?:
| "OAUTH2"
| "OAUTH1"
| "OAUTH1A"
@@ -4016,6 +4092,24 @@ export type AdvancedUseCaseSearchResponse2 = AdvancedUseCaseSearchResponse;
export type AdvancedUseCaseSearchError = unknown;
+export type ActionsControllerV2ListUserFilesResponse = GetFilesResponseDTO;
+
+export type ActionsControllerV2ListUserFilesError = unknown;
+
+export type CreateFileUploadUrlData = {
+ /**
+ * FileInfoDTO
+ */
+ body?: FileInfoDTO;
+ path: {
+ fileType: unknown;
+ };
+};
+
+export type CreateFileUploadUrlResponse = CreateUploadURLResponseDTO;
+
+export type CreateFileUploadUrlError = unknown;
+
export type ListConnectionsData = {
query?: {
appNames?: string;
diff --git a/js/src/sdk/index.ts b/js/src/sdk/index.ts
index f70bfa5b13e..d127193274f 100644
--- a/js/src/sdk/index.ts
+++ b/js/src/sdk/index.ts
@@ -11,7 +11,7 @@ import { Entity } from "./models/Entity";
import { Actions } from "./models/actions";
import { ActiveTriggers } from "./models/activeTriggers";
import { Apps } from "./models/apps";
-import { BackendClient } from "./models/backendClient";
+import { AxiosBackendClient } from "./models/backendClient";
import { ConnectedAccounts } from "./models/connectedAccounts";
import { Integrations } from "./models/integrations";
import { Triggers } from "./models/triggers";
@@ -36,7 +36,7 @@ export class Composio {
* It provides access to various models that allow for operations on connected accounts, apps,
* actions, triggers, integrations, and active triggers.
*/
- backendClient: BackendClient;
+ backendClient: AxiosBackendClient;
connectedAccounts: ConnectedAccounts;
apps: Apps;
actions: Actions;
@@ -94,7 +94,7 @@ export class Composio {
);
// Initialize the BackendClient with the parsed API key and base URL.
- this.backendClient = new BackendClient(
+ this.backendClient = new AxiosBackendClient(
apiKeyParsed,
baseURLParsed,
config?.runtime
diff --git a/js/src/sdk/index2.spec.ts b/js/src/sdk/index2.spec.ts
deleted file mode 100644
index 6dc3a26c587..00000000000
--- a/js/src/sdk/index2.spec.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import { getTestConfig } from "../../config/getTestConfig";
-import { Composio } from "./index";
-
-const { COMPOSIO_API_KEY, BACKEND_HERMES_URL } = getTestConfig();
-
-describe("Entity spec suite", () => {
- it("should get an entity and then fetch a connection for a normal app", async () => {
- const app = "github";
- const composio = new Composio({
- apiKey: COMPOSIO_API_KEY,
- baseUrl: BACKEND_HERMES_URL,
- });
- const entity = composio.getEntity("default");
-
- expect(entity.id).toBe("default");
-
- const connection = await entity.getConnection({ app: app! });
- expect(connection?.appUniqueId).toBe(app);
- });
-});
diff --git a/js/src/sdk/models/Entity.spec.ts b/js/src/sdk/models/Entity.spec.ts
index 0ee309730f2..c93cc6825f3 100644
--- a/js/src/sdk/models/Entity.spec.ts
+++ b/js/src/sdk/models/Entity.spec.ts
@@ -43,6 +43,18 @@ describe("Entity class tests", () => {
expect(connection?.appUniqueId).toBe(app);
});
+ it("get connection for rand", async () => {
+ const entity2 = new Entity(backendClient, "ckemvy" + Date.now());
+ let hasError = false;
+ try {
+ const connection = await entity2.getConnection({ app: "gmail" });
+ expect(connection?.appUniqueId).toBe("gmail");
+ } catch (error) {
+ hasError = true;
+ }
+ expect(hasError).toBe(true);
+ });
+
it("execute action", async () => {
const connectedAccount = await entity.getConnection({ app: "github" });
diff --git a/js/src/sdk/models/Entity.ts b/js/src/sdk/models/Entity.ts
index b83acda1ec3..cccb8e0d564 100644
--- a/js/src/sdk/models/Entity.ts
+++ b/js/src/sdk/models/Entity.ts
@@ -13,7 +13,7 @@ import { TELEMETRY_EVENTS } from "../utils/telemetry/events";
import { ActionExecuteResponse, Actions } from "./actions";
import { ActiveTriggers } from "./activeTriggers";
import { Apps } from "./apps";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
import {
ConnectedAccounts,
ConnectionItem,
@@ -43,7 +43,7 @@ export type ConnectedAccountListRes = GetConnectionsResponseDto;
export class Entity {
id: string;
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
private triggerModel: Triggers;
private actionsModel: Actions;
private apps: Apps;
@@ -53,7 +53,7 @@ export class Entity {
private fileName: string = "js/src/sdk/models/Entity.ts";
- constructor(backendClient: BackendClient, id: string = "default") {
+ constructor(backendClient: AxiosBackendClient, id: string = "default") {
this.backendClient = backendClient;
this.id = id;
this.triggerModel = new Triggers(this.backendClient);
@@ -179,10 +179,6 @@ export class Entity {
user_uuid: this.id!,
});
- if (!connectedAccounts.items || connectedAccounts.items.length === 0) {
- return null;
- }
-
for (const account of connectedAccounts.items!) {
if (account?.labels && account?.labels.includes(LABELS.PRIMARY)) {
latestAccount = account;
diff --git a/js/src/sdk/models/actions.ts b/js/src/sdk/models/actions.ts
index 231594dee67..0744e32a511 100644
--- a/js/src/sdk/models/actions.ts
+++ b/js/src/sdk/models/actions.ts
@@ -18,7 +18,7 @@ import {
import { CEG } from "../utils/error";
import { TELEMETRY_LOGGER } from "../utils/telemetry";
import { TELEMETRY_EVENTS } from "../utils/telemetry/events";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
/**
* Request types inferred from zod schemas
@@ -43,10 +43,10 @@ export type ActionFindActionEnumsByUseCaseRes = Array;
export class Actions {
// Remove this as we might not need it
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
fileName: string = "js/src/sdk/models/actions.ts";
- constructor(backendClient: BackendClient) {
+ constructor(backendClient: AxiosBackendClient) {
this.backendClient = backendClient;
}
diff --git a/js/src/sdk/models/activeTriggers.ts b/js/src/sdk/models/activeTriggers.ts
index 59b16564ec2..4c334a2da03 100644
--- a/js/src/sdk/models/activeTriggers.ts
+++ b/js/src/sdk/models/activeTriggers.ts
@@ -8,7 +8,7 @@ import {
import { CEG } from "../utils/error";
import { TELEMETRY_LOGGER } from "../utils/telemetry";
import { TELEMETRY_EVENTS } from "../utils/telemetry/events";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
export type TriggerItemParam = z.infer;
export type GetActiveTriggersData = z.infer;
@@ -16,9 +16,9 @@ export type TriggerItemRes = z.infer;
export type TriggerChangeResponse = { status: string };
export class ActiveTriggers {
// Remove this as we might not need it
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
private fileName: string = "js/src/sdk/models/activeTriggers.ts";
- constructor(backendClient: BackendClient) {
+ constructor(backendClient: AxiosBackendClient) {
this.backendClient = backendClient;
}
diff --git a/js/src/sdk/models/apps.ts b/js/src/sdk/models/apps.ts
index 1c046b406bf..75955d1b9d8 100644
--- a/js/src/sdk/models/apps.ts
+++ b/js/src/sdk/models/apps.ts
@@ -16,7 +16,7 @@ import {
ZRequiredParamsFullResponse,
ZRequiredParamsResponse,
} from "../types/app";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
// schema types generated from zod
export type AppGetRequiredParams = z.infer;
@@ -42,9 +42,9 @@ export type AppListRes = AppListResDTO;
export type AppItemListResponse = AppInfoResponseDto;
export class Apps {
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
private fileName: string = "js/src/sdk/models/apps.ts";
- constructor(backendClient: BackendClient) {
+ constructor(backendClient: AxiosBackendClient) {
this.backendClient = backendClient;
}
diff --git a/js/src/sdk/models/backendClient.spec.ts b/js/src/sdk/models/backendClient.spec.ts
index 5188073619c..643622fb875 100644
--- a/js/src/sdk/models/backendClient.spec.ts
+++ b/js/src/sdk/models/backendClient.spec.ts
@@ -1,6 +1,6 @@
import { beforeAll, describe, expect, it } from "@jest/globals";
import { BACKEND_CONFIG, getTestConfig } from "../../../config/getTestConfig";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
describe("Apps class tests", () => {
let _backendClient;
@@ -11,21 +11,22 @@ describe("Apps class tests", () => {
});
it("should create an Apps instance and retrieve apps list", async () => {
- _backendClient = new BackendClient(
+ _backendClient = new AxiosBackendClient(
testConfig.COMPOSIO_API_KEY,
testConfig.BACKEND_HERMES_URL
);
});
it("should throw an error if api key is not provided", async () => {
- expect(() => new BackendClient("", testConfig.BACKEND_HERMES_URL)).toThrow(
- "API key is not available"
- );
+ expect(
+ () => new AxiosBackendClient("", testConfig.BACKEND_HERMES_URL)
+ ).toThrow("API key is not available");
});
it("should throw and error if wrong base url is provided", async () => {
expect(
- () => new BackendClient(testConfig.COMPOSIO_API_KEY, "htt://wrong.url")
+ () =>
+ new AxiosBackendClient(testConfig.COMPOSIO_API_KEY, "htt://wrong.url")
).toThrow("🔗 Base URL htt://wrong.url is not valid");
});
});
diff --git a/js/src/sdk/models/backendClient.ts b/js/src/sdk/models/backendClient.ts
index e19a9f00401..27adf4cbc28 100644
--- a/js/src/sdk/models/backendClient.ts
+++ b/js/src/sdk/models/backendClient.ts
@@ -9,7 +9,7 @@ import { removeTrailingSlashIfExists } from "../utils/string";
/**
* Class representing the details required to initialize and configure the API client.
*/
-export class BackendClient {
+export class AxiosBackendClient {
/**
* The API key used for authenticating requests.
*/
diff --git a/js/src/sdk/models/connectedAccounts.ts b/js/src/sdk/models/connectedAccounts.ts
index 4dbad32b225..569fc709124 100644
--- a/js/src/sdk/models/connectedAccounts.ts
+++ b/js/src/sdk/models/connectedAccounts.ts
@@ -17,7 +17,7 @@ import { ZAuthMode } from "../types/integration";
import { CEG } from "../utils/error";
import { TELEMETRY_LOGGER } from "../utils/telemetry";
import { TELEMETRY_EVENTS } from "../utils/telemetry/events";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
type ConnectedAccountsListData = z.infer & {
/** @deprecated use appUniqueKeys field instead */
@@ -48,14 +48,14 @@ export type ConnectionItem = ConnectionParams;
* Class representing connected accounts in the system.
*/
export class ConnectedAccounts {
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
private fileName: string = "js/src/sdk/models/connectedAccounts.ts";
/**
* Initializes a new instance of the ConnectedAccounts class.
- * @param {BackendClient} backendClient - The backend client instance.
+ * @param {AxiosBackendClient} backendClient - The backend client instance.
*/
- constructor(backendClient: BackendClient) {
+ constructor(backendClient: AxiosBackendClient) {
this.backendClient = backendClient;
}
diff --git a/js/src/sdk/models/integrations.ts b/js/src/sdk/models/integrations.ts
index 9bfff996a23..8a4daf5aa2a 100644
--- a/js/src/sdk/models/integrations.ts
+++ b/js/src/sdk/models/integrations.ts
@@ -18,7 +18,7 @@ import { COMPOSIO_SDK_ERROR_CODES } from "../utils/errors/src/constants";
import { TELEMETRY_LOGGER } from "../utils/telemetry";
import { TELEMETRY_EVENTS } from "../utils/telemetry/events";
import { Apps } from "./apps";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
// Types generated from zod schemas
@@ -48,11 +48,11 @@ export type IntegrationRequiredParamsRes = ExpectedInputFieldsDTO[];
export type IntegrationDeleteRes = DeleteRowAPIDTO;
export class Integrations {
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
private fileName: string = "js/src/sdk/models/integrations.ts";
private apps: Apps;
- constructor(backendClient: BackendClient) {
+ constructor(backendClient: AxiosBackendClient) {
this.backendClient = backendClient;
this.apps = new Apps(backendClient);
}
diff --git a/js/src/sdk/models/triggers.ts b/js/src/sdk/models/triggers.ts
index 50d48e5c6c2..01cf4645934 100644
--- a/js/src/sdk/models/triggers.ts
+++ b/js/src/sdk/models/triggers.ts
@@ -1,6 +1,6 @@
import logger from "../../utils/logger";
import { PusherUtils, TriggerData } from "../utils/pusher";
-import { BackendClient } from "./backendClient";
+import { AxiosBackendClient } from "./backendClient";
import apiClient from "../client/client";
@@ -58,9 +58,9 @@ export type SingleInstanceTriggerParam = z.infer<
export class Triggers {
trigger_to_client_event = "trigger_to_client";
- private backendClient: BackendClient;
+ private backendClient: AxiosBackendClient;
private fileName: string = "js/src/sdk/models/triggers.ts";
- constructor(backendClient: BackendClient) {
+ constructor(backendClient: AxiosBackendClient) {
this.backendClient = backendClient;
}
diff --git a/js/src/sdk/testUtils/getBackendClient.ts b/js/src/sdk/testUtils/getBackendClient.ts
index 3427c7e9078..84a87323fb3 100644
--- a/js/src/sdk/testUtils/getBackendClient.ts
+++ b/js/src/sdk/testUtils/getBackendClient.ts
@@ -1,7 +1,7 @@
import { getTestConfig } from "../../../config/getTestConfig";
-import { BackendClient } from "../models/backendClient";
+import { AxiosBackendClient } from "../models/backendClient";
-export const getBackendClient = (): BackendClient => {
+export const getBackendClient = (): AxiosBackendClient => {
const testConfig = getTestConfig();
if (testConfig["COMPOSIO_API_KEY"] === undefined) {
throw new Error("COMPOSIO_API_KEY is not set in the test config");
@@ -11,5 +11,5 @@ export const getBackendClient = (): BackendClient => {
}
const COMPOSIO_API_KEY = testConfig["COMPOSIO_API_KEY"];
const BACKEND_HERMES_URL = testConfig["BACKEND_HERMES_URL"];
- return new BackendClient(COMPOSIO_API_KEY, BACKEND_HERMES_URL);
+ return new AxiosBackendClient(COMPOSIO_API_KEY, BACKEND_HERMES_URL);
};
diff --git a/js/src/sdk/utils/errors/src/composioError.ts b/js/src/sdk/utils/errors/src/composioError.ts
index b1b7c96112e..0e15e8cfb1f 100644
--- a/js/src/sdk/utils/errors/src/composioError.ts
+++ b/js/src/sdk/utils/errors/src/composioError.ts
@@ -1,6 +1,6 @@
import { logError } from "..";
import { getUUID } from "../../../../utils/common";
-import { getLogLevel } from "../../../../utils/logger";
+import logger, { getLogLevel, LOG_LEVELS } from "../../../../utils/logger";
/**
* Custom error class for Composio that provides rich error details, tracking, and improved debugging
@@ -67,19 +67,22 @@ export class ComposioError extends Error {
}
}
- // eslint-disable-next-line no-console
- console.log(
- `🚀 [Info] Give Feedback / Get Help: https://dub.composio.dev/discord `
- );
- // eslint-disable-next-line no-console
- console.log(
- `🐛 [Info] Create a new issue: https://github.com/ComposioHQ/composio/issues `
- );
- if (getLogLevel() !== "debug") {
+ // Only log the help links when the log level is info or debug
+ if (LOG_LEVELS[getLogLevel()] >= 2) {
// eslint-disable-next-line no-console
- console.log(
- `⛔ [Info] If you need to debug this error, set env variable COMPOSIO_LOGGING_LEVEL=debug`
+ logger.info(
+ `🚀 [Info] Give Feedback / Get Help: https://dub.composio.dev/discord `
);
+ // eslint-disable-next-line no-console
+ logger.info(
+ `🐛 [Info] Create a new issue: https://github.com/ComposioHQ/composio/issues `
+ );
+ if (getLogLevel() !== "debug") {
+ // eslint-disable-next-line no-console
+ logger.info(
+ `⛔ [Info] If you need to debug this error, set env variable COMPOSIO_LOGGING_LEVEL=debug`
+ );
+ }
}
logError({
diff --git a/js/src/sdk/utils/errors/src/formatter.ts b/js/src/sdk/utils/errors/src/formatter.ts
index 4bf5a9951da..da12e72aaaf 100644
--- a/js/src/sdk/utils/errors/src/formatter.ts
+++ b/js/src/sdk/utils/errors/src/formatter.ts
@@ -104,7 +104,7 @@ export const generateMetadataFromAxiosError = (
metadata?: Record;
}
): Record => {
- const requestId = axiosError.response?.headers["x-request-id"];
+ const requestId = axiosError.request?.headers["x-request-id"];
return {
fullUrl:
(axiosError.config?.baseURL ?? "") + (axiosError.config?.url ?? ""),
diff --git a/js/src/sdk/utils/fileUtils.ts b/js/src/sdk/utils/fileUtils.ts
index 3d95cf8e3f4..04849edc833 100644
--- a/js/src/sdk/utils/fileUtils.ts
+++ b/js/src/sdk/utils/fileUtils.ts
@@ -65,7 +65,6 @@ export const saveFile = (
isTempFile: boolean = false
) => {
try {
- // eslint-disable-next-line @typescript-eslint/no-require-imports
const path = require("path");
// eslint-disable-next-line @typescript-eslint/no-require-imports
const fs = require("fs");
@@ -73,7 +72,12 @@ export const saveFile = (
? getComposioTempFilesDir(true)
: getComposioDir(true);
const filePath = path.join(composioFilesDir, path.basename(file));
- fs.writeFileSync(filePath, content);
+
+ if (Buffer.isBuffer(content)) {
+ fs.writeFileSync(filePath, content);
+ } else {
+ fs.writeFileSync(filePath, content, "utf8");
+ }
return filePath;
} catch (_error) {
diff --git a/js/src/sdk/utils/processor/file.ts b/js/src/sdk/utils/processor/file.ts
index ad82d4c1dde..7bc97764def 100644
--- a/js/src/sdk/utils/processor/file.ts
+++ b/js/src/sdk/utils/processor/file.ts
@@ -3,89 +3,119 @@ import {
TPreProcessor,
TSchemaProcessor,
} from "../../../types/base_toolset";
-import logger from "../../../utils/logger";
-import { saveFile } from "../fileUtils";
+import { downloadFileFromS3, getFileDataAfterUploadingToS3 } from "./fileUtils";
-export const fileResponseProcessor: TPostProcessor = ({
+type FileBasePropertySchema = {
+ type: string;
+ title: string;
+ description: string;
+ file_uploadable?: boolean;
+} & Record;
+
+const FILE_SUFFIX = "_schema_parsed_file";
+
+const convertFileSchemaProperty = (
+ key: string,
+ property: FileBasePropertySchema
+) => {
+ if (!property.file_uploadable) {
+ return property;
+ }
+
+ return {
+ keyName: `${key}${FILE_SUFFIX}`,
+ type: "string",
+ description: property.description,
+ };
+};
+
+const processFileUpload = async (
+ params: Record,
+ actionName: string
+) => {
+ const result = { ...params };
+
+ for (const [key, value] of Object.entries(result)) {
+ if (!key.endsWith(FILE_SUFFIX)) continue;
+
+ const originalKey = key.replace(FILE_SUFFIX, "");
+ const fileData = await getFileDataAfterUploadingToS3(
+ value as string,
+ actionName
+ );
+
+ result[originalKey] = fileData;
+ delete result[key];
+ }
+
+ return result;
+};
+
+export const FILE_INPUT_PROCESSOR: TPreProcessor = async ({
+ params,
+ actionName,
+}) => {
+ return processFileUpload(params, actionName);
+};
+
+export const FILE_DOWNLOADABLE_PROCESSOR: TPostProcessor = async ({
actionName,
toolResponse,
}) => {
- const responseData =
- (toolResponse.data.response_data as Record) || {};
- const fileData = responseData.file as
- | { name: string; content: string }
- | undefined;
+ const result = JSON.parse(JSON.stringify(toolResponse));
- if (!fileData) return toolResponse;
+ for (const [key, value] of Object.entries(toolResponse.data)) {
+ const fileData = value as { s3url?: string; mimetype?: string };
- const fileNamePrefix = `${actionName}_${Date.now()}`;
- const filePath = saveFile(fileNamePrefix, fileData.content, true);
+ if (!fileData?.s3url) continue;
- delete responseData.file;
+ const downloadedFile = await downloadFileFromS3({
+ actionName,
+ s3Url: fileData.s3url,
+ mimeType: fileData.mimetype || "text/plain",
+ });
- return {
- ...toolResponse,
- data: {
- ...toolResponse.data,
- file_uri_path: filePath,
- },
- };
+ result.data[key] = {
+ uri: downloadedFile.filePath,
+ mimeType: downloadedFile.mimeType,
+ };
+ }
+
+ return result;
};
-export const fileInputProcessor: TPreProcessor = ({ params, actionName }) => {
- const requestData = Object.entries(params).reduce(
- (acc, [key, value]) => {
- if (key === "file_uri_path" && typeof value === "string") {
- try {
- //eslint-disable-next-line @typescript-eslint/no-require-imports
- const fileContent = require("fs").readFileSync(value, "utf-8");
- const fileName =
- value.split("/").pop() || `${actionName}_${Date.now()}`;
- acc["file"] = { name: fileName, content: fileContent };
- } catch (error) {
- logger.error(`Error reading file at ${value}:`, error);
- acc["file"] = { name: value, content: "" }; // Fallback to original value if reading fails
- }
- } else {
- acc[key] = value;
- }
- return acc;
- },
- {} as Record
- );
+export const FILE_SCHEMA_PROCESSOR: TSchemaProcessor = ({ toolSchema }) => {
+ const { properties, required: requiredProps = [] } = toolSchema.parameters;
+ const newProperties = { ...properties };
+ const newRequired = [...requiredProps];
- return requestData;
-};
+ for (const [key, property] of Object.entries(newProperties)) {
+ if (!property.file_uploadable) continue;
-export const fileSchemaProcessor: TSchemaProcessor = ({ toolSchema }) => {
- const { properties } = toolSchema.parameters;
- const clonedProperties = JSON.parse(JSON.stringify(properties));
-
- for (const propertyKey of Object.keys(clonedProperties)) {
- const object = clonedProperties[propertyKey];
- const isObject = typeof object === "object";
- const isFile =
- isObject &&
- object?.required?.includes("name") &&
- object?.required?.includes("content");
-
- if (isFile) {
- const newKey = `${propertyKey}_file_uri_path`;
- clonedProperties[newKey] = {
- type: "string",
- title: "Name",
- description: "Local absolute path to the file or http url to the file",
- };
-
- delete clonedProperties[propertyKey];
+ const { type, keyName, description } = convertFileSchemaProperty(
+ key,
+ property as FileBasePropertySchema
+ );
+
+ newProperties[keyName as string] = {
+ title: property.title,
+ type,
+ description,
+ };
+
+ if (requiredProps.includes(key)) {
+ newRequired[newRequired.indexOf(key)] = keyName as string;
}
+
+ delete newProperties[key];
}
return {
...toolSchema,
parameters: {
...toolSchema.parameters,
- properties: clonedProperties,
+ properties: newProperties,
+ required: newRequired,
},
};
};
diff --git a/js/src/sdk/utils/processor/fileUtils.ts b/js/src/sdk/utils/processor/fileUtils.ts
new file mode 100644
index 00000000000..05d3c994698
--- /dev/null
+++ b/js/src/sdk/utils/processor/fileUtils.ts
@@ -0,0 +1,129 @@
+import axios, { AxiosError } from "axios";
+import crypto from "crypto";
+import apiClient from "../../client/client";
+import { saveFile } from "../fileUtils";
+
+const readFileContent = async (
+ path: string
+): Promise<{ content: string; mimeType: string }> => {
+ try {
+ const content = require("fs").readFileSync(path);
+ return {
+ content: content.toString("base64"),
+ mimeType: "application/octet-stream",
+ };
+ } catch (error) {
+ throw new Error(`Error reading file at ${path}: ${error}`);
+ }
+};
+
+const readFileContentFromURL = async (
+ path: string
+): Promise<{ content: string; mimeType: string }> => {
+ const response = await axios.get(path, {
+ responseType: "arraybuffer",
+ });
+ const content = Buffer.from(response.data);
+ const mimeType =
+ response.headers["content-type"] || "application/octet-stream";
+ return {
+ content: content.toString("base64"),
+ mimeType,
+ };
+};
+
+const uploadFileToS3 = async (
+ content: string,
+ actionName: string,
+ appName: string,
+ mimeType: string
+): Promise => {
+ const extension = mimeType.split("/")[1] || "bin";
+ const response = await apiClient.actionsV2.createFileUploadUrl({
+ body: {
+ action: actionName,
+ app: appName,
+ filename: `${actionName}_${Date.now()}.${extension}`,
+ mimetype: mimeType,
+ md5: crypto
+ .createHash("md5")
+ .update(Buffer.from(content, "base64"))
+ .digest("hex"),
+ },
+ path: {
+ fileType: "request",
+ },
+ });
+
+ const data = response.data as unknown as { url: string; key: string };
+ const signedURL = data!.url;
+ const s3key = data!.key;
+
+ try {
+ const buffer = Buffer.from(content, "base64");
+ await axios.put(signedURL, buffer, {
+ headers: {
+ "Content-Type": mimeType,
+ "Content-Length": buffer.length,
+ },
+ });
+ } catch (e) {
+ const error = e as AxiosError;
+ if (error instanceof AxiosError && error.response?.status === 403) {
+ return signedURL;
+ }
+ throw new Error(`Error uploading file to S3: ${error}`);
+ }
+
+ return s3key;
+};
+
+export const getFileDataAfterUploadingToS3 = async (
+ path: string,
+ actionName: string
+): Promise<{
+ name: string;
+ mimetype: string;
+ s3key: string;
+}> => {
+ const isURL = path.startsWith("http");
+ const fileData = isURL
+ ? await readFileContentFromURL(path)
+ : await readFileContent(path);
+
+ const s3key = await uploadFileToS3(
+ fileData.content,
+ actionName,
+ actionName,
+ fileData.mimeType
+ );
+
+ return {
+ name: path.split("/").pop() || `${actionName}_${Date.now()}`,
+ mimetype: fileData.mimeType,
+ s3key: s3key,
+ };
+};
+export const downloadFileFromS3 = async ({
+ actionName,
+ s3Url,
+ mimeType,
+}: {
+ actionName: string;
+ s3Url: string;
+ mimeType: string;
+}) => {
+ const response = await axios.get(s3Url, {
+ responseType: "arraybuffer",
+ });
+
+ const extension = mimeType.split("/")[1] || "txt";
+ const fileName = `${actionName}_${Date.now()}.${extension}`;
+ const filePath = saveFile(fileName, response.data, true);
+ return {
+ name: fileName,
+ mimeType: mimeType,
+ s3Key: s3Url,
+ filePath: filePath,
+ };
+};
diff --git a/js/src/types/base_toolset.ts b/js/src/types/base_toolset.ts
index 8b61568e297..a791655cc43 100644
--- a/js/src/types/base_toolset.ts
+++ b/js/src/types/base_toolset.ts
@@ -52,7 +52,7 @@ export type TPreProcessor = ({
}: {
params: Record;
actionName: string;
-}) => Record;
+}) => Promise> | Record;
export type TPostProcessor = ({
actionName,
@@ -60,7 +60,7 @@ export type TPostProcessor = ({
}: {
actionName: string;
toolResponse: ActionExecutionResDto;
-}) => ActionExecutionResDto;
+}) => Promise | ActionExecutionResDto;
export type TSchemaProcessor = ({
actionName,
@@ -68,7 +68,7 @@ export type TSchemaProcessor = ({
}: {
actionName: string;
toolSchema: RawActionData;
-}) => RawActionData;
+}) => Promise | RawActionData;
export const ZToolSchemaFilter = z.object({
actions: z.array(z.string()).optional(),
diff --git a/js/src/utils/logger.ts b/js/src/utils/logger.ts
index ab1a1f6e020..53250f0976d 100644
--- a/js/src/utils/logger.ts
+++ b/js/src/utils/logger.ts
@@ -1,7 +1,7 @@
import { getEnvVariable } from "./shared";
// Define log levels with corresponding priorities
-const LOG_LEVELS = {
+export const LOG_LEVELS = {
error: 0, // Highest priority - critical errors
warn: 1, // Warning messages
info: 2, // General information
diff --git a/python/Makefile b/python/Makefile
index e00d7250af4..600ab7b3b01 100644
--- a/python/Makefile
+++ b/python/Makefile
@@ -83,7 +83,7 @@ fmt: format-code
.PHONY: check-code
check-code:
- tox run-parallel -e isort-check,black-check,flake8,mypy,pylint
+ tox run-parallel -e isort-check,black-check,flake8,mypy,pylint --parallel-no-spinner
.PHONY: chk
chk: check-code
diff --git a/python/composio/__version__.py b/python/composio/__version__.py
index a5f830a2c0b..bc8c296f6a5 100644
--- a/python/composio/__version__.py
+++ b/python/composio/__version__.py
@@ -1 +1 @@
-__version__ = "0.7.1"
+__version__ = "0.7.2"
diff --git a/python/composio/cli/context.py b/python/composio/cli/context.py
index 300cc7dba23..081fd7d5671 100644
--- a/python/composio/cli/context.py
+++ b/python/composio/cli/context.py
@@ -16,7 +16,7 @@
from composio.client import Composio
from composio.constants import (
ENV_COMPOSIO_API_KEY,
- LOCAL_CACHE_DIRECTORY_NAME,
+ LOCAL_CACHE_DIRECTORY,
USER_DATA_FILE_NAME,
)
from composio.storage.user import UserData
@@ -57,7 +57,7 @@ def console(self) -> Console:
def cache_dir(self) -> Path:
"""Cache directory."""
if self._cache_dir is None:
- self._cache_dir = Path.home() / LOCAL_CACHE_DIRECTORY_NAME
+ self._cache_dir = LOCAL_CACHE_DIRECTORY
if not self._cache_dir.exists():
self._cache_dir.mkdir(parents=True)
return self._cache_dir
diff --git a/python/composio/client/__init__.py b/python/composio/client/__init__.py
index 9a9afa18fd9..0842c930b91 100644
--- a/python/composio/client/__init__.py
+++ b/python/composio/client/__init__.py
@@ -6,7 +6,6 @@
import sys
import typing as t
from datetime import datetime
-from pathlib import Path
import requests
@@ -41,10 +40,10 @@
from composio.constants import (
DEFAULT_ENTITY_ID,
ENV_COMPOSIO_API_KEY,
- LOCAL_CACHE_DIRECTORY_NAME,
+ LOCAL_CACHE_DIRECTORY,
USER_DATA_FILE_NAME,
)
-from composio.exceptions import ApiKeyNotProvidedError
+from composio.exceptions import ApiKeyError, ApiKeyNotProvidedError, InvalidParams
from composio.storage.user import UserData
from composio.utils.decorators import deprecated
from composio.utils.shared import generate_request_id
@@ -99,8 +98,7 @@ def get_latest() -> "Composio":
@property
def api_key(self) -> str:
if self._api_key is None:
- cache_dir = Path.home() / LOCAL_CACHE_DIRECTORY_NAME
- user_data_path = cache_dir / USER_DATA_FILE_NAME
+ user_data_path = LOCAL_CACHE_DIRECTORY / USER_DATA_FILE_NAME
user_data = (
UserData.load(path=user_data_path) if user_data_path.exists() else None
)
@@ -113,7 +111,7 @@ def api_key(self) -> str:
self._api_key = env_api_key
if self._api_key is None:
- raise ApiKeyNotProvidedError()
+ raise ApiKeyNotProvidedError
self._api_key = self.validate_api_key(
key=t.cast(str, self._api_key),
@@ -171,10 +169,10 @@ def validate_api_key(key: str, base_url: t.Optional[str] = None) -> str:
timeout=60,
)
if response.status_code in (401, 403):
- raise ComposioClientError("API Key is not valid!")
+ raise ApiKeyError("API Key is not valid!")
if response.status_code != 200:
- raise ComposioClientError(f"Unexpected error: HTTP {response.status_code}")
+ raise ApiKeyError(f"Unexpected error: HTTP {response.status_code}")
_valid_keys.add(key)
return key
@@ -438,21 +436,18 @@ def initiate_connection(
:param integration: Optional existing IntegrationModel instance to be used.
:return: A ConnectionRequestModel instance representing the initiated connection.
"""
- if isinstance(app_name, App):
- app_name_str = app_name.slug
- else:
- app_name_str = app_name
-
- app = self.client.apps.get(name=app_name_str)
+ app = self.client.apps.get(name=App(app_name).slug)
timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
if integration is None and auth_mode is not None:
if auth_mode not in AUTH_SCHEME_WITH_INITIATE:
- raise ComposioClientError(
+ raise InvalidParams(
f"'auth_mode' should be one of {AUTH_SCHEME_WITH_INITIATE}"
)
+
auth_mode = t.cast(AuthSchemeType, auth_mode)
if "OAUTH" not in auth_mode:
use_composio_auth = False
+
integration = self.client.integrations.create(
app_id=app.appId,
name=f"{app_name}_{timestamp}",
diff --git a/python/composio/client/collections.py b/python/composio/client/collections.py
index 4ed61418e19..9f891e34012 100644
--- a/python/composio/client/collections.py
+++ b/python/composio/client/collections.py
@@ -30,8 +30,14 @@
Trigger,
TriggerType,
)
-from composio.client.exceptions import ComposioClientError, ComposioSDKError
from composio.constants import PUSHER_CLUSTER, PUSHER_KEY
+from composio.exceptions import (
+ ErrorFetchingResource,
+ InvalidParams,
+ InvalidTriggerFilters,
+ SDKTimeoutError,
+ TriggerSubscriptionError,
+)
from composio.utils import help_msg, logging
from composio.utils.shared import generate_request_id
@@ -127,7 +133,7 @@ def save_user_access_data(
def wait_until_active(
self,
client: "Composio",
- timeout=60,
+ timeout: float = 60.0,
) -> "ConnectedAccountModel":
start_time = time.time()
while time.time() - start_time < timeout:
@@ -138,8 +144,7 @@ def wait_until_active(
return connection
time.sleep(1)
- # TODO: Replace with timeout error.
- raise ComposioClientError(
+ raise SDKTimeoutError(
"Connection did not become active within the timeout period."
)
@@ -212,8 +217,11 @@ def get(
"""
entity_ids = entity_ids or ()
if connection_id is not None and len(entity_ids) > 0:
- raise ComposioClientError(
- message="Cannot use both `connection_id` and `entity_ids` parameters as filter"
+ raise InvalidParams(
+ message=(
+ "Cannot use both `connection_id` and `entity_ids` "
+ "parameters as filter"
+ )
)
if connection_id is not None:
@@ -532,9 +540,7 @@ def __init__(self, client: "Composio") -> None:
def validate_filters(self, filters: _TriggerEventFilters):
docs_link_msg = "\nRead more here: https://docs.composio.dev/introduction/intro/quickstart_3"
if not isinstance(filters, dict):
- raise ComposioSDKError(
- "Expected filters to be a dictionary" + docs_link_msg
- )
+ raise InvalidParams("Expected filters to be a dictionary" + docs_link_msg)
expected_filters = list(_TriggerEventFilters.__annotations__)
for filter, value in filters.items():
@@ -546,7 +552,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
if possible_values:
(possible_value,) = possible_values
error_msg += f" Did you mean {possible_value!r}?"
- raise ComposioSDKError(error_msg + docs_link_msg)
+ raise InvalidTriggerFilters(error_msg + docs_link_msg)
# Validate app name
if filter == "app_name":
@@ -555,7 +561,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
elif isinstance(value, str):
slug = value
else:
- raise ComposioSDKError(
+ raise InvalidTriggerFilters(
f"Expected 'app_name' to be App or str, found {value!r}"
+ docs_link_msg
)
@@ -572,7 +578,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
(possible_value,) = possible_values
error_msg += f" Did you mean {possible_value!r}?"
- raise ComposioSDKError(error_msg + docs_link_msg)
+ raise InvalidTriggerFilters(error_msg + docs_link_msg)
# Ensure at least one of the app's triggers are enabled on the account.
active_triggers = [
@@ -586,7 +592,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
f"App {slug!r} has no triggers enabled on your account.\n"
"Find the possible triggers by running `composio triggers`."
)
- raise ComposioSDKError(error_msg + docs_link_msg)
+ raise InvalidTriggerFilters(error_msg + docs_link_msg)
# Validate trigger name
if filter == "trigger_name":
@@ -595,7 +601,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
elif isinstance(value, str):
slug = value
else:
- raise ComposioSDKError(
+ raise InvalidTriggerFilters(
f"Expected 'trigger_name' to be Trigger or str, found {value!r}"
+ docs_link_msg
)
@@ -614,7 +620,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
(possible_value,) = possible_values
error_msg += f" Did you mean {possible_value!r}?"
- raise ComposioSDKError(error_msg + docs_link_msg)
+ raise InvalidTriggerFilters(error_msg + docs_link_msg)
# Ensure the trigger is added on your account
active_triggers = [
@@ -625,7 +631,7 @@ def validate_filters(self, filters: _TriggerEventFilters):
f"Trigger {slug!r} is not enabled on your account.\nEnable"
f" the trigger by doing `composio triggers enable {slug}`."
)
- raise ComposioSDKError(error_msg + docs_link_msg)
+ raise InvalidTriggerFilters(error_msg + docs_link_msg)
def callback(
self,
@@ -849,7 +855,7 @@ def connect(self, timeout: float = 15.0) -> TriggerSubscription:
return self.subscription
time.sleep(0.5)
- raise TimeoutError(
+ raise SDKTimeoutError(
"Timed out while waiting for trigger listener to be established"
)
@@ -942,7 +948,7 @@ def subscribe(self, timeout: float = 15.0) -> TriggerSubscription:
)
client_id = response.json().get("client", {}).get("id")
if client_id is None:
- raise ComposioClientError("Error fetching client ID")
+ raise TriggerSubscriptionError("Error fetching client ID")
pusher = _PusherClient(
client_id=client_id,
@@ -1068,6 +1074,7 @@ class CreateUploadURLResponse(BaseModel):
id: str = Field(..., description="ID of the file")
url: str = Field(..., description="Onetime upload URL")
key: str = Field(..., description="S3 upload location")
+ exists: bool = Field(False, description="If the file already exists on S3")
class Actions(Collection[ActionModel]):
@@ -1133,13 +1140,13 @@ def is_action(obj):
return [self.model(**item) for item in local_items]
if len(actions) > 0 and len(apps) > 0:
- raise ComposioClientError(
+ raise ErrorFetchingResource(
"Error retrieving Actions, Both actions and apps "
"cannot be used as filters at the same time."
)
if len(actions) > 0 and len(tags) > 0:
- raise ComposioClientError(
+ raise ErrorFetchingResource(
"Error retrieving Actions, Both actions and tags "
"cannot be used as filters at the same time."
)
@@ -1284,8 +1291,9 @@ def _serialize_auth(auth: t.Optional[CustomAuthObject]) -> t.Optional[t.Dict]:
]
for param in data["parameters"]:
if param["in"] == "metadata":
- raise ComposioClientError(
- f"Param placement cannot be 'metadata' for remote action execution: {param}"
+ raise InvalidParams(
+ "Param placement cannot be 'metadata' for remote "
+ f"action execution: {param}"
)
return data
@@ -1326,7 +1334,7 @@ def execute(
).json()
if connected_account is None and auth is None:
- raise ComposioClientError(
+ raise InvalidParams(
"`connected_account` cannot be `None` when executing "
"an app which requires authentication"
)
diff --git a/python/composio/client/enums/action.py b/python/composio/client/enums/action.py
index 26525826ebf..4a586b6f675 100644
--- a/python/composio/client/enums/action.py
+++ b/python/composio/client/enums/action.py
@@ -4,18 +4,12 @@
from composio.client.enums.base import ActionData, replacement_action_name
from composio.client.enums.enum import Enum, EnumGenerator
from composio.constants import VERSION_LATEST, VERSION_LATEST_BASE
-from composio.exceptions import ComposioSDKError
+from composio.exceptions import EnumMetadataNotFound, InvalidVersionString, VersionError
_ACTION_CACHE: t.Dict[str, "Action"] = {}
-class InvalidVersionString(ComposioSDKError):
-
- def __init__(self, message: str, *args: t.Any, delegate: bool = False) -> None:
- super().__init__(message, *args, delegate=delegate)
-
-
def clean_version_string(version: str) -> str:
version = version.lower()
if version in (VERSION_LATEST, VERSION_LATEST_BASE):
@@ -80,7 +74,7 @@ def fetch_and_cache(self) -> t.Optional[ActionData]:
response, *_ = response
if request.status_code == 404 or "Not Found" in response.get("message", ""):
- raise ComposioSDKError(
+ raise EnumMetadataNotFound(
message=(
f"No metadata found for enum `{self.slug}`, "
"You might be trying to use an app or action "
@@ -158,14 +152,13 @@ def available_versions(self) -> t.List[str]:
return self.load().available_version
def with_version(self, version: str) -> "Action":
+ # pylint: disable=protected-access
if self.is_local:
- raise RuntimeError("Versioning is not allowed for local tools")
+ raise VersionError("Versioning is not allowed for local tools")
action = Action(self.slug, cache=False)
- action._data = self.load() # pylint: disable=protected-access
- action._version = clean_version_string( # pylint: disable=protected-access
- version=version
- )
+ action._data = self.load()
+ action._version = clean_version_string(version=version)
return action
def latest(self) -> "Action":
diff --git a/python/composio/client/enums/action.pyi b/python/composio/client/enums/action.pyi
index 57169aff9b9..9cc0f6cd155 100644
--- a/python/composio/client/enums/action.pyi
+++ b/python/composio/client/enums/action.pyi
@@ -4,14 +4,10 @@ import warnings
from composio.client.enums.base import ActionData, replacement_action_name
from composio.client.enums.enum import Enum, EnumGenerator
from composio.constants import VERSION_LATEST, VERSION_LATEST_BASE
-from composio.exceptions import ComposioSDKError
+from composio.exceptions import EnumMetadataNotFound, InvalidVersionString, VersionError
_ACTION_CACHE: t.Dict[str, "Action"] = {}
-class InvalidVersionString(ComposioSDKError):
- def __init__(self, message: str, *args: t.Any, delegate: bool = False) -> None:
- super().__init__(message, *args, delegate=delegate)
-
def clean_version_string(version: str) -> str:
version = version.lower()
if version in (VERSION_LATEST, VERSION_LATEST_BASE):
@@ -228,6 +224,67 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
AIRTABLE_LIST_RECORDS: "Action"
AIRTABLE_UPDATE_MULTIPLE_RECORDS: "Action"
AIRTABLE_UPDATE_RECORD: "Action"
+ AMPLITUDE_ANDROID_ATTRIBUTION: "Action"
+ AMPLITUDE_AVERAGE_SESSIONS_PER_USER: "Action"
+ AMPLITUDE_AVERAGE_SESSION_LENGTH: "Action"
+ AMPLITUDE_BATCH_EVENT_UPLOAD: "Action"
+ AMPLITUDE_COHORT_DOWNLOAD: "Action"
+ AMPLITUDE_CUSTOM_PERIOD: "Action"
+ AMPLITUDE_DELETE_AN_EVENT_CATEGORY: "Action"
+ AMPLITUDE_DELETE_A_TABLE: "Action"
+ AMPLITUDE_DELETE_DELETION_REQUEST: "Action"
+ AMPLITUDE_DELETE_EVENT_PROPERTY: "Action"
+ AMPLITUDE_DELETE_EVENT_TYPE: "Action"
+ AMPLITUDE_DELETE_GROUP: "Action"
+ AMPLITUDE_DELETE_USER: "Action"
+ AMPLITUDE_DELETE_USER_PROPERTY: "Action"
+ AMPLITUDE_EVENT_SEGMENTATION_CHART: "Action"
+ AMPLITUDE_FUNNEL_ANALYSIS: "Action"
+ AMPLITUDE_GET_ALL_COHORTS: "Action"
+ AMPLITUDE_GET_ALL_EVENT_CATEGORIES: "Action"
+ AMPLITUDE_GET_ALL_EVENT_TYPES: "Action"
+ AMPLITUDE_GET_ALL_GLOBAL_CHART_ANNOTATIONS: "Action"
+ AMPLITUDE_GET_ALL_LOOKUP_TABLES_COPY: "Action"
+ AMPLITUDE_GET_ALL_USER_PROPERTIES: "Action"
+ AMPLITUDE_GET_AN_EVENT_CATEGORY: "Action"
+ AMPLITUDE_GET_AN_EVENT_TYPE: "Action"
+ AMPLITUDE_GET_A_SINGLE_COHORT: "Action"
+ AMPLITUDE_GET_A_SINGLE_LOOKUP_TABLE: "Action"
+ AMPLITUDE_GET_COHORT_STATUS: "Action"
+ AMPLITUDE_GET_DELETION_REQUESTS: "Action"
+ AMPLITUDE_GET_EVENTS_LIST: "Action"
+ AMPLITUDE_GET_EVENT_PROPERTY: "Action"
+ AMPLITUDE_GET_EXPORT: "Action"
+ AMPLITUDE_GET_GROUPS: "Action"
+ AMPLITUDE_GET_GROUP_BY_ID: "Action"
+ AMPLITUDE_GET_MULTIPLE_RECOMMENDATIONS_COPY: "Action"
+ AMPLITUDE_GET_OUTPUT_FILE: "Action"
+ AMPLITUDE_GET_REQUEST_STATUS: "Action"
+ AMPLITUDE_GET_RESULTS_FROM_EXISTING_CHART: "Action"
+ AMPLITUDE_GET_THAT_SHOWS_LIMITS_OF_GET_PARAM_LENGTH: "Action"
+ AMPLITUDE_GET_USER: "Action"
+ AMPLITUDE_GET_USERS: "Action"
+ AMPLITUDE_GET_USER_BY_ID: "Action"
+ AMPLITUDE_GET_USER_PROPERTY: "Action"
+ AMPLITUDE_HTTP_API_IDENTIFY: "Action"
+ AMPLITUDE_IDENTIFY_API_CALL_EXAMPLE: "Action"
+ AMPLITUDE_POST_DELETION_REQUEST_JSON: "Action"
+ AMPLITUDE_REAL_TIME_ACTIVE_USERS: "Action"
+ AMPLITUDE_RETENTION_ANALYSIS: "Action"
+ AMPLITUDE_REVENUE_LTV: "Action"
+ AMPLITUDE_SESSION_LENGTH_DISTRIBUTION: "Action"
+ AMPLITUDE_UNMAPPING_REQUEST: "Action"
+ AMPLITUDE_UPDATE_AN_EVENT_CATEGORY: "Action"
+ AMPLITUDE_UPDATE_AN_EVENT_TYPE: "Action"
+ AMPLITUDE_UPDATE_EVENT_PROPERTY: "Action"
+ AMPLITUDE_UPDATE_GROUP: "Action"
+ AMPLITUDE_UPDATE_LOOKUP_TABLE: "Action"
+ AMPLITUDE_UPDATE_MEMBERSHIP: "Action"
+ AMPLITUDE_UPDATE_USER_PROPERTY: "Action"
+ AMPLITUDE_UPLOAD_IDS: "Action"
+ AMPLITUDE_USER_ACTIVITY: "Action"
+ AMPLITUDE_USER_COMPOSITION: "Action"
+ AMPLITUDE_USER_UPDATE: "Action"
ANTHROPIC_BASH_COMMAND: "Action"
ANTHROPIC_COMPUTER: "Action"
ANTHROPIC_TEXT_EDITOR: "Action"
@@ -1329,6 +1386,153 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
BLACKBOARD_UPDATE_USER: "Action"
BLACKBOARD_UPLOAD: "Action"
BLACKBOARD_VIEW_COURSE_GROUP_ACCESSIBILITIES: "Action"
+ BOLNA_DELETE_AGENT_BY_ID: "Action"
+ BOLNA_DELETE_BATCH_BY_ID: "Action"
+ BOLNA_FETCH_ALL_BATCHES_BY_AGENT_ID: "Action"
+ BOLNA_GET_ALL_AGENTS: "Action"
+ BOLNA_GET_ALL_PHONE_NUMBERS: "Action"
+ BOLNA_GET_EXECUTION_BY_ID: "Action"
+ BOLNA_MAKE_A_PHONE_CALL_FROM_AGENT: "Action"
+ BOLNA_RETRIEVE_AGENT_BY_ID: "Action"
+ BOLNA_RETRIEVE_AGENT_EXECUTION_DETAILS: "Action"
+ BOLNA_RETRIEVE_AGENT_EXECUTION_STATUS: "Action"
+ BOLNA_RETRIEVE_BATCH_DETAILS_BY_ID: "Action"
+ BOLNA_RETRIEVE_BATCH_EXECUTION_LIST: "Action"
+ BOLNA_SCHEDULE_BATCH_BY_ID: "Action"
+ BOLNA_SETUP_INBOUND_CALL_FOR_AGENT: "Action"
+ BOLNA_STOP_BATCH_BY_ID: "Action"
+ BORNEO_ADD_DISCOVERED_RECIPIENTS_BY_UUID: "Action"
+ BORNEO_ADD_EMPLOYEE_DETAILS: "Action"
+ BORNEO_ARCHIVE_DISCOVERED_RECIPIENT_VIA_ID: "Action"
+ BORNEO_CREATE_DATA_PROCESSING_THRESHOLD: "Action"
+ BORNEO_CREATE_DEPARTMENT_WITH_TRANSLATIONS: "Action"
+ BORNEO_CREATE_DOMAIN_WITH_FREQUENCY: "Action"
+ BORNEO_CREATE_DPIA_FOR_PROCESSING_ACTIVITY: "Action"
+ BORNEO_CREATE_HEADQUARTER_ENTRY_WITH_DETAILS: "Action"
+ BORNEO_CREATE_INFOTYPE_CATEGORY: "Action"
+ BORNEO_CREATE_LEGAL_DOCUMENT: "Action"
+ BORNEO_CREATE_NEW_ASSET: "Action"
+ BORNEO_CREATE_OR_FILTER_CONNECTOR: "Action"
+ BORNEO_CREATE_PROCESSING_ACTIVITIES_RECORD: "Action"
+ BORNEO_CREATE_PROCESSING_ACTIVITY_THRESHOLDS: "Action"
+ BORNEO_CREATE_RECIPIENT: "Action"
+ BORNEO_DELETE_ASSET_BY_ID: "Action"
+ BORNEO_DELETE_CATEGORY_BY_LABEL: "Action"
+ BORNEO_DELETE_DASHBOARD_REPORT_BY_ID: "Action"
+ BORNEO_DELETE_DEPARTMENT_BY_ID: "Action"
+ BORNEO_DELETE_DOMAIN_BY_ID: "Action"
+ BORNEO_DELETE_DPIA_BY_ID: "Action"
+ BORNEO_DELETE_EMPLOYEE_BY_ID: "Action"
+ BORNEO_DELETE_HEADQUARTER_BY_ID: "Action"
+ BORNEO_DELETE_LEGAL_DOCUMENT_BY_ID: "Action"
+ BORNEO_DELETE_LOPDP_THRESHOLD_BY_ID: "Action"
+ BORNEO_DELETE_PROCESSING_ACTIVITY_BY_ID: "Action"
+ BORNEO_DELETE_RECIPIENT_BY_ID: "Action"
+ BORNEO_DELETE_RESOURCE_TAGS: "Action"
+ BORNEO_DELETE_THRESHOLD_BY_ID: "Action"
+ BORNEO_DOWNLOAD_DASHBOARD_REPORT: "Action"
+ BORNEO_DOWNLOAD_DASHBOARD_REPORT_EDITION: "Action"
+ BORNEO_EXPORT_FILTERED_LEAF_RESOURCES: "Action"
+ BORNEO_EXPORT_INVENTORY_LIST_WITH_FILTERS: "Action"
+ BORNEO_EXPORT_PAGE_INSPECTION_RESULTS: "Action"
+ BORNEO_EXPORT_PROCESSING_ACTIVITIES_LIST: "Action"
+ BORNEO_EXPORT_RECIPIENT_LIST_WITH_FILTERS: "Action"
+ BORNEO_FETCH_CATEGORY_BY_LABEL: "Action"
+ BORNEO_FETCH_DISCOVERED_DOCUMENT_BY_ID: "Action"
+ BORNEO_FETCH_LEGAL_DOCUMENT_LIST_BY_FILTER: "Action"
+ BORNEO_FETCH_SCAN_ITERATION_BY_ID: "Action"
+ BORNEO_FILTER_ACCESS_LOGS_WITH_CRITERIA: "Action"
+ BORNEO_FILTER_DEPARTMENTS_LIST_VIA_POST: "Action"
+ BORNEO_FILTER_EMPLOYEE_LIST: "Action"
+ BORNEO_FILTER_RECIPIENTS_LIST: "Action"
+ BORNEO_GET_ACCOUNT_BY_ID: "Action"
+ BORNEO_GET_DEPARTMENT_BY_ID: "Action"
+ BORNEO_GET_DISCOVERED_INFOTYPE_BY_ID: "Action"
+ BORNEO_GET_DOMAIN_BY_ID: "Action"
+ BORNEO_GET_LEGAL_DOCUMENT_BY_ID: "Action"
+ BORNEO_GET_PROCESSING_ACTIVITY_BY_ID: "Action"
+ BORNEO_GET_RESOURCE_CATALOG_BY_RESOURCE_ID: "Action"
+ BORNEO_GET_RESOURCE_STATS: "Action"
+ BORNEO_GET_SCAN_DETAILS_BY_RESOURCE_ID: "Action"
+ BORNEO_GET_THRESHOLD_BY_ID: "Action"
+ BORNEO_GET_TOM_BY_ID: "Action"
+ BORNEO_LIST_AND_FILTER_INSPECTION_RESULTS: "Action"
+ BORNEO_LIST_ASSETS_WITH_FILTERS: "Action"
+ BORNEO_LIST_DASHBOARD_REPORTS_WITH_FILTERS: "Action"
+ BORNEO_LIST_DASHBOARD_REPORT_EDITIONS_POST: "Action"
+ BORNEO_LIST_DISCOVERED_DOCUMENTS: "Action"
+ BORNEO_LIST_DISCOVERED_INFO_TYPES_WITH_FILTERS: "Action"
+ BORNEO_LIST_DISCOVERED_RECIPIENTS: "Action"
+ BORNEO_LIST_DOMAINS_WITH_PAGINATION_AND_SORTING: "Action"
+ BORNEO_LIST_EMPLOYEES_WITH_FILTERS: "Action"
+ BORNEO_LIST_ERROR_WITH_FILTER_CONDITIONS: "Action"
+ BORNEO_LIST_FILTERED_INVENTORY_RESOURCES: "Action"
+ BORNEO_LIST_FILTERED_LEAF_RESOURCES: "Action"
+ BORNEO_LIST_FILTERS_BY_SCAN_ID: "Action"
+ BORNEO_LIST_FILTERS_FOR_PROCESSING_ACTIVITIES: "Action"
+ BORNEO_LIST_ISSUES_WITH_FILTERS: "Action"
+ BORNEO_LIST_PROCESSING_ACTIVITIES: "Action"
+ BORNEO_LIST_RECIPIENTS_WITH_FILTERING: "Action"
+ BORNEO_LIST_RECIPIENT_FILTER_TYPES: "Action"
+ BORNEO_LIST_SCAN_EXECUTION_RESULTS: "Action"
+ BORNEO_LIST_SCAN_ITERATIONS: "Action"
+ BORNEO_LIST_TOMS_WITH_FILTERS: "Action"
+ BORNEO_PAUSE_SCAN_USING_SCAN_ID: "Action"
+ BORNEO_POLL_DOMAIN: "Action"
+ BORNEO_POST_AUDIT_LOGS_WITH_FILTERS: "Action"
+ BORNEO_POST_CATEGORIES_LIST_WITH_SORTING: "Action"
+ BORNEO_POST_CLASSIFICATION_STATS: "Action"
+ BORNEO_POST_CONNECTION_STATUS_BY_FILTER: "Action"
+ BORNEO_POST_DASHBOARD_REPORT: "Action"
+ BORNEO_POST_DISCOVERED_RECIPIENT_USER: "Action"
+ BORNEO_POST_FILTERED_ACCOUNTS_DATA: "Action"
+ BORNEO_POST_FILTERED_DEPARTMENT_LIST: "Action"
+ BORNEO_POST_RESOURCE_LINEAGE_WITH_FILTERS: "Action"
+ BORNEO_POST_SCAN_LIST_DATA: "Action"
+ BORNEO_POST_USER_PROFILE_LIST_WITH_FILTERS: "Action"
+ BORNEO_PUT_THRESHOLD_DATA_PROCESSING_STATUS: "Action"
+ BORNEO_RESUME_SCAN_BY_ID: "Action"
+ BORNEO_RETRIEVE_ASSET_BY_ID: "Action"
+ BORNEO_RETRIEVE_CLOUD_ACCOUNT_BY_ID: "Action"
+ BORNEO_RETRIEVE_CONNECTOR_BY_ID: "Action"
+ BORNEO_RETRIEVE_DASHBOARD_REPORT_BY_ID: "Action"
+ BORNEO_RETRIEVE_DASHBOARD_REPORT_EDITION_BY_ID: "Action"
+ BORNEO_RETRIEVE_DASHBOARD_REPORT_TRIGGER: "Action"
+ BORNEO_RETRIEVE_DISCOVERED_RECIPIENT_BY_ID: "Action"
+ BORNEO_RETRIEVE_DPIA_BY_ID: "Action"
+ BORNEO_RETRIEVE_EMPLOYEE_DETAILS_BY_ID: "Action"
+ BORNEO_RETRIEVE_ERROR_BY_ID: "Action"
+ BORNEO_RETRIEVE_FILTERED_EVENT_LIST: "Action"
+ BORNEO_RETRIEVE_HEADQUARTERS_LIST: "Action"
+ BORNEO_RETRIEVE_HEADQUARTER_INFO_BY_ID: "Action"
+ BORNEO_RETRIEVE_INSIGHT_BY_TYPE_AND_ID: "Action"
+ BORNEO_RETRIEVE_ISSUE_DETAILS_BY_ID: "Action"
+ BORNEO_RETRIEVE_LOPDP_THRESHOLD_BY_ID: "Action"
+ BORNEO_RETRIEVE_RECIPIENT_DETAILS_BY_ID: "Action"
+ BORNEO_RETRIEVE_RECIPIENT_PROCESSING_ACTIVITIES: "Action"
+ BORNEO_RETRIEVE_RESOURCE_INVENTORY_BY_ID: "Action"
+ BORNEO_RETRIEVE_SCAN_BY_ID: "Action"
+ BORNEO_RETRIEVE_USER_PROFILE_BY_USER_ID: "Action"
+ BORNEO_SCAN_LEGAL_DOCUMENT_BY_ID: "Action"
+ BORNEO_SCHEDULE_AND_PERFORM_RESOURCE_SCAN: "Action"
+ BORNEO_STOP_SCAN_BY_ID: "Action"
+ BORNEO_SUBMIT_SCAN_RESOURCE_STATUS_UPDATE: "Action"
+ BORNEO_UPDATE_ASSET_BY_ID: "Action"
+ BORNEO_UPDATE_DASHBOARD_REPORT_SCHEDULING: "Action"
+ BORNEO_UPDATE_DEPARTMENT_NAME: "Action"
+ BORNEO_UPDATE_DISCOVERED_DOCUMENT_STATUS: "Action"
+ BORNEO_UPDATE_DISCOVERED_INFOTYPE_STATUS: "Action"
+ BORNEO_UPDATE_DOMAIN_POLLING_SETTINGS: "Action"
+ BORNEO_UPDATE_DPIA_BY_ID: "Action"
+ BORNEO_UPDATE_EMPLOYEE_BY_ID: "Action"
+ BORNEO_UPDATE_HEADQUARTERS_DETAILS: "Action"
+ BORNEO_UPDATE_INFOTYPE_CATEGORY_BY_LABEL: "Action"
+ BORNEO_UPDATE_PROCESSING_ACTIVITY: "Action"
+ BORNEO_UPDATE_RECIPIENT_DETAILS: "Action"
+ BORNEO_UPDATE_RECIPIENT_STATUS: "Action"
+ BORNEO_UPDATE_THRESHOLD_CONFIGURATION: "Action"
+ BORNEO_UPDATE_TOM_STATUS_AND_DOCUMENTS: "Action"
+ BORNEO_VERIFY_EMAIL_TOKEN_WITH_ID: "Action"
BREVO_ACTIVATE_THE_E_COMMERCE_APP: "Action"
BREVO_ADD_A_NEW_DOMAIN_TO_THE_LIST_OF_BLOCKED_DOMAINS: "Action"
BREVO_ADD_EMAIL_AND_OR_SMS_CREDITS_TO_A_SPECIFIC_CHILD_ACCOUNT: "Action"
@@ -2021,6 +2225,49 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
COMPOSIO_SEARCH_TAVILY_SEARCH: "Action"
COMPOSIO_SEARCH_TRENDS_SEARCH: "Action"
COMPOSIO_WAIT_FOR_CONNECTION: "Action"
+ D2LBRIGHTSPACE_COPY_ROLE: "Action"
+ D2LBRIGHTSPACE_CREATE_A_COURSE_TEMPLATE: "Action"
+ D2LBRIGHTSPACE_CREATE_GRADE_OBJECT: "Action"
+ D2LBRIGHTSPACE_CREATE_QUIZ: "Action"
+ D2LBRIGHTSPACE_CREATE_USER: "Action"
+ D2LBRIGHTSPACE_DELETE_A_COURSE_TEMPLATE: "Action"
+ D2LBRIGHTSPACE_DELETE_COURSE: "Action"
+ D2LBRIGHTSPACE_DELETE_GRADE_OBJECT: "Action"
+ D2LBRIGHTSPACE_DELETE_QUIZ: "Action"
+ D2LBRIGHTSPACE_DELETE_QUIZ_CATEGORY: "Action"
+ D2LBRIGHTSPACE_DELETE_USER: "Action"
+ D2LBRIGHTSPACE_DELETE_USER_DEMOGRAPHICS: "Action"
+ D2LBRIGHTSPACE_GET_A_COURSE_OFFERING: "Action"
+ D2LBRIGHTSPACE_GET_A_COURSE_TEMPLATE: "Action"
+ D2LBRIGHTSPACE_GET_COURSE_SCHEMA: "Action"
+ D2LBRIGHTSPACE_GET_COURSE_TEMPLATE_SCHEMA: "Action"
+ D2LBRIGHTSPACE_GET_CURRENT_USER: "Action"
+ D2LBRIGHTSPACE_GET_ENROLLED_ROLES: "Action"
+ D2LBRIGHTSPACE_GET_GRADE_ACCESS: "Action"
+ D2LBRIGHTSPACE_GET_GRADE_OBJECT: "Action"
+ D2LBRIGHTSPACE_GET_GRADE_OBJECTS: "Action"
+ D2LBRIGHTSPACE_GET_GRADE_SETUP: "Action"
+ D2LBRIGHTSPACE_GET_GRADE_STATISTICS: "Action"
+ D2LBRIGHTSPACE_GET_ORG_UNIT_DEMOGRAPHICS: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ: "Action"
+ D2LBRIGHTSPACE_GET_QUIZZES: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ_ACCESS: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ_ATTEMPT: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ_ATTEMPTS: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ_CATEGORIES: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ_CATEGORY: "Action"
+ D2LBRIGHTSPACE_GET_QUIZ_QUESTIONS: "Action"
+ D2LBRIGHTSPACE_GET_ROLES: "Action"
+ D2LBRIGHTSPACE_GET_ROLE_BY_ID: "Action"
+ D2LBRIGHTSPACE_GET_USERS: "Action"
+ D2LBRIGHTSPACE_GET_USER_BY_ID: "Action"
+ D2LBRIGHTSPACE_UPDATE_A_COURSE_OFFERING: "Action"
+ D2LBRIGHTSPACE_UPDATE_A_COURSE_TEMPLATE: "Action"
+ D2LBRIGHTSPACE_UPDATE_GRADE_OBJECT: "Action"
+ D2LBRIGHTSPACE_UPDATE_GRADE_SETUP: "Action"
+ D2LBRIGHTSPACE_UPDATE_QUIZ: "Action"
+ D2LBRIGHTSPACE_UPDATE_QUIZ_CATEGORY: "Action"
+ D2LBRIGHTSPACE_UPDATE_USER: "Action"
DISCORDBOT_ADD_GROUP_DM_USER: "Action"
DISCORDBOT_ADD_GUILD_MEMBER: "Action"
DISCORDBOT_ADD_GUILD_MEMBER_ROLE: "Action"
@@ -2865,6 +3112,7 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
FIRECRAWL_CANCEL_CRAWL_JOB: "Action"
FIRECRAWL_CRAWL_JOB_STATUS: "Action"
FIRECRAWL_CRAWL_URLS: "Action"
+ FIRECRAWL_EXTRACT: "Action"
FIRECRAWL_MAP_URLS: "Action"
FIRECRAWL_SCRAPE_EXTRACT_DATA_LLM: "Action"
FIREFLIES_ADD_TO_LIVE: "Action"
@@ -3879,6 +4127,7 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
GOOGLEDRIVE_CREATE_FILE_FROM_TEXT: "Action"
GOOGLEDRIVE_CREATE_FOLDER: "Action"
GOOGLEDRIVE_DELETE_FOLDER_OR_FILE: "Action"
+ GOOGLEDRIVE_DOWNLOAD_FILE: "Action"
GOOGLEDRIVE_EDIT_FILE: "Action"
GOOGLEDRIVE_FIND_FILE: "Action"
GOOGLEDRIVE_FIND_FOLDER: "Action"
@@ -3964,6 +4213,7 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HEYGEN_V2_VIDEO_TRANSLATE_TARGET_LANGUAGES: "Action"
HEYGEN_V2_VOICES: "Action"
HISTORY_FETCHER_GET_WORKSPACE_HISTORY: "Action"
+ HUBSPOT_ADD_ASSET_ASSOCIATION: "Action"
HUBSPOT_ADD_TOKEN_TO_EVENT_TEMPLATE: "Action"
HUBSPOT_ARCHIVE_A_BATCH_OF_QUOTES_BY_ID: "Action"
HUBSPOT_ARCHIVE_BATCH_OF_COMPANIES_BY_ID: "Action"
@@ -3988,13 +4238,19 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_AUDIT_PIPELINE_CHANGES_BY_ID: "Action"
HUBSPOT_BATCH_READ_COMPANIES_BY_PROPERTIES: "Action"
HUBSPOT_BATCH_UPDATE_QUOTES: "Action"
+ HUBSPOT_CAMPAIGN_SEARCH: "Action"
HUBSPOT_CANCEL_ACTIVE_IMPORT: "Action"
+ HUBSPOT_CLONE_MARKETING_EMAIL: "Action"
HUBSPOT_CONFIGURE_CALLING_EXTENSION_SETTINGS: "Action"
HUBSPOT_CREATE: "Action"
+ HUBSPOT_CREATE_AB_TEST_VARIATION: "Action"
HUBSPOT_CREATE_AND_RETURN_A_NEW_PROPERTY_GROUP: "Action"
HUBSPOT_CREATE_ASSOCIATION_FOR_OBJECT_TYPE: "Action"
+ HUBSPOT_CREATE_A_BATCH_OF_CAMPAIGNS: "Action"
HUBSPOT_CREATE_A_BATCH_OF_COMPANIES: "Action"
HUBSPOT_CREATE_A_BATCH_OF_CONTACTS: "Action"
+ HUBSPOT_CREATE_A_CAMPAIGN: "Action"
+ HUBSPOT_CREATE_A_NEW_MARKETING_EMAIL: "Action"
HUBSPOT_CREATE_BATCH_OF_CONTACTS: "Action"
HUBSPOT_CREATE_BATCH_OF_DEALS: "Action"
HUBSPOT_CREATE_BATCH_OF_FEEDBACK_SUBMISSIONS: "Action"
@@ -4012,6 +4268,7 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_CREATE_MULTIPLE_TIMELINE_EVENTS_BATCH: "Action"
HUBSPOT_CREATE_NEW_DEAL_OBJECT: "Action"
HUBSPOT_CREATE_NEW_OBJECT_SCHEMA_WITH_CUSTOM_PROPERTIES: "Action"
+ HUBSPOT_CREATE_OR_UPDATE_DRAFT_VERSION: "Action"
HUBSPOT_CREATE_PIPELINE_FOR_OBJECT_TYPE: "Action"
HUBSPOT_CREATE_PIPELINE_STAGE: "Action"
HUBSPOT_CREATE_PRODUCT_BATCH: "Action"
@@ -4020,28 +4277,48 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_CREATE_QUOTE_OBJECT: "Action"
HUBSPOT_CREATE_TICKET_OBJECT: "Action"
HUBSPOT_CREATE_TIMELINE_EVENT_BASED_ON_TEMPLATE: "Action"
+ HUBSPOT_CREATE_WORKFLOW: "Action"
HUBSPOT_CUSTOMIZABLE_CONTACTS_PAGE_RETRIEVAL: "Action"
+ HUBSPOT_DELETE_A_BATCH_OF_CAMPAIGNS: "Action"
+ HUBSPOT_DELETE_A_MARKETING_EMAIL: "Action"
HUBSPOT_DELETE_CALLING_EXTENSION_SETTINGS: "Action"
+ HUBSPOT_DELETE_CAMPAIGN: "Action"
HUBSPOT_DELETE_CONTACT_FOR_GDPR_COMPLIANCE: "Action"
HUBSPOT_DELETE_PIPELINE_BY_ID: "Action"
HUBSPOT_DELETE_PIPELINE_STAGE_BY_ID: "Action"
HUBSPOT_DELETE_SCHEMA_BY_OBJECT_TYPE: "Action"
HUBSPOT_DELETE_TIMELINE_EVENT_TEMPLATE: "Action"
HUBSPOT_DELETE_VIDEO_CONFERENCING_APP_SETTINGS: "Action"
+ HUBSPOT_DELETE_WORKFLOW: "Action"
HUBSPOT_FETCH_CONTACT_DETAILS_BY_ID: "Action"
+ HUBSPOT_FETCH_CONTACT_IDS: "Action"
HUBSPOT_FETCH_IMPORT_ERROR_DETAILS: "Action"
HUBSPOT_FETCH_OBJECT_DETAILS_BY_ID_OR_PROPERTY: "Action"
HUBSPOT_FETCH_RECORDING_SETTINGS_BY_APP_ID: "Action"
+ HUBSPOT_FETCH_REVENUE: "Action"
HUBSPOT_GET_ACTIVE_IMPORTS_LIST: "Action"
+ HUBSPOT_GET_AGGREGATED_STATISTICS: "Action"
+ HUBSPOT_GET_AGGREGATED_STATISTIC_INTERVALS: "Action"
+ HUBSPOT_GET_ALL_MARKETING_EMAILS_FOR_A_HUB_SPOT_ACCOUNT: "Action"
+ HUBSPOT_GET_ALL_WORKFLOWS: "Action"
HUBSPOT_GET_A_PAGE_OF_OWNERS: "Action"
+ HUBSPOT_GET_A_REVISION_OF_A_MARKETING_EMAIL: "Action"
+ HUBSPOT_GET_CAMPAIGN_METRICS: "Action"
+ HUBSPOT_GET_DRAFT_VERSION_OF_A_MARKETING_EMAIL: "Action"
HUBSPOT_GET_IMPORT_RECORD_INFORMATION: "Action"
HUBSPOT_GET_PIPELINE_STAGE_AUDIT: "Action"
HUBSPOT_GET_QUOTE_BY_ID: "Action"
+ HUBSPOT_GET_REVISIONS_OF_A_MARKETING_EMAIL: "Action"
HUBSPOT_GET_SPECIFIC_EVENT_TEMPLATE_FOR_APP: "Action"
+ HUBSPOT_GET_THE_DETAILS_OF_A_SPECIFIED_MARKETING_EMAIL: "Action"
+ HUBSPOT_GET_THE_VARIATION_OF_A_AN_A_B_MARKETING_EMAIL: "Action"
+ HUBSPOT_GET_WORKFLOW_BY_ID: "Action"
+ HUBSPOT_GET_WORKFLOW_EMAIL_CAMPAIGNS: "Action"
HUBSPOT_INITIATE_DATA_IMPORT_PROCESS: "Action"
HUBSPOT_INSTANTIATE_AND_RETURN_NEW_COMPANY_OBJECT: "Action"
HUBSPOT_LIST: "Action"
HUBSPOT_LIST_ALL_EVENT_TEMPLATES_FOR_APP: "Action"
+ HUBSPOT_LIST_ASSETS: "Action"
HUBSPOT_LIST_ASSOCIATION_TYPES: "Action"
HUBSPOT_LIST_CONTACTS_PAGE: "Action"
HUBSPOT_LIST_FEEDBACK_SUBMISSIONS_PAGE: "Action"
@@ -4081,7 +4358,9 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_READ_ALL_PROPERTY_GROUPS: "Action"
HUBSPOT_READ_AN_OWNER_BY_GIVEN_ID_OR_USER_ID: "Action"
HUBSPOT_READ_APAGE_OF_OBJECTS_BY_TYPE: "Action"
+ HUBSPOT_READ_A_BATCH_OF_CAMPAIGNS: "Action"
HUBSPOT_READ_A_BATCH_OF_CRM_OBJECT_PROPERTIES: "Action"
+ HUBSPOT_READ_A_CAMPAIGN: "Action"
HUBSPOT_READ_A_CRM_PROPERTY_BY_NAME: "Action"
HUBSPOT_READ_A_PAGE_OF_DEALS: "Action"
HUBSPOT_READ_A_PROPERTY: "Action"
@@ -4093,12 +4372,14 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_READ_BATCH_OF_PRODUCTS_BY_ID_OR_PROPERTY: "Action"
HUBSPOT_READ_BATCH_OF_QUOTES_BY_PROPERTY_VALUES: "Action"
HUBSPOT_READ_BATCH_OF_TICKETS_BY_ID_OR_UNIQUE_VALUES: "Action"
+ HUBSPOT_READ_BUDGET: "Action"
HUBSPOT_READ_CRM_CONTACT_BY_ID: "Action"
HUBSPOT_READ_CRM_OBJECT_BY_ID: "Action"
HUBSPOT_READ_FEEDBACK_SUBMISSION_BY_ID: "Action"
HUBSPOT_READ_PRODUCT_BY_ID: "Action"
HUBSPOT_READ_PROPERTY_GROUPS_FOR_OBJECT_TYPE: "Action"
HUBSPOT_RECYCLE_CONTACT_BY_ID: "Action"
+ HUBSPOT_REMOVE_ASSET_ASSOCIATION: "Action"
HUBSPOT_REMOVE_ASSOCIATION_FROM_SCHEMA: "Action"
HUBSPOT_REMOVE_DEAL_OBJECT_BY_ID: "Action"
HUBSPOT_REMOVE_TOKEN_FROM_EVENT_TEMPLATE: "Action"
@@ -4106,6 +4387,9 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_RENDER_EVENT_HEADER_OR_DETAIL_AS_HTML: "Action"
HUBSPOT_REPLACE_ALL_PROPERTIES_OF_PIPELINE: "Action"
HUBSPOT_REPLACE_PIPELINE_STAGE_PROPERTIES: "Action"
+ HUBSPOT_RESET_DRAFT: "Action"
+ HUBSPOT_RESTORE_A_REVISION_OF_A_MARKETING_EMAIL: "Action"
+ HUBSPOT_RESTORE_A_REVISION_OF_A_MARKETING_EMAIL_TO_DRAFT_STATE: "Action"
HUBSPOT_RETRIEVE_ALL_OBJECT_SCHEMAS: "Action"
HUBSPOT_RETRIEVE_ALL_PIPELINES: "Action"
HUBSPOT_RETRIEVE_ALL_PIPELINES_FOR_SPECIFIED_OBJECT_TYPE: "Action"
@@ -4138,8 +4422,10 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_SECURE_PRODUCT_SEARCH_BY_CRITERIA: "Action"
HUBSPOT_SET_CALL_RECORDING_SETTINGS: "Action"
HUBSPOT_UPDATE: "Action"
+ HUBSPOT_UPDATE_A_BATCH_OF_CAMPAIGNS: "Action"
HUBSPOT_UPDATE_A_BATCH_OF_CONTACTS: "Action"
HUBSPOT_UPDATE_A_BATCH_OF_LINE_ITEMS: "Action"
+ HUBSPOT_UPDATE_A_MARKETING_EMAIL: "Action"
HUBSPOT_UPDATE_BATCH_FEEDBACK_SUBMISSIONS: "Action"
HUBSPOT_UPDATE_BATCH_OF_COMPANIES: "Action"
HUBSPOT_UPDATE_BATCH_OF_CRMOBJECTS_TICKETS: "Action"
@@ -4147,6 +4433,7 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
HUBSPOT_UPDATE_BATCH_OF_OBJECTS_BY_IDOR_PROPERTY_VALUES: "Action"
HUBSPOT_UPDATE_BATCH_OF_PRODUCTS: "Action"
HUBSPOT_UPDATE_CALLING_APP_RECORDING_SETTINGS: "Action"
+ HUBSPOT_UPDATE_CAMPAIGN: "Action"
HUBSPOT_UPDATE_EXISTING_EVENT_TEMPLATE: "Action"
HUBSPOT_UPDATE_EXISTING_OBJECT_SCHEMA: "Action"
HUBSPOT_UPDATE_FEEDBACK_SUBMISSION_BY_ID: "Action"
@@ -5337,13 +5624,34 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
MICROSOFT_TEAMS_CHATS_GET_ALL_CHATS: "Action"
MICROSOFT_TEAMS_CHATS_GET_ALL_MESSAGES: "Action"
MICROSOFT_TEAMS_TEAMS_CREATE_CHANNEL: "Action"
+ MICROSOFT_TEAMS_TEAMS_CREATE_CHAT: "Action"
MICROSOFT_TEAMS_TEAMS_GET_MESSAGE: "Action"
MICROSOFT_TEAMS_TEAMS_LIST: "Action"
MICROSOFT_TEAMS_TEAMS_LIST_CHANNELS: "Action"
MICROSOFT_TEAMS_TEAMS_LIST_CHAT_MESSAGES: "Action"
+ MICROSOFT_TEAMS_TEAMS_LIST_PEOPLE: "Action"
MICROSOFT_TEAMS_TEAMS_POST_CHANNEL_MESSAGE: "Action"
MICROSOFT_TEAMS_TEAMS_POST_CHAT_MESSAGE: "Action"
MICROSOFT_TEAMS_TEAMS_POST_MESSAGE_REPLY: "Action"
+ MIXPANEL_AGGREGATED_EVENT_PROPERTY_VALUES: "Action"
+ MIXPANEL_AGGREGATE_EVENT_COUNTS: "Action"
+ MIXPANEL_JQL_QUERY: "Action"
+ MIXPANEL_LIST_COHORTS: "Action"
+ MIXPANEL_LIST_FUNNELS: "Action"
+ MIXPANEL_PROFILE_EVENT_ACTIVITY: "Action"
+ MIXPANEL_QUERY_FREQUENCY_REPORT: "Action"
+ MIXPANEL_QUERY_FUNNEL: "Action"
+ MIXPANEL_QUERY_INSIGHT: "Action"
+ MIXPANEL_QUERY_NUMERIC_AVERAGE: "Action"
+ MIXPANEL_QUERY_NUMERIC_SEGMENTATION: "Action"
+ MIXPANEL_QUERY_NUMERIC_SUM: "Action"
+ MIXPANEL_QUERY_PROFILES: "Action"
+ MIXPANEL_QUERY_RETENTION_REPORT: "Action"
+ MIXPANEL_QUERY_SEGMENTATION: "Action"
+ MIXPANEL_TODAYS_TOP_EVENTS: "Action"
+ MIXPANEL_TOP_EVENTS: "Action"
+ MIXPANEL_TOP_EVENT_PROPERTIES: "Action"
+ MIXPANEL_TOP_EVENT_PROPERTY_VALUES: "Action"
MONDAY_ADD_USERS_TO_BOARD: "Action"
MONDAY_ARCHIVE_BOARD: "Action"
MONDAY_ARCHIVE_ITEM: "Action"
@@ -5453,8 +5761,8 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
NOTION_SEARCH_NOTION_PAGE: "Action"
NOTION_UPDATE_ROW_DATABASE: "Action"
NOTION_UPDATE_SCHEMA_DATABASE: "Action"
- ONEPAGE_GENERIC_SEARCH_API: "Action"
- ONEPAGE_RETRIEVE_TOKEN_DETAILS_AND_VALIDATE_TOKEN_STATE: "Action"
+ ONEPAGE_SEARCH_INPUT_POST_REQUEST: "Action"
+ ONEPAGE_TOKEN_DETAILS_REQUEST: "Action"
ONE_DRIVE_ONEDRIVE_CREATE_FOLDER: "Action"
ONE_DRIVE_ONEDRIVE_CREATE_TEXT_FILE: "Action"
ONE_DRIVE_ONEDRIVE_FIND_FILE: "Action"
@@ -6505,6 +6813,14 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
POSTHOG_UPDATE_SPECIFIC_PROXY_RECORD_FIELDS: "Action"
RAGTOOL_ADD_CONTENT_TO_RAG_TOOL: "Action"
RAGTOOL_RAG_TOOL_QUERY: "Action"
+ RECALLAI_CREATE_BOT: "Action"
+ RECALLAI_DELETE_BOT: "Action"
+ RECALLAI_LIST_BOTS: "Action"
+ RECALLAI_LIST_CHAT_MESSAGES: "Action"
+ RECALLAI_REMOVE_BOT_FROM_CALL: "Action"
+ RECALLAI_RETRIEVE_BOT: "Action"
+ RECALLAI_START_RECORDING: "Action"
+ RECALLAI_STOP_RECORDING: "Action"
REDDIT_CREATE_REDDIT_POST: "Action"
REDDIT_DELETE_REDDIT_COMMENT: "Action"
REDDIT_DELETE_REDDIT_POST: "Action"
@@ -6524,6 +6840,7 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
RETELLAI_RETRIEVE_DETAILS_OF_A_SPECIFIC_VOICE: "Action"
RETELLAI_UPDATE_PHONE_NUMBER: "Action"
ROCKETLANE_CREATE_TASK_WITH_ASSIGNEE_AND_DATES: "Action"
+ ROCKETLANE_GET_PROJECT_LIST: "Action"
ROCKETLANE_RETRIEVE_HOME_ENDPOINT: "Action"
ROCKETLANE_RETRIEVE_PROJECT_DETAILS_BY_ID: "Action"
ROCKETLANE_RETRIEVE_SUBSCRIPTION_DETAILS: "Action"
@@ -6559,6 +6876,20 @@ class Action(Enum[ActionData], metaclass=EnumGenerator):
SALESFORCE_UPDATE_LEAD_BY_ID_WITH_JSON_PAYLOAD: "Action"
SALESFORCE_UPDATE_OPPORTUNITY_BY_ID: "Action"
SALESFORCE_UPDATE_SPECIFIC_NOTE_BY_ID: "Action"
+ SEMANTICSCHOLAR_DETAILS_ABOUT_AN_AUTHOR: "Action"
+ SEMANTICSCHOLAR_DETAILS_ABOUT_AN_AUTHOR_S_PAPERS: "Action"
+ SEMANTICSCHOLAR_DETAILS_ABOUT_A_PAPER: "Action"
+ SEMANTICSCHOLAR_DETAILS_ABOUT_A_PAPER_S_AUTHORS: "Action"
+ SEMANTICSCHOLAR_DETAILS_ABOUT_A_PAPER_S_CITATIONS: "Action"
+ SEMANTICSCHOLAR_DETAILS_ABOUT_A_PAPER_S_REFERENCES: "Action"
+ SEMANTICSCHOLAR_GET_DETAILS_FOR_MULTIPLE_AUTHORS_AT_ONCE: "Action"
+ SEMANTICSCHOLAR_GET_DETAILS_FOR_MULTIPLE_PAPERS_AT_ONCE: "Action"
+ SEMANTICSCHOLAR_PAPER_BULK_SEARCH: "Action"
+ SEMANTICSCHOLAR_PAPER_RELEVANCE_SEARCH: "Action"
+ SEMANTICSCHOLAR_PAPER_TITLE_SEARCH: "Action"
+ SEMANTICSCHOLAR_SEARCH_FOR_AUTHORS_BY_NAME: "Action"
+ SEMANTICSCHOLAR_SUGGEST_PAPER_QUERY_COMPLETIONS: "Action"
+ SEMANTICSCHOLAR_TEXT_SNIPPET_SEARCH: "Action"
SENDGRID_ACTIVATE_A_TRANSACTIONAL_TEMPLATE_VERSION: "Action"
SENDGRID_ADD_AN_IP_ADDRESS_TO_A_POOL: "Action"
SENDGRID_ADD_AN_IP_TO_AN_AUTHENTICATED_DOMAIN: "Action"
diff --git a/python/composio/client/enums/app.pyi b/python/composio/client/enums/app.pyi
index 3c6e837e0e4..1ad8565cf9e 100644
--- a/python/composio/client/enums/app.pyi
+++ b/python/composio/client/enums/app.pyi
@@ -57,6 +57,8 @@ class App(Enum[AppData], metaclass=EnumGenerator):
BLACKBAUD: "App"
BLACKBOARD: "App"
BOLDSIGN: "App"
+ BOLNA: "App"
+ BORNEO: "App"
BOTBABA: "App"
BOX: "App"
BRAINTREE: "App"
@@ -89,6 +91,7 @@ class App(Enum[AppData], metaclass=EnumGenerator):
COMPOSIO_SEARCH: "App"
CONTENTFUL: "App"
CUSTOMER_IO: "App"
+ D2LBRIGHTSPACE: "App"
DAILYBOT: "App"
DATADOG: "App"
DATAGMA: "App"
@@ -220,6 +223,7 @@ class App(Enum[AppData], metaclass=EnumGenerator):
RAFFLYS: "App"
RAGTOOL: "App"
RAVENSEOTOOLS: "App"
+ RECALLAI: "App"
REDDIT: "App"
RETELLAI: "App"
RING_CENTRAL: "App"
@@ -230,6 +234,7 @@ class App(Enum[AppData], metaclass=EnumGenerator):
SALESFORCE: "App"
SCREENSHOTONE: "App"
SEISMIC: "App"
+ SEMANTICSCHOLAR: "App"
SENDGRID: "App"
SENTRY: "App"
SERPAPI: "App"
diff --git a/python/composio/client/enums/base.py b/python/composio/client/enums/base.py
index 83ac5194563..bd145444fe2 100644
--- a/python/composio/client/enums/base.py
+++ b/python/composio/client/enums/base.py
@@ -2,7 +2,6 @@
Enum helper base.
"""
-import difflib
import typing as t
from pydantic import Field
@@ -13,7 +12,6 @@
VERSION_LATEST,
VERSION_LATEST_BASE,
)
-from composio.exceptions import ComposioSDKError
from composio.storage.base import LocalStorage
@@ -27,19 +25,6 @@
TRIGGERS_CACHE = LOCAL_CACHE_DIRECTORY / "triggers"
-class EnumStringNotFound(ComposioSDKError):
- """Raise when user provides invalid enum string."""
-
- def __init__(self, value: str, enum: str, possible_values: t.List[str]) -> None:
- error_message = f"Invalid value `{value}` for enum class `{enum}`"
- matches = difflib.get_close_matches(value, possible_values, n=1)
- if matches:
- (match,) = matches
- error_message += f". Did you mean {match!r}?"
-
- super().__init__(message=error_message)
-
-
class SentinalObject:
"""Sentinel object."""
diff --git a/python/composio/client/enums/enum.py b/python/composio/client/enums/enum.py
index 8a341df0519..c075c2227c4 100644
--- a/python/composio/client/enums/enum.py
+++ b/python/composio/client/enums/enum.py
@@ -5,10 +5,10 @@
import typing_extensions as te
from composio.constants import LOCAL_CACHE_DIRECTORY
-from composio.exceptions import ComposioSDKError
+from composio.exceptions import EnumStringNotFound, InvalidEnum
from composio.storage.base import LocalStorage
-from .base import ActionData, EnumStringNotFound, SentinalObject
+from .base import ActionData, SentinalObject
DataT = t.TypeVar("DataT", bound=LocalStorage)
@@ -72,7 +72,8 @@ def __init__(
if hasattr(value, "sentinel"): # TODO: get rid of SentinalObject
slug = value.enum # type: ignore
if not isinstance(slug, str):
- raise ComposioSDKError(f"Invalid enum type: {slug!r}, expected str")
+ raise InvalidEnum(f"Invalid enum type: {slug!r}, expected str")
+
else:
slug = str(value)
diff --git a/python/composio/client/enums/tag.pyi b/python/composio/client/enums/tag.pyi
index 13e72e5352a..fc5cf51eb93 100644
--- a/python/composio/client/enums/tag.pyi
+++ b/python/composio/client/enums/tag.pyi
@@ -53,6 +53,34 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
AHREFS_PROJECTS: "Tag"
AHREFS_SERP_OVERVIEW: "Tag"
AIRTABLE_IMPORTANT: "Tag"
+ AMPLITUDE_ATTRIBUTION_API: "Tag"
+ AMPLITUDE_BATCH_EVENT_UPLOAD_API: "Tag"
+ AMPLITUDE_BEHAVIORAL_COHORTS_API: "Tag"
+ AMPLITUDE_CCPA_DSAR_API: "Tag"
+ AMPLITUDE_CHART_ANNOTATIONS_API: "Tag"
+ AMPLITUDE_DASHBOARD_REST_API: "Tag"
+ AMPLITUDE_EVENT_CATEGORY: "Tag"
+ AMPLITUDE_EVENT_PROPERTY: "Tag"
+ AMPLITUDE_EVENT_SEGMENTATION_CHART: "Tag"
+ AMPLITUDE_EVENT_STREAMING_METRICS_SUMMARY_API: "Tag"
+ AMPLITUDE_EVENT_TYPE: "Tag"
+ AMPLITUDE_EXPORT_API: "Tag"
+ AMPLITUDE_FUNNEL_ANALYSIS: "Tag"
+ AMPLITUDE_GROUP_ROUTES: "Tag"
+ AMPLITUDE_HTTP_V2_API: "Tag"
+ AMPLITUDE_IDENTIFY_API: "Tag"
+ AMPLITUDE_IMPORTANT: "Tag"
+ AMPLITUDE_LOOKUP_TABLE_API: "Tag"
+ AMPLITUDE_RETENTION_ANALYSIS: "Tag"
+ AMPLITUDE_SESSIONS: "Tag"
+ AMPLITUDE_USER_ACTIVITY: "Tag"
+ AMPLITUDE_USER_COMPOSITION: "Tag"
+ AMPLITUDE_USER_MAPPING__ALIASING__API: "Tag"
+ AMPLITUDE_USER_PRIVACY_API: "Tag"
+ AMPLITUDE_USER_PROFILE_API: "Tag"
+ AMPLITUDE_USER_PROPERTY: "Tag"
+ AMPLITUDE_USER_ROUTES: "Tag"
+ AMPLITUDE_USER_SEARCH: "Tag"
APALEO_IMPORTANT: "Tag"
APALEO_PROPERTY: "Tag"
APALEO_PROPERTYACTIONS: "Tag"
@@ -219,6 +247,38 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
BLACKBOARD_TERMS: "Tag"
BLACKBOARD_UPLOADS: "Tag"
BLACKBOARD_USERS: "Tag"
+ BORNEO_ACCOUNTS: "Tag"
+ BORNEO_ASSETS: "Tag"
+ BORNEO_CATALOG: "Tag"
+ BORNEO_CONNECTORS: "Tag"
+ BORNEO_DASHBOARDREPORT: "Tag"
+ BORNEO_DEPARTMENT: "Tag"
+ BORNEO_DISCOVEREDDOCUMENTS: "Tag"
+ BORNEO_DISCOVEREDINFOTYPES: "Tag"
+ BORNEO_DISCOVEREDRECIPIENTS: "Tag"
+ BORNEO_DOCUMENTS: "Tag"
+ BORNEO_DOMAINS: "Tag"
+ BORNEO_DPIAS: "Tag"
+ BORNEO_EMAILVERIFICATION: "Tag"
+ BORNEO_EMPLOYEE: "Tag"
+ BORNEO_ERRORS: "Tag"
+ BORNEO_HEADQUARTERS: "Tag"
+ BORNEO_IMPORTANT: "Tag"
+ BORNEO_INFOTYPES: "Tag"
+ BORNEO_INVENTORY: "Tag"
+ BORNEO_LOGS: "Tag"
+ BORNEO_LOPDPTHRESHOLDS: "Tag"
+ BORNEO_PROCESSING_ACTIVITIES: "Tag"
+ BORNEO_RECIPIENTS: "Tag"
+ BORNEO_REMEDIATION: "Tag"
+ BORNEO_RESOURCE: "Tag"
+ BORNEO_RESOURCES: "Tag"
+ BORNEO_SCANITERATIONS: "Tag"
+ BORNEO_SCANS: "Tag"
+ BORNEO_STATUSDASHBOARD: "Tag"
+ BORNEO_THRESHOLDS: "Tag"
+ BORNEO_TOMS: "Tag"
+ BORNEO_USER_PROFILE: "Tag"
BREVO_ACCOUNT: "Tag"
BREVO_COMPANIES: "Tag"
BREVO_CONTACTS: "Tag"
@@ -354,6 +414,13 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
CLICKUP_USERS: "Tag"
CLICKUP_VIEWS: "Tag"
CLICKUP_WEBHOOKS: "Tag"
+ D2LBRIGHTSPACE_COURSES: "Tag"
+ D2LBRIGHTSPACE_DEMOGRAPHICS: "Tag"
+ D2LBRIGHTSPACE_GRADES: "Tag"
+ D2LBRIGHTSPACE_IMPORTANT: "Tag"
+ D2LBRIGHTSPACE_QUIZZES: "Tag"
+ D2LBRIGHTSPACE_ROLES: "Tag"
+ D2LBRIGHTSPACE_USERS: "Tag"
DISCORDBOT_IMPORTANT: "Tag"
DISCORD_IMPORTANT: "Tag"
DOCUSIGN_ACCOUNTBRANDS: "Tag"
@@ -466,7 +533,10 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
FIGMA_USERS: "Tag"
FIGMA_VARIABLES: "Tag"
FIGMA_WEBHOOKS: "Tag"
+ FIRECRAWL_EXTRACTION: "Tag"
FIRECRAWL_IMPORTANT: "Tag"
+ FIRECRAWL_SCRAPING: "Tag"
+ FIRECRAWL_WEB: "Tag"
GITHUB_ACTIONS: "Tag"
GITHUB_ACTIVITY: "Tag"
GITHUB_APPS: "Tag"
@@ -515,13 +585,17 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
HEYGEN_USER: "Tag"
HEYGEN_VIDEO_TRANSLATE_API: "Tag"
HEYGEN_WEBHOOKS: "Tag"
+ HUBSPOT_ASSET: "Tag"
+ HUBSPOT_AUTOMATION: "Tag"
HUBSPOT_BASIC: "Tag"
HUBSPOT_BATCH: "Tag"
+ HUBSPOT_BUDGET: "Tag"
HUBSPOT_CORE: "Tag"
HUBSPOT_EVENTS: "Tag"
HUBSPOT_GDPR: "Tag"
HUBSPOT_GROUPS: "Tag"
HUBSPOT_IMPORTANT: "Tag"
+ HUBSPOT_MARKETING_EMAILS: "Tag"
HUBSPOT_OWNERS: "Tag"
HUBSPOT_PIPELINES: "Tag"
HUBSPOT_PIPELINE_AUDITS: "Tag"
@@ -530,9 +604,12 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
HUBSPOT_PUBLIC_IMPORTS: "Tag"
HUBSPOT_PUBLIC_OBJECT: "Tag"
HUBSPOT_PUBLIC_OBJECT_SCHEMAS: "Tag"
+ HUBSPOT_PUBLIC_VNEXT_EMAILS: "Tag"
HUBSPOT_RECORDING_SETTINGS: "Tag"
+ HUBSPOT_REPORTS: "Tag"
HUBSPOT_SEARCH: "Tag"
HUBSPOT_SETTINGS: "Tag"
+ HUBSPOT_STATISTICS: "Tag"
HUBSPOT_TEMPLATES: "Tag"
HUBSPOT_TOKENS: "Tag"
HUBSPOT_TYPES: "Tag"
@@ -705,6 +782,26 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
MEM0_RUNS: "Tag"
MEM0_STATS: "Tag"
MEM0_USERS: "Tag"
+ MICROSOFT_TEAMS_CHAT: "Tag"
+ MICROSOFT_TEAMS_IMPORTANT: "Tag"
+ MICROSOFT_TEAMS_MICROSOFT_GRAPH: "Tag"
+ MICROSOFT_TEAMS_MICROSOFT_TEAMS: "Tag"
+ MICROSOFT_TEAMS_PEOPLE: "Tag"
+ MIXPANEL_ANALYTICS: "Tag"
+ MIXPANEL_AVERAGE: "Tag"
+ MIXPANEL_COHORTS: "Tag"
+ MIXPANEL_EVENTS: "Tag"
+ MIXPANEL_FREQUENCY: "Tag"
+ MIXPANEL_FUNNELS: "Tag"
+ MIXPANEL_IMPORTANT: "Tag"
+ MIXPANEL_INSIGHTS: "Tag"
+ MIXPANEL_JQL: "Tag"
+ MIXPANEL_NUMERIC: "Tag"
+ MIXPANEL_PROFILES: "Tag"
+ MIXPANEL_PROPERTIES: "Tag"
+ MIXPANEL_RETENTION: "Tag"
+ MIXPANEL_SEGMENTATION: "Tag"
+ MIXPANEL_SUM: "Tag"
MONDAY_ACCESS: "Tag"
MONDAY_ARCHIVE: "Tag"
MONDAY_BOARD: "Tag"
@@ -877,6 +974,9 @@ class Tag(Enum[TagData], metaclass=EnumGenerator):
SALESFORCE_LEAD: "Tag"
SALESFORCE_NOTE: "Tag"
SALESFORCE_OPPORTUNITY: "Tag"
+ SEMANTICSCHOLAR_AUTHOR_DATA: "Tag"
+ SEMANTICSCHOLAR_PAPER_DATA: "Tag"
+ SEMANTICSCHOLAR_SNIPPET_TEXT: "Tag"
SENDGRID_ACCOUNT: "Tag"
SENDGRID_ACCOUNT_STATE: "Tag"
SENDGRID_ALERTS: "Tag"
diff --git a/python/composio/client/enums/trigger.pyi b/python/composio/client/enums/trigger.pyi
index ffe29311c63..a5f8a48a459 100644
--- a/python/composio/client/enums/trigger.pyi
+++ b/python/composio/client/enums/trigger.pyi
@@ -29,6 +29,9 @@ class Trigger(Enum[TriggerData], metaclass=EnumGenerator):
HUBSPOT_CONTACT_CREATED_TRIGGER: "Trigger"
HUBSPOT_DEAL_STAGE_UPDATED_TRIGGER: "Trigger"
HUBSPOT_FORM_SUBMITTED: "Trigger"
+ JIRA_JIRA_NEW_ISSUE_TRIGGER: "Trigger"
+ JIRA_JIRA_NEW_PROJECT_TRIGGER: "Trigger"
+ JIRA_JIRA_UPDATED_ISSUE_TRIGGER: "Trigger"
LINEAR_COMMENT_EVENT_TRIGGER: "Trigger"
LINEAR_ISSUE_CREATED_TRIGGER: "Trigger"
LINEAR_ISSUE_UPDATED_TRIGGER: "Trigger"
diff --git a/python/composio/client/exceptions.py b/python/composio/client/exceptions.py
index c3abf0895a6..d1507aabac4 100644
--- a/python/composio/client/exceptions.py
+++ b/python/composio/client/exceptions.py
@@ -2,42 +2,11 @@
Http client exceptions.
"""
-import typing as t
+from composio.exceptions import ComposioClientError, HTTPError, NoItemsFound
-from composio.exceptions import ComposioSDKError
-
-class HTTPError(ComposioSDKError):
- """
- Exception class for HTTP API errors.
- """
-
- def __init__(
- self,
- message: str,
- status_code: int,
- *args: t.Any,
- delegate: bool = False,
- ) -> None:
- """
- Initialize HTTPError class.
-
- :param message: Content from the API response
- :param status_code: HTTP response status code
- :param delegate: Whether to delegate the error message to the log
- collection server or not
- """
- super().__init__(message, *args, delegate=delegate)
- self.status_code = status_code
-
-
-class ComposioClientError(ComposioSDKError):
- """
- Exception class for Composio client errors.
- """
-
-
-class NoItemsFound(ComposioClientError):
- """
- Exception class for empty collection values.
- """
+__all__ = (
+ "NoItemsFound",
+ "ComposioClientError",
+ "HTTPError",
+)
diff --git a/python/composio/client/files.py b/python/composio/client/files.py
index c66d2fba53b..c0e5385e81e 100644
--- a/python/composio/client/files.py
+++ b/python/composio/client/files.py
@@ -8,7 +8,11 @@
import typing_extensions as te
from pydantic import BaseModel, ConfigDict, Field
-from composio.exceptions import ComposioSDKError
+from composio.exceptions import (
+ ErrorDownloadingFile,
+ ErrorUploadingFile,
+ SDKFileNotFoundError,
+)
from composio.utils import mimetypes
@@ -54,7 +58,7 @@ def from_path(
) -> te.Self:
file = Path(file)
if not file.exists():
- raise ComposioSDKError(f"File not found: {file}")
+ raise SDKFileNotFoundError(f"File not found: {file}")
mimetype = mimetypes.guess(file=file)
s3meta = client.actions.create_file_upload(
@@ -64,8 +68,8 @@ def from_path(
mimetype=mimetype,
md5=get_md5(file=file),
)
- if not upload(url=s3meta.url, file=file):
- raise ComposioSDKError(f"Error uploading file: {file}")
+ if not s3meta.exists and not upload(url=s3meta.url, file=file):
+ raise ErrorUploadingFile(f"Error uploading file: {file}")
return cls(
name=file.name,
@@ -86,7 +90,7 @@ def download(self, outdir: Path, chunk_size: int = _DEFAULT_CHUNK_SIZE) -> Path:
outdir.mkdir(exist_ok=True, parents=True)
response = requests.get(url=self.s3url, stream=True)
if response.status_code != 200:
- raise ComposioSDKError(f"Error downloading file: {self.s3url}")
+ raise ErrorDownloadingFile(f"Error downloading file: {self.s3url}")
with outfile.open("wb") as fd:
for chunk in response.iter_content(chunk_size=chunk_size):
diff --git a/python/composio/client/http.py b/python/composio/client/http.py
index d1d18a762a6..0435f9993df 100644
--- a/python/composio/client/http.py
+++ b/python/composio/client/http.py
@@ -8,6 +8,7 @@
from requests import Session as SyncSession
from composio.__version__ import __version__
+from composio.exceptions import SDKTimeoutError
from composio.utils import logging
from composio.utils.shared import generate_request_id
@@ -71,7 +72,7 @@ def request(url: str, **kwargs: t.Any) -> t.Any:
)
except ReadTimeout:
retries += 1
- raise TimeoutError("Timed out while waiting for request to complete")
+ raise SDKTimeoutError("Timed out while waiting for request to complete")
return request
diff --git a/python/composio/constants.py b/python/composio/constants.py
index b64f5359395..6a4a2c40eb8 100644
--- a/python/composio/constants.py
+++ b/python/composio/constants.py
@@ -26,13 +26,39 @@
Local cache directory name for composio CLI
"""
+LOCAL_CACHE_DIRECTORY_NAME = ".composio"
+"""
+Local cache directory name for composio CLI
+"""
+
+ENV_LOCAL_CACHE_DIRECTORY = "COMPOSIO_CACHE_DIR"
+"""
+Environment to set the composio caching directory.
+"""
+
+_cache_dir = os.environ.get(ENV_LOCAL_CACHE_DIRECTORY)
+
LOCAL_CACHE_DIRECTORY = (
- Path.home() if Path.home().exists() else Path.cwd()
-) / LOCAL_CACHE_DIRECTORY_NAME
+ Path(_cache_dir)
+ if _cache_dir is not None
+ else (Path.home() / LOCAL_CACHE_DIRECTORY_NAME)
+)
"""
Path to local caching directory.
"""
+try:
+ LOCAL_CACHE_DIRECTORY.mkdir(parents=True, exist_ok=True)
+ if not os.access(LOCAL_CACHE_DIRECTORY, os.W_OK):
+ raise OSError
+except OSError as e:
+ raise RuntimeError(
+ f"Cache directory {LOCAL_CACHE_DIRECTORY} is not writable please "
+ f"provide a path that is writable using {ENV_LOCAL_CACHE_DIRECTORY} "
+ "environment variable."
+ ) from e
+
+
LOCAL_OUTPUT_FILE_DIRECTORY_NAME = "output"
"""
Local output file directory name for composio tools
diff --git a/python/composio/exceptions.py b/python/composio/exceptions.py
index fd4e1b93253..4d4de438d56 100644
--- a/python/composio/exceptions.py
+++ b/python/composio/exceptions.py
@@ -2,6 +2,7 @@
Composio exceptions.
"""
+import difflib
import typing as t
from composio.constants import ENV_COMPOSIO_API_KEY
@@ -28,7 +29,171 @@ def __init__(
self.delegate = delegate
-class ApiKeyNotProvidedError(ComposioSDKError):
+class NotFoundError(ComposioSDKError):
+ pass
+
+
+class HTTPError(ComposioSDKError):
+ """
+ Exception class for HTTP API errors.
+ """
+
+ def __init__(
+ self,
+ message: str,
+ status_code: int,
+ *args: t.Any,
+ delegate: bool = False,
+ ) -> None:
+ """
+ Initialize HTTPError class.
+
+ :param message: Content from the API response
+ :param status_code: HTTP response status code
+ :param delegate: Whether to delegate the error message to the log
+ collection server or not
+ """
+ super().__init__(message, *args, delegate=delegate)
+ self.status_code = status_code
+
+
+class ComposioClientError(ComposioSDKError):
+ """
+ Exception class for Composio client errors.
+ """
+
+
+class ToolsetError(ComposioSDKError):
+ pass
+
+
+class ProcessorError(ToolsetError):
+ pass
+
+
+class EnumError(ComposioSDKError):
+ pass
+
+
+class ValidationError(ComposioSDKError):
+ pass
+
+
+class ToolError(ComposioSDKError):
+ pass
+
+
+class EntityIDError(ComposioSDKError):
+ pass
+
+
+class PluginError(ComposioSDKError):
+ pass
+
+
+class InvalidParams(ComposioSDKError):
+ pass
+
+
+class FileError(ComposioSDKError):
+ pass
+
+
+class SDKTimeoutError(TimeoutError, ComposioSDKError):
+ pass
+
+
+class SDKFileNotFoundError(FileNotFoundError, ComposioSDKError):
+ pass
+
+
+class LockFileError(ComposioSDKError):
+ pass
+
+
+class VersionError(ComposioSDKError):
+ pass
+
+
+class InvalidLockFile(LockFileError):
+ pass
+
+
+class InvalidVersionString(EnumError):
+ pass
+
+
+class VersionSelectionError(LockFileError, VersionError):
+
+ def __init__(
+ self,
+ action: str,
+ requested: str,
+ locked: str,
+ delegate: bool = False,
+ ) -> None:
+ self.action = action
+ self.requested = requested
+ self.locked = locked
+ super().__init__(
+ message=(
+ f"Error selecting version for action: {action!r}, "
+ f"requested: {requested!r}, locked: {locked!r}"
+ ),
+ delegate=delegate,
+ )
+
+
+class InvalidEnum(EnumError):
+ pass
+
+
+class EnumStringNotFound(EnumError):
+ """Raise when user provides invalid enum string."""
+
+ def __init__(self, value: str, enum: str, possible_values: t.List[str]) -> None:
+ error_message = f"Invalid value `{value}` for enum class `{enum}`"
+ matches = difflib.get_close_matches(value, possible_values, n=1)
+ if matches:
+ (match,) = matches
+ error_message += f". Did you mean {match!r}?"
+
+ super().__init__(message=error_message)
+
+
+class EnumMetadataNotFound(EnumError):
+ pass
+
+
+class ErrorUploadingFile(FileError):
+ pass
+
+
+class ErrorDownloadingFile(FileError):
+ pass
+
+
+class TriggerError(ToolError):
+ pass
+
+
+class ActionError(ToolError):
+ pass
+
+
+class TriggerSubscriptionError(TriggerError, ComposioClientError):
+ pass
+
+
+class InvalidTriggerFilters(TriggerSubscriptionError):
+ pass
+
+
+class ApiKeyError(ComposioClientError):
+ pass
+
+
+class ApiKeyNotProvidedError(ApiKeyError, NotFoundError):
"""Raise when API key is required but not provided."""
def __init__(self) -> None:
@@ -39,3 +204,49 @@ def __init__(self) -> None:
"or run `composio login`"
),
)
+
+
+class ResourceError(ComposioClientError):
+ pass
+
+
+class NoItemsFound(ResourceError):
+ """
+ Exception class for empty collection values.
+ """
+
+
+class ErrorFetchingResource(ResourceError):
+ pass
+
+
+class SchemaError(ToolError):
+ pass
+
+
+class InvalidSchemaError(TypeError, SchemaError):
+ pass
+
+
+class InvalidEntityIdError(ValueError, EntityIDError):
+ pass
+
+
+class IntegrationError(ResourceError):
+ pass
+
+
+class ConnectedAccountError(ResourceError):
+ pass
+
+
+class ConnectedAccountNotFoundError(NotFoundError, ConnectedAccountError):
+ pass
+
+
+class InvalidConnectedAccount(ValidationError, ConnectedAccountError):
+ pass
+
+
+class ErrorProcessingToolExecutionRequest(PluginError):
+ pass
diff --git a/python/composio/tools/base/abs.py b/python/composio/tools/base/abs.py
index 285b41e2dba..49b0ccd22d9 100644
--- a/python/composio/tools/base/abs.py
+++ b/python/composio/tools/base/abs.py
@@ -536,7 +536,7 @@ def execute(
:param params: Execution parameters.
:param metadata: A dictionary containing metadata for action.
"""
- raise NotImplementedError()
+ raise NotImplementedError
@classmethod
def register(cls: t.Type["Tool"]) -> None:
diff --git a/python/composio/tools/base/local.py b/python/composio/tools/base/local.py
index 32c555d2001..6e9e957795e 100644
--- a/python/composio/tools/base/local.py
+++ b/python/composio/tools/base/local.py
@@ -9,6 +9,7 @@
from pydantic import BaseModel, Field
+from composio.exceptions import NotFoundError
from composio.tools.base.abs import (
Action,
ActionRequest,
@@ -157,7 +158,7 @@ def execute(
"""
actcls = self._actions.get(action)
if actcls is None:
- raise ValueError(f"No action found with name `{action}`")
+ raise NotFoundError(f"No action found with name `{action}`")
try:
metadata = metadata or {}
diff --git a/python/composio/tools/env/docker/workspace.py b/python/composio/tools/env/docker/workspace.py
index c721829443e..40362c45176 100644
--- a/python/composio/tools/env/docker/workspace.py
+++ b/python/composio/tools/env/docker/workspace.py
@@ -9,6 +9,7 @@
from dataclasses import dataclass
from pathlib import Path
+from composio.constants import LOCAL_CACHE_DIRECTORY
from composio.exceptions import ComposioSDKError
from composio.tools.env.base import RemoteWorkspace, WorkspaceConfigType
from composio.tools.env.constants import (
@@ -35,13 +36,12 @@
COMPOSIO_PATH = Path(__file__).parent.parent.parent.parent.parent.resolve()
-COMPOSIO_CACHE = Path.home() / ".composio"
CONTAINER_DEV_VOLUMES = {
COMPOSIO_PATH: {
"bind": "/opt/composio-core",
"mode": "rw",
},
- COMPOSIO_CACHE: {
+ LOCAL_CACHE_DIRECTORY: {
"bind": "/root/.composio",
"mode": "rw",
},
diff --git a/python/composio/tools/local/browsertool/actions/base_action.py b/python/composio/tools/local/browsertool/actions/base_action.py
index 1f8bcf34139..13c4253772c 100644
--- a/python/composio/tools/local/browsertool/actions/base_action.py
+++ b/python/composio/tools/local/browsertool/actions/base_action.py
@@ -2,11 +2,11 @@
import string
from abc import abstractmethod
from enum import Enum
-from pathlib import Path
from typing import Dict, Optional
from pydantic import BaseModel, Field
+from composio.constants import LOCAL_CACHE_DIRECTORY
from composio.tools.base.local import ActionRequest, ActionResponse, LocalAction
from composio.tools.env.browsermanager.manager import BrowserManager
@@ -174,8 +174,7 @@ def execute(self, request: ActionRequest, metadata: dict) -> ActionResponse:
)
def _take_screenshot(self, browser_manager: BrowserManager, prefix: str) -> str:
- home_dir = Path.home()
- browser_media_dir = home_dir / ".browser_media"
+ browser_media_dir = LOCAL_CACHE_DIRECTORY / "browser_media"
browser_media_dir.mkdir(parents=True, exist_ok=True)
random_string = "".join(random.choices(string.ascii_lowercase, k=6))
output_path = browser_media_dir / f"{prefix}_screenshot_{random_string}.png"
diff --git a/python/composio/tools/local/browsertool/actions/get_screenshot.py b/python/composio/tools/local/browsertool/actions/get_screenshot.py
index f886171b151..804ec13a763 100644
--- a/python/composio/tools/local/browsertool/actions/get_screenshot.py
+++ b/python/composio/tools/local/browsertool/actions/get_screenshot.py
@@ -8,6 +8,7 @@
from pydantic import Field
+from composio.constants import LOCAL_CACHE_DIRECTORY
from composio.tools.env.browsermanager.manager import BrowserManager
from composio.tools.local.browsertool.actions.base_action import (
BaseBrowserAction,
@@ -65,8 +66,7 @@ def execute_on_browser_manager(
"""Execute the screenshot action."""
try:
if not request.output_path or request.output_path == "":
- home_dir = Path.home()
- browser_media_dir = home_dir / ".browser_media"
+ browser_media_dir = LOCAL_CACHE_DIRECTORY / "browser_media"
browser_media_dir.mkdir(parents=True, exist_ok=True)
random_string = "".join(random.choices(string.ascii_lowercase, k=6))
output_path = browser_media_dir / f"screenshot_{random_string}.png"
diff --git a/python/composio/tools/local/codeanalysis/constants.py b/python/composio/tools/local/codeanalysis/constants.py
index 56a3176239c..8f46628b73c 100644
--- a/python/composio/tools/local/codeanalysis/constants.py
+++ b/python/composio/tools/local/codeanalysis/constants.py
@@ -1,8 +1,7 @@
-import os
-from pathlib import Path
+from composio.constants import LOCAL_CACHE_DIRECTORY
-CODE_MAP_CACHE = os.path.join(Path.home(), ".composio/tmp")
+CODE_MAP_CACHE = str(LOCAL_CACHE_DIRECTORY / "tmp")
FQDN_FILE = "fqdn_cache.json"
DEEPLAKE_FOLDER = "deeplake"
EMBEDDER = "sentence-transformers/all-mpnet-base-v2"
diff --git a/python/composio/tools/local/embedtool/actions/create_vectorstore.py b/python/composio/tools/local/embedtool/actions/create_vectorstore.py
index 92cc395d05e..0752dda61ca 100644
--- a/python/composio/tools/local/embedtool/actions/create_vectorstore.py
+++ b/python/composio/tools/local/embedtool/actions/create_vectorstore.py
@@ -4,6 +4,7 @@
from pydantic import BaseModel, Field
+from composio.constants import LOCAL_CACHE_DIRECTORY
from composio.tools.base.local import LocalAction
@@ -56,7 +57,7 @@ def execute(
from chromadb.utils import embedding_functions # pylint: disable=C0415
image_collection_name = Path(request.folder_path).name + "_images"
- index_storage_path = Path.home() / ".composio" / "image_index_storage"
+ index_storage_path = LOCAL_CACHE_DIRECTORY / "image_index_storage"
index_storage_path.mkdir(parents=True, exist_ok=True)
# Initialize Chroma client
diff --git a/python/composio/tools/local/embedtool/actions/query_vectorstore.py b/python/composio/tools/local/embedtool/actions/query_vectorstore.py
index 747e17f82f6..466e3b30e6e 100644
--- a/python/composio/tools/local/embedtool/actions/query_vectorstore.py
+++ b/python/composio/tools/local/embedtool/actions/query_vectorstore.py
@@ -2,6 +2,7 @@
from pydantic import BaseModel, Field
+from composio.constants import LOCAL_CACHE_DIRECTORY
from composio.tools.base.local import LocalAction
@@ -46,7 +47,7 @@ def execute(
from chromadb.utils import embedding_functions # pylint: disable=C0415
image_collection_name = Path(request.indexed_directory).name + "_images"
- index_storage_path = Path.home() / ".composio" / "image_index_storage"
+ index_storage_path = LOCAL_CACHE_DIRECTORY / "image_index_storage"
chroma_client = chromadb.PersistentClient(path=str(index_storage_path))
chroma_collection = chroma_client.get_collection(image_collection_name)
diff --git a/python/composio/tools/toolset.py b/python/composio/tools/toolset.py
index 2b068f56606..c5b61bbcf05 100644
--- a/python/composio/tools/toolset.py
+++ b/python/composio/tools/toolset.py
@@ -46,7 +46,6 @@
TriggerSubscription,
)
from composio.client.enums import TriggerType
-from composio.client.enums.base import EnumStringNotFound
from composio.client.exceptions import ComposioClientError, HTTPError, NoItemsFound
from composio.client.files import FileDownloadable, FileUploadable
from composio.client.utils import check_cache_refresh
@@ -59,7 +58,20 @@
USER_DATA_FILE_NAME,
VERSION_LATEST,
)
-from composio.exceptions import ApiKeyNotProvidedError, ComposioSDKError
+from composio.exceptions import (
+ ApiKeyNotProvidedError,
+ ComposioSDKError,
+ ConnectedAccountNotFoundError,
+ EnumStringNotFound,
+ ErrorFetchingResource,
+ IntegrationError,
+ InvalidConnectedAccount,
+ InvalidLockFile,
+ InvalidParams,
+ ProcessorError,
+ ResourceError,
+ VersionSelectionError,
+)
from composio.storage.user import UserData
from composio.tools.base.abs import action_registry, tool_registry
from composio.tools.env.base import (
@@ -149,31 +161,6 @@ class _Retry:
RETRY = _Retry()
-class VersionError(ComposioSDKError):
- pass
-
-
-class VersionSelectionError(VersionError):
-
- def __init__(
- self,
- action: str,
- requested: str,
- locked: str,
- delegate: bool = False,
- ) -> None:
- self.action = action
- self.requested = requested
- self.locked = locked
- super().__init__(
- message=(
- f"Error selecting version for action: {action!r}, "
- f"requested: {requested!r}, locked: {locked!r}"
- ),
- delegate=delegate,
- )
-
-
class VersionLock:
"""Lock file representing action->version mapping"""
@@ -199,13 +186,13 @@ def __load(self) -> t.Dict[str, str]:
versions = yaml.safe_load(file)
if not isinstance(versions, dict):
- raise ComposioSDKError(
+ raise InvalidLockFile(
f"Invalid lockfile format, expected dict, got {type(versions)}"
)
for tool in versions.values():
if not isinstance(tool, str):
- raise ComposioSDKError(
+ raise InvalidLockFile(
f"Invalid lockfile format, expected version to be string, got {tool!r}"
)
@@ -320,7 +307,7 @@ def process_request(self, action: Action, request: t.Dict) -> t.Dict:
type_="pre",
)
if isinstance(processed, _Retry):
- raise ComposioSDKError(
+ raise ProcessorError(
"Received RETRY from App preprocessor function."
" Preprocessors cannot be retried."
)
@@ -331,7 +318,7 @@ def process_request(self, action: Action, request: t.Dict) -> t.Dict:
type_="pre",
)
if isinstance(processed, _Retry):
- raise ComposioSDKError(
+ raise ProcessorError(
"Received RETRY from Action preprocessor function."
" Preprocessors cannot be retried."
)
@@ -366,7 +353,7 @@ def process_schema_properties(self, action: Action, properties: t.Dict) -> t.Dic
type_="schema",
)
if isinstance(processed, _Retry):
- raise ComposioSDKError(
+ raise ProcessorError(
"Received RETRY from App schema processor function."
" Schema pprocessors cannot be retried."
)
@@ -377,7 +364,7 @@ def process_schema_properties(self, action: Action, properties: t.Dict) -> t.Dic
type_="schema",
)
if isinstance(processed, _Retry):
- raise ComposioSDKError(
+ raise ProcessorError(
"Received RETRY from Action preprocessor function."
" Schema processors cannot be retried."
)
@@ -905,9 +892,7 @@ def find_actions_by_tags(
:return: A list of actions matching the relevant use case.
"""
if len(tags) == 0:
- raise ComposioClientError(
- "Please provide at least one tag to perform search"
- )
+ raise InvalidParams("Please provide at least one tag to perform search")
if len(apps) > 0:
return list(
@@ -987,7 +972,7 @@ def get_auth_params(
) -> t.Optional[ConnectionParams]:
"""Get authentication parameters for given app."""
if app is None and connection_id is None:
- raise ComposioSDKError("Both `app` and `connection_id` cannot be `None`")
+ raise InvalidParams("Both `app` and `connection_id` cannot be `None`")
try:
connection_id = (
@@ -1057,7 +1042,7 @@ def get_active_triggers(
def delete_trigger(self, id: str) -> bool:
delete_status = self.client.triggers.delete(id=id).get("status", None)
if delete_status is None:
- raise ComposioSDKError(message="Delete operation failed to return status.")
+ raise ResourceError("Delete operation failed to return status.")
return delete_status == "success"
def get_integration(self, id: str) -> IntegrationModel:
@@ -1105,7 +1090,7 @@ def get_auth_scheme_for_app(
}
if auth_scheme is not None and auth_scheme not in auth_schemes:
- raise ComposioSDKError(
+ raise InvalidParams(
message=f"Auth scheme `{auth_scheme}` not found for app `{app}`"
)
@@ -1117,7 +1102,7 @@ def get_auth_scheme_for_app(
scheme = t.cast(AuthSchemeType, scheme)
return auth_schemes[scheme]
- raise ComposioSDKError(
+ raise ErrorFetchingResource(
message=(
f"Error getting expected params for {app=}, {auth_scheme=}, "
f"available_schems={list(auth_schemes)}"
@@ -1190,7 +1175,7 @@ def get_expected_params_for_user(
if integration_id is not None:
response = self._get_expected_params_from_integration_id(id=integration_id)
if auth_scheme is not None and response["auth_scheme"] != auth_scheme:
- raise ComposioSDKError(
+ raise InvalidParams(
message=(
"Auth scheme does not match provided integration ID, "
f"auth scheme associated with integration ID {response['auth_scheme']} "
@@ -1200,9 +1185,7 @@ def get_expected_params_for_user(
return response
if app is None:
- raise ComposioSDKError(
- message="Both `integration_id` and `app` cannot be None"
- )
+ raise InvalidParams("Both `integration_id` and `app` cannot be None")
try:
# Check if integration is available for an app, and if available
@@ -1233,7 +1216,7 @@ def get_expected_params_for_user(
"expected_params": integration.expectedInputFields,
}
- raise ComposioSDKError(
+ raise IntegrationError(
message=(
f"No existing integration found for `{str(app)}`, with auth "
f"scheme {auth_scheme} Please create an integration and use the"
@@ -1251,7 +1234,8 @@ def fetch_expected_integration_params(
if auth_scheme != scheme.auth_mode.upper():
continue
return [f for f in scheme.fields if not f.expected_from_customer]
- raise ComposioSDKError(
+
+ raise InvalidParams(
message=f"{app.name!r} does not support {auth_scheme!r} auth scheme"
)
@@ -1276,8 +1260,9 @@ def create_integration(
def _validate_no_auth_scheme(self, auth_scheme):
if auth_scheme == "NO_AUTH":
- raise ComposioSDKError(
- "'NO_AUTH' does not require initiating a connection. Please use the `execute_action` method directly to execute actions for this app."
+ raise InvalidParams(
+ "'NO_AUTH' does not require initiating a connection. Please use "
+ "the `execute_action` method directly to execute actions for this app."
)
def initiate_connection(
@@ -1305,13 +1290,13 @@ def initiate_connection(
"""
if auth_scheme is not None and auth_scheme not in AUTH_SCHEME_WITH_INITIATE:
self._validate_no_auth_scheme(auth_scheme)
- raise ComposioSDKError(
+ raise InvalidParams(
f"'auth_scheme' must be one of {AUTH_SCHEME_WITH_INITIATE}"
)
if integration_id is None:
if app is None:
- raise ComposioSDKError(
+ raise InvalidParams(
message="Both `integration_id` and `app` cannot be None"
)
@@ -1320,7 +1305,7 @@ def initiate_connection(
if auth_scheme is not None and auth_scheme not in AUTH_SCHEME_WITH_INITIATE:
self._validate_no_auth_scheme(auth_scheme)
- raise ComposioSDKError(
+ raise InvalidParams(
f"'auth_scheme' must be one of {AUTH_SCHEME_WITH_INITIATE}"
)
@@ -1355,7 +1340,7 @@ def initiate_connection(
if param.name not in connected_account_params
]
if unavailable_params:
- raise ComposioSDKError(
+ raise InvalidParams(
f"Expected 'connected_account_params' to provide these params: {unavailable_params}"
)
@@ -1394,7 +1379,7 @@ def _validate_auth_config(
field.name for field in required_fields if field.name not in auth_config
]
if unavailable_fields:
- raise ComposioSDKError(
+ raise InvalidParams(
f"Expected 'auth_config' to provide these fields: {unavailable_fields}"
) from None
@@ -1599,7 +1584,7 @@ def _log_service(self) -> LogIngester:
@property
def api_key(self) -> str:
if self._api_key is None:
- raise ApiKeyNotProvidedError()
+ raise ApiKeyNotProvidedError
return self._api_key
def _init_client(self) -> Composio:
@@ -1672,8 +1657,7 @@ def _validate_connection_ids(
if len(invalid) == 0:
return valid
-
- raise ComposioSDKError(message=f"Invalid connected accounts found: {invalid}")
+ raise InvalidConnectedAccount(f"Invalid connected accounts found: {invalid}")
def check_connected_account(self, action: ActionType) -> None:
"""Check if connected account is required and if required it exists or not."""
@@ -1691,11 +1675,10 @@ def check_connected_account(self, action: ActionType) -> None:
)
if action.app not in [
- # Normalize app names/ids coming from API
- connection.appUniqueId.upper()
+ connection.appUniqueId.upper() # Normalize app names/ids coming from API
for connection in self._connected_accounts
]:
- raise ComposioSDKError(
+ raise ConnectedAccountNotFoundError(
f"No connected account found for app `{action.app}`; "
f"Run `composio add {action.app.lower()}` to fix this"
)
@@ -1863,7 +1846,7 @@ def execute_action(
(action,) = self._version_lock.apply(actions=[action])
if _check_requested_actions and action.slug not in self._requested_actions:
- raise ComposioSDKError(
+ raise InvalidParams(
f"Action {action.slug} is being called, but was never requested by the toolset. "
"Make sure that the actions you are trying to execute are requested in your "
"`get_tools()` call."
@@ -1983,7 +1966,7 @@ def execute_request(
:param app: App type to use for connection lookup
:returns: Response from the proxy request
- :raises: ComposioSDKError: If neither connection_id nor app is provided
+    :raises InvalidParams: If neither connection_id nor app is provided
"""
if app is not None and connection_id is None:
connection_id = (
@@ -1991,7 +1974,7 @@ def execute_request(
)
if connection_id is None:
- raise ComposioSDKError(
+ raise InvalidParams(
"Please provide connection id or app name to execute a request"
)
diff --git a/python/composio/utils/sentry.py b/python/composio/utils/sentry.py
index 2b13f25cf4e..ca76dcc8e8f 100644
--- a/python/composio/utils/sentry.py
+++ b/python/composio/utils/sentry.py
@@ -7,7 +7,6 @@
import types
import typing as t
from functools import cache
-from pathlib import Path
import requests
import sentry_sdk
@@ -22,6 +21,8 @@
import sentry_sdk.integrations.threading
import sentry_sdk.types
+from composio.constants import LOCAL_CACHE_DIRECTORY
+
@cache
def fetch_dsn() -> t.Optional[str]:
@@ -35,7 +36,7 @@ def fetch_dsn() -> t.Optional[str]:
def get_sentry_config() -> t.Optional[t.Dict]:
- user_file = Path.home() / ".composio" / "user_data.json"
+ user_file = LOCAL_CACHE_DIRECTORY / "user_data.json"
if not user_file.exists():
update_dsn()
@@ -110,7 +111,7 @@ def init():
@atexit.register
def update_dsn() -> None:
- user_file = Path.home() / ".composio" / "user_data.json"
+ user_file = LOCAL_CACHE_DIRECTORY / "user_data.json"
if user_file.exists():
try:
data = json.loads(user_file.read_text(encoding="utf-8"))
diff --git a/python/dockerfiles/Dockerfile b/python/dockerfiles/Dockerfile
index 460169c6ca2..d812d6dfdc9 100644
--- a/python/dockerfiles/Dockerfile
+++ b/python/dockerfiles/Dockerfile
@@ -19,7 +19,7 @@ RUN /bin/python3 -m venv .composio/venv
RUN export PATH=$PATH:$(pwd)/.composio/venv/bin
# Install composio
-RUN python -m pip install composio-core[all]==0.7.1 fastapi playwright uvicorn
+RUN python -m pip install composio-core[all]==0.7.2 fastapi playwright uvicorn
# Install playwright deps
RUN playwright install-deps
diff --git a/python/plugins/agno/setup.py b/python/plugins/agno/setup.py
index e4f0b38c220..2a5c6e07640 100644
--- a/python/plugins/agno/setup.py
+++ b/python/plugins/agno/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_agno",
- version="0.7.1",
+ version="0.7.2",
author="Devanshu",
author_email="tech@composio.dev",
description="Use Composio to get an array of tools with your Agno Plugin.",
diff --git a/python/plugins/autogen/setup.py b/python/plugins/autogen/setup.py
index fec8cad89b8..d553c2eb32a 100644
--- a/python/plugins/autogen/setup.py
+++ b/python/plugins/autogen/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_autogen",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Autogen agent.",
diff --git a/python/plugins/camel/composio_camel/toolset.py b/python/plugins/camel/composio_camel/toolset.py
index a7fb18b1473..9060f784b97 100644
--- a/python/plugins/camel/composio_camel/toolset.py
+++ b/python/plugins/camel/composio_camel/toolset.py
@@ -12,6 +12,7 @@
from composio import Action, ActionType, AppType, TagType
from composio.constants import DEFAULT_ENTITY_ID
+from composio.exceptions import InvalidEntityIdError
from composio.tools import ComposioToolSet as BaseComposioToolSet
from composio.tools.schema import OpenAISchema, SchemaType
from composio.tools.toolset import ProcessorsType
@@ -102,7 +103,7 @@ def validate_entity_id(self, entity_id: str) -> str:
and entity_id != DEFAULT_ENTITY_ID
and self.entity_id != entity_id
):
- raise ValueError(
+ raise InvalidEntityIdError(
"Separate `entity_id` can not be provided during "
"initialization and handling tool calls"
)
diff --git a/python/plugins/camel/setup.py b/python/plugins/camel/setup.py
index 4e719754fe4..95a622dbbd8 100644
--- a/python/plugins/camel/setup.py
+++ b/python/plugins/camel/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_camel",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Claude LLMs.",
diff --git a/python/plugins/claude/composio_claude/toolset.py b/python/plugins/claude/composio_claude/toolset.py
index f592caa452f..7a0fa3301bd 100644
--- a/python/plugins/claude/composio_claude/toolset.py
+++ b/python/plugins/claude/composio_claude/toolset.py
@@ -3,7 +3,10 @@
import typing_extensions as te
-from composio.exceptions import ComposioSDKError
+from composio.exceptions import (
+ ErrorProcessingToolExecutionRequest,
+ InvalidEntityIdError,
+)
from composio.utils import help_msg
@@ -82,7 +85,7 @@ def validate_entity_id(self, entity_id: str) -> str:
and entity_id != DEFAULT_ENTITY_ID
and self.entity_id != entity_id
):
- raise ValueError(
+ raise InvalidEntityIdError(
"separate `entity_id` can not be provided during "
"initialization and handelling tool calls"
)
@@ -181,7 +184,7 @@ def handle_tool_calls(
# Since llm_response can also be a dictionary, we should only proceed
# towards action execution if we have the correct type of llm_response
if not isinstance(llm_response, (dict, ToolsBetaMessage)):
- raise ComposioSDKError(
+ raise ErrorProcessingToolExecutionRequest(
"llm_response should be of type `Message` or castable to type `Message`, "
f"received object {llm_response} of type {type(llm_response)}"
)
diff --git a/python/plugins/claude/setup.py b/python/plugins/claude/setup.py
index 8a6b21e7aef..05ee84e005e 100644
--- a/python/plugins/claude/setup.py
+++ b/python/plugins/claude/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_claude",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Claude LLMs.",
diff --git a/python/plugins/crew_ai/setup.py b/python/plugins/crew_ai/setup.py
index d87e0b0720d..c104c3883b0 100644
--- a/python/plugins/crew_ai/setup.py
+++ b/python/plugins/crew_ai/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_crewai",
- version="0.7.1",
+ version="0.7.2",
author="Himanshu",
author_email="himanshu@composio.dev",
description="Use Composio to get an array of tools with your CrewAI agent.",
diff --git a/python/plugins/gemini/composio_gemini/toolset.py b/python/plugins/gemini/composio_gemini/toolset.py
index 69e66716bb1..9f229b9d569 100644
--- a/python/plugins/gemini/composio_gemini/toolset.py
+++ b/python/plugins/gemini/composio_gemini/toolset.py
@@ -78,7 +78,9 @@ def _execute(**kwargs: t.Any) -> t.Dict:
closure=_execute.__closure__,
)
parameters = get_signature_format_from_schema_params(
- schema_params=schema.parameters.model_dump(),
+ schema_params=schema.parameters.model_dump(
+ exclude_none=True,
+ ),
)
setattr(function, "__signature__", Signature(parameters=parameters))
setattr(
@@ -95,6 +97,7 @@ def get_tools(
apps: t.Optional[t.Sequence[AppType]] = None,
tags: t.Optional[t.List[TagType]] = None,
entity_id: t.Optional[str] = None,
+ check_connected_accounts: bool = True,
) -> t.List[t.Callable]:
"""
Get composio tools wrapped as Google Genai SDK compatible function calling object.
@@ -114,5 +117,6 @@ def get_tools(
apps=apps,
tags=tags,
_populate_requested=True,
+ check_connected_accounts=check_connected_accounts,
)
]
diff --git a/python/plugins/gemini/setup.py b/python/plugins/gemini/setup.py
index f9924a5efd8..8105107d535 100644
--- a/python/plugins/gemini/setup.py
+++ b/python/plugins/gemini/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_gemini",
- version="0.7.1",
+ version="0.7.2",
author="Composio",
author_email="tech@composio.dev",
description="Use Composio to get an array of tools with your Gemini agent.",
diff --git a/python/plugins/google/composio_google/toolset.py b/python/plugins/google/composio_google/toolset.py
index f7ca73529e5..88bb6c118a1 100644
--- a/python/plugins/google/composio_google/toolset.py
+++ b/python/plugins/google/composio_google/toolset.py
@@ -17,6 +17,7 @@
from composio import Action, ActionType, AppType, TagType
from composio.constants import DEFAULT_ENTITY_ID
+from composio.exceptions import InvalidEntityIdError
from composio.tools import ComposioToolSet as BaseComposioToolSet
from composio.utils import help_msg
from composio.utils.shared import json_schema_to_model
@@ -75,7 +76,7 @@ def validate_entity_id(self, entity_id: str) -> str:
and entity_id != DEFAULT_ENTITY_ID
and self.entity_id != entity_id
):
- raise ValueError(
+ raise InvalidEntityIdError(
"separate `entity_id` can not be provided during "
"initialization and handling tool calls"
)
diff --git a/python/plugins/google/setup.py b/python/plugins/google/setup.py
index 81a19ddede0..d98c259b1af 100644
--- a/python/plugins/google/setup.py
+++ b/python/plugins/google/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_google",
- version="0.7.1",
+ version="0.7.2",
author="Assistant",
author_email="karan@composio.dev",
description="Use Composio to get an array of tools with your Google AI Python Gemini model.",
diff --git a/python/plugins/griptape/composio_griptape/toolset.py b/python/plugins/griptape/composio_griptape/toolset.py
index 4adbf9bbd1d..576e6bcf7a4 100644
--- a/python/plugins/griptape/composio_griptape/toolset.py
+++ b/python/plugins/griptape/composio_griptape/toolset.py
@@ -8,6 +8,7 @@
from schema import Literal, Schema
from composio import Action, ActionType, AppType, TagType
+from composio.exceptions import InvalidSchemaError
from composio.tools import ComposioToolSet as BaseComposioToolSet
from composio.tools.toolset import ProcessorsType
from composio.utils import help_msg
@@ -74,8 +75,9 @@ def _wrap_tool(
)
schema_dtype = list[schema_array_dtype] if schema_array_dtype else list # type: ignore
else:
- raise TypeError(
- f"Some dtype of current schema are not handled yet. Current Schema: {param_body}"
+ raise InvalidSchemaError(
+                "Some dtypes of the current schema are not handled yet. "
+ f"Current Schema: {param_body}"
)
schema_dict[schema_key] = schema_dtype
diff --git a/python/plugins/griptape/setup.py b/python/plugins/griptape/setup.py
index df3bf33e791..e1aa5f7f924 100644
--- a/python/plugins/griptape/setup.py
+++ b/python/plugins/griptape/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_griptape",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Griptape workflow.",
diff --git a/python/plugins/julep/setup.py b/python/plugins/julep/setup.py
index cbb4ccd29ff..7acd790071c 100644
--- a/python/plugins/julep/setup.py
+++ b/python/plugins/julep/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_julep",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Julep workflow.",
diff --git a/python/plugins/langchain/setup.py b/python/plugins/langchain/setup.py
index 9c9698c1edf..a4b2cc09270 100644
--- a/python/plugins/langchain/setup.py
+++ b/python/plugins/langchain/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_langchain",
- version="0.7.1",
+ version="0.7.2",
author="Karan",
author_email="karan@composio.dev",
description="Use Composio to get an array of tools with your LangChain agent.",
diff --git a/python/plugins/langgraph/setup.py b/python/plugins/langgraph/setup.py
index 17e8a774cea..034fb9b1b4c 100644
--- a/python/plugins/langgraph/setup.py
+++ b/python/plugins/langgraph/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_langgraph",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get array of tools with LangGraph Agent Workflows",
diff --git a/python/plugins/llamaindex/setup.py b/python/plugins/llamaindex/setup.py
index 319b4b63ccf..d3b2a506e47 100644
--- a/python/plugins/llamaindex/setup.py
+++ b/python/plugins/llamaindex/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_llamaindex",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your LlamaIndex agent.",
diff --git a/python/plugins/lyzr/setup.py b/python/plugins/lyzr/setup.py
index 3a5dbfd4ecf..38b6c8b88e2 100644
--- a/python/plugins/lyzr/setup.py
+++ b/python/plugins/lyzr/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_lyzr",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Lyzr workflow.",
diff --git a/python/plugins/openai/composio_openai/toolset.py b/python/plugins/openai/composio_openai/toolset.py
index 2432b55a094..cb4a5d449e9 100644
--- a/python/plugins/openai/composio_openai/toolset.py
+++ b/python/plugins/openai/composio_openai/toolset.py
@@ -19,6 +19,7 @@
from composio import ActionType, AppType, TagType
from composio.constants import DEFAULT_ENTITY_ID
+from composio.exceptions import InvalidEntityIdError
from composio.tools import ComposioToolSet as BaseComposioToolSet
from composio.tools.schema import OpenAISchema, SchemaType
from composio.tools.toolset import ProcessorsType
@@ -80,7 +81,7 @@ def validate_entity_id(self, entity_id: str) -> str:
and entity_id != DEFAULT_ENTITY_ID
and self.entity_id != entity_id
):
- raise ValueError(
+ raise InvalidEntityIdError(
"separate `entity_id` can not be provided during "
"initialization and handelling tool calls"
)
diff --git a/python/plugins/openai/setup.py b/python/plugins/openai/setup.py
index 867afac937c..c852a7ea8f9 100644
--- a/python/plugins/openai/setup.py
+++ b/python/plugins/openai/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_openai",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your OpenAI Function Call.",
diff --git a/python/plugins/phidata/setup.py b/python/plugins/phidata/setup.py
index 0a6cb980358..e61213f2fc3 100644
--- a/python/plugins/phidata/setup.py
+++ b/python/plugins/phidata/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_phidata",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio to get an array of tools with your Phidata Plugin.",
diff --git a/python/plugins/praisonai/composio_praisonai/toolset.py b/python/plugins/praisonai/composio_praisonai/toolset.py
index 6a9433b53aa..30733718417 100644
--- a/python/plugins/praisonai/composio_praisonai/toolset.py
+++ b/python/plugins/praisonai/composio_praisonai/toolset.py
@@ -7,6 +7,7 @@
from composio import Action, ActionType, AppType
from composio import ComposioToolSet as BaseComposioToolSet
from composio import TagType
+from composio.exceptions import InvalidSchemaError
from composio.tools.toolset import ProcessorsType
from composio.utils import help_msg
@@ -64,8 +65,9 @@ def _process_input_schema(
f"list[{schema_array_dtype}]" if schema_array_dtype else "list"
)
else:
- raise TypeError(
- f"Some dtype of current schema are not handled yet. Current Schema: {param_body}"
+ raise InvalidSchemaError(
+                "Some dtypes of the current schema are not handled yet. "
+ f"Current Schema: {param_body}"
)
input_model_lines.append(
diff --git a/python/plugins/praisonai/setup.py b/python/plugins/praisonai/setup.py
index 77366cb1a0a..d60b8f12d6c 100644
--- a/python/plugins/praisonai/setup.py
+++ b/python/plugins/praisonai/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_praisonai",
- version="0.7.1",
+ version="0.7.2",
author="Sawradip",
author_email="sawradip@composio.dev",
description="Use Composio Tools to enhance your PraisonAI agents capabilities.",
diff --git a/python/plugins/pydanticai/setup.py b/python/plugins/pydanticai/setup.py
index 05c5b6804db..c867286d4e8 100644
--- a/python/plugins/pydanticai/setup.py
+++ b/python/plugins/pydanticai/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_pydanticai",
- version="0.7.1",
+ version="0.7.2",
author="Siddharth",
author_email="tech@composio.dev",
description="Use Composio to get array of strongly typed tools for Pydantic AI",
diff --git a/python/plugins/smolagent/setup.py b/python/plugins/smolagent/setup.py
index c2a9a05b6fa..16022342541 100644
--- a/python/plugins/smolagent/setup.py
+++ b/python/plugins/smolagent/setup.py
@@ -9,7 +9,7 @@
setup(
name="composio_smol",
- version="0.7.1",
+ version="0.7.2",
author="Composio",
author_email="tech@composio.dev",
description="Use Composio to get array of strongly typed tools for Smol Agents",
diff --git a/python/plugins/smolagent/smol_demo.py b/python/plugins/smolagent/smol_demo.py
index f469167b5bf..e9a4b369314 100644
--- a/python/plugins/smolagent/smol_demo.py
+++ b/python/plugins/smolagent/smol_demo.py
@@ -11,6 +11,6 @@
actions=[Action.GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER],
)
# Create agent with Composio tools
-agent = CodeAgent(tools=tools, model=HfApiModel())# type: ignore[import-untyped]
+agent = CodeAgent(tools=tools, model=HfApiModel()) # type: ignore[import-untyped]
agent.run("Star the composiohq/composio repo")
diff --git a/python/setup.py b/python/setup.py
index d14cad0d7af..0eedf35642d 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -89,7 +89,7 @@ def scan_for_package_data(
setup(
name="composio_core",
- version="0.7.1",
+ version="0.7.2",
author="Utkarsh",
author_email="utkarsh@composio.dev",
description="Core package to act as a bridge between composio platform and other services.",
diff --git a/python/swe/agent/benchmark.py b/python/swe/agent/benchmark.py
index 285ba8736da..cd35a985486 100644
--- a/python/swe/agent/benchmark.py
+++ b/python/swe/agent/benchmark.py
@@ -7,7 +7,7 @@
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List
-from langchain_aws import BedrockChat
+from langchain_aws import ChatBedrock
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI
from langgraph.errors import GraphRecursionError
@@ -41,7 +41,7 @@ def retry_with_exponential_backoff(func, *args, **kwargs):
def get_llm_response(system_prompt: str, human_prompt: str) -> str:
try:
if MODEL == "claude":
- client = BedrockChat(
+ client = ChatBedrock(
credentials_profile_name="default",
model_id="anthropic.claude-3-5-sonnet-20240620-v1:0",
region_name="us-west-2",
diff --git a/python/swe/dockerfiles/build.py b/python/swe/dockerfiles/build.py
index 9cdc225d4a7..cb4b1183b80 100644
--- a/python/swe/dockerfiles/build.py
+++ b/python/swe/dockerfiles/build.py
@@ -1,4 +1,6 @@
+import signal
import subprocess
+import threading
import typing as t
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
@@ -9,7 +11,89 @@
logs = Path.cwd() / "logs"
logs.mkdir(exist_ok=True)
-errors = []
+# Lock for synchronizing access to the success log
+success_lock = threading.Lock()
+# Global set to keep track of successfully pushed images
+successful_builds = set()
+
+
+# Load successful builds from logs/success.log into the global set
+def load_successful_builds() -> None:
+ success_file = logs / "success.log"
+ if success_file.exists():
+ with open(success_file, "r", encoding="utf-8") as f:
+ for line in f:
+ tag = line.strip()
+ if tag:
+ successful_builds.add(tag)
+
+
+# Record a successful build by writing to the log file and updating the global set
+def record_success(tag: str) -> None:
+ with success_lock:
+ successful_builds.add(tag)
+ with open(logs / "success.log", "a", encoding="utf-8") as f:
+ f.write(tag + "\n")
+
+
+# Insert new global variables and functions for graceful stop and resume support
+stop_requested = False
+
+
+def handle_stop(signum, frame):
+ global stop_requested
+ print("Received stop signal. Gracefully stopping new builds...")
+ stop_requested = True
+
+
+def _base(generated: Path, multi: bool = False) -> None:
+ base = generated / "base"
+ with ThreadPoolExecutor() as executor:
+ futures = []
+ for file in base.iterdir():
+ if stop_requested:
+ print("Graceful stop activated. Halting base builds.")
+ break
+ try:
+ _, tag_part = file.name.split(".", maxsplit=1)
+ except ValueError:
+ print(f"Skipping invalid file name format: {file.name}")
+ continue
+ full_tag = f"composio/swe:{tag_part}"
+ if full_tag in successful_builds:
+ print(f"Skipping build for {full_tag} as it has been already pushed.")
+ continue
+ futures.append(executor.submit(_build, file, tag_part, multi))
+ [fut.result() for fut in futures]
+
+
+def _swes(generated: Path, multi: bool = False) -> None:
+ with ThreadPoolExecutor() as executor:
+ futures = []
+ for child in generated.iterdir():
+ if child.name == "base":
+ continue
+ if child.is_file():
+ continue
+ repo = child.name.replace("__", "-")
+ for version in child.iterdir():
+ if stop_requested:
+ print("Graceful stop activated. Halting SWES builds.")
+ break
+ tag_part = f"{repo}-{version.name.replace('.', '-') }"
+ full_tag = f"composio/swe:{tag_part}"
+ if full_tag in successful_builds:
+ print(
+ f"Skipping build for {full_tag} as it has been already pushed."
+ )
+ continue
+ futures.append(
+ executor.submit(_build, version / "Dockerfile", tag_part, multi)
+ )
+ if stop_requested:
+ break
+ [fut.result() for fut in futures]
+
ARCHS = ("linux/arm64", "linux/amd64")
@@ -50,81 +134,44 @@ def _build(file: Path, tag: str, multi: bool, *flags: str) -> None:
if process.returncode == 0:
print(f"Finished build for {tag}")
+ record_success(tag)
else:
print(f"Error building {tag} - {logs / log}")
- errors.append(f"Error building {tag} - {logs / log}")
-
-
-def _base(generated: Path, multi: bool = False) -> None:
- base = generated / "base"
- with ThreadPoolExecutor() as executor:
- futures = []
- for file in base.iterdir():
- _, tag = file.name.split(".", maxsplit=1)
- futures.append(executor.submit(_build, file, tag, multi))
- _ = [fut.result() for fut in futures]
-
-
-def _swes(generated: Path, multi: bool = False) -> None:
- with ThreadPoolExecutor() as executor:
- futures = []
- for child in generated.iterdir():
- if child.name == "base":
- continue
-
- if child.is_file():
- continue
-
- repo = child.name.replace("__", "-")
- for version in child.iterdir():
- tag = f"{repo}-{version.name.replace('.', '-')}"
- futures.append(
- executor.submit(
- _build,
- version / "Dockerfile",
- tag,
- multi,
- )
- )
-
- _ = [fut.result() for fut in futures]
def _pyenv(file: t.Optional[Path] = None, multi: bool = False) -> None:
print("Print building pyenv base")
file = file or Path(__file__).parent / "templates" / "Dockerfile.pyenv"
+ full_tag = "composio/swe:pyenv"
+ if full_tag in successful_builds:
+ print(f"Skipping build for {full_tag} as it has already been pushed.")
+ return
_build(file=file, tag="pyenv", multi=multi)
@click.command(name="build")
-@click.argument(
- "generated",
- type=str,
- default="./generated",
-)
+@click.argument("generated", type=str, default="./generated")
@click.option(
- "--multi",
- is_flag=True,
- help="Use this flag to build multi-plaform images",
+ "--multi", is_flag=True, help="Use this flag to build multi-plaform images"
)
def build(generated: Path, multi: bool = False) -> None:
"""Build docker images for SWEKIT."""
+ load_successful_builds()
+ signal.signal(signal.SIGINT, handle_stop)
+ signal.signal(signal.SIGTERM, handle_stop)
+
_pyenv(multi=multi)
- if len(errors) > 0:
- print("==== Errors ====")
- print("\n".join(errors))
- return
+ print("==== Successful Builds (after pyenv) ====")
+ print("\n".join(successful_builds))
generated = Path(generated or Path.cwd() / "generated").resolve()
_base(generated=generated, multi=multi)
- if len(errors) > 0:
- print("==== Errors ====")
- print("\n".join(errors))
- return
+ print("==== Successful Builds (after base) ====")
+ print("\n".join(successful_builds))
_swes(generated=generated, multi=multi)
- print("==== Errors ====")
- print("\n".join(errors))
+ print("==== Final Successful Builds ====")
+ print("\n".join(successful_builds))
if __name__ == "__main__":
diff --git a/python/swe/dockerfiles/create_index.py b/python/swe/dockerfiles/create_index.py
index c283478b7dd..d3b58914c90 100644
--- a/python/swe/dockerfiles/create_index.py
+++ b/python/swe/dockerfiles/create_index.py
@@ -5,13 +5,14 @@
from pathlib import Path
import click
-from swebench import get_eval_refs
+from swebench.harness.constants import SWEbenchInstance
+from swebench.harness.utils import load_swebench_dataset
from composio import Action, ComposioToolSet
from composio.utils.logging import WithLogger
-def group_task_instances(task_instances):
+def group_task_instances(task_instances: list[SWEbenchInstance]):
groups = {}
for instance in task_instances:
repo = instance["repo"]
@@ -43,11 +44,25 @@ def __init__(
self.outdir.mkdir()
def generate(self):
- task_instances = get_eval_refs(data_path_or_name=self.dataset)
- task_instance_groups = group_task_instances(task_instances.values())
+ task_instances = load_swebench_dataset(name=self.dataset)
+ task_instance_groups = group_task_instances(task_instances)
for repo, versions in task_instance_groups.items():
self.logger.info(f"Repo {repo} with {set(versions.keys())} versions")
for version, instances in versions.items():
+ outname = _repo_name(repo)
+ docker_outdir = Path("generated") / outname / version
+
+ # Check if files in generated directory are complete
+ if (
+ docker_outdir.exists()
+ and (docker_outdir / "deeplake").exists()
+ and (docker_outdir / "fqdn_cache.json").exists()
+ ):
+ self.logger.info(
+ f"Skipping {repo} {version} - files already exist in generated directory"
+ )
+ continue
+
self.logger.info(f"\tGenerating for version - {version}")
self.create_index(
repository=repo, version=version, setup_ref_instance=instances[0]
@@ -58,8 +73,8 @@ def create_index(
):
outname = _repo_name(repository)
outdir = self.outdir / outname / version
- if outdir.exists():
- return
+ docker_outdir = Path("generated") / outname / version
+
repo_url = f"https://github.com/{repository}.git"
base_commit = setup_ref_instance["base_commit"]
if not (outdir / outname).exists():
@@ -76,10 +91,16 @@ def create_index(
["git", "checkout", base_commit], cwd=outdir / outname, check=True
)
- composio_toolset = ComposioToolSet()
+ composio_toolset = ComposioToolSet(
+ metadata={
+ Action.CODE_ANALYSIS_TOOL_CREATE_CODE_MAP: {
+ "dir_to_index_path": str(outdir / outname),
+ },
+ },
+ )
composio_toolset.execute_action(
action=Action.CODE_ANALYSIS_TOOL_CREATE_CODE_MAP,
- params={"dir_to_index_path": str(outdir / outname)},
+ params={},
)
with open(f"{Path.home()}/.composio/tmp/{outname}/fqdn_cache.json") as f:
fqdn_index = json.load(f)
@@ -91,8 +112,6 @@ def create_index(
)
fqdn_index[k] = v
- docker_outdir = Path("generated") / outname / version
- # docker_outdir.mkdir(exist_ok=True, parents=True)
with open(
docker_outdir / "fqdn_cache.json",
"w",
@@ -100,7 +119,6 @@ def create_index(
json.dump(fqdn_index, f, indent=4)
DEEPLAKE_PATH = docker_outdir / "deeplake"
- # DEEPLAKE_PATH.mkdir(exist_ok=True, parents=True)
if not DEEPLAKE_PATH.exists():
shutil.copytree(
f"{Path.home()}/.composio/tmp/{outname}/deeplake",
diff --git a/python/swe/setup.py b/python/swe/setup.py
index aa3c7dc6225..d007a81d2bf 100644
--- a/python/swe/setup.py
+++ b/python/swe/setup.py
@@ -35,7 +35,7 @@ def scan_for_package_data(
setup(
name="swekit",
- version="0.4.1",
+ version="0.4.2",
author="Shubhra",
author_email="shubhra@composio.dev",
description="Tools for running a SWE agent using Composio platform",
diff --git a/python/tox.ini b/python/tox.ini
index 63e47c7905e..576da38d263 100644
--- a/python/tox.ini
+++ b/python/tox.ini
@@ -7,7 +7,7 @@ basepython = python
deps =
isort==5.12.0
commands =
- isort composio/ scripts/ tests/ swe/ --profile black
+ isort composio/ scripts/ tests/ swe/ --profile black --skip swe/dockerfiles/generated --skip swe/dockerfiles/indexed
isort plugins/ --profile black
[testenv:isort-check]
@@ -15,7 +15,7 @@ basepython = python3
deps =
isort==5.12.0
commands =
- isort composio/ scripts/ tests/ swe/ --check --profile black
+ isort composio/ scripts/ tests/ swe/ --check --profile black --skip swe/dockerfiles/generated --skip swe/dockerfiles/indexed
isort plugins/ --check --profile black
[testenv:black]
@@ -23,7 +23,7 @@ basepython = python
deps =
black==24.10.0
commands =
- black composio/ scripts/ tests/ swe/
+ black composio/ scripts/ tests/ swe/ --exclude "swe/dockerfiles/(generated|indexed)"
black plugins/
[testenv:black-check]
@@ -31,7 +31,7 @@ basepython = python3
deps =
black==24.10.0
commands =
- black composio/ scripts/ tests/ swe/ --check
+ black composio/ scripts/ tests/ swe/ --check --exclude "swe/dockerfiles/(generated|indexed)"
black plugins/ --check
[testenv:black-diff]
@@ -39,7 +39,7 @@ basepython = python3
deps =
black==24.10.0
commands =
- black composio/ scripts/ tests/ swe/ --check --diff
+ black composio/ scripts/ tests/ swe/ --check --diff --exclude "swe/dockerfiles/(generated|indexed)"
black plugins/ --check --diff
[testenv:mypy]
@@ -171,7 +171,7 @@ sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,PLUGINS,PACKAGES,LOCALFOLDER
[flake8]
max_line_length = 200
-exclude= **/build, **/dist
+exclude = **/build, **/dist, swe/dockerfiles/generated/*, swe/dockerfiles/indexed/*
per-file-ignores =
__init__.py:F401,W503
tests/**:E501
@@ -179,7 +179,7 @@ ignore = E231, W291, W503, E704
[mypy]
strict_optional = True
-exclude=plugins/.*/setup\.py|plugins/.*/build/lib/|swe/build/lib/
+exclude = plugins/.*/setup\.py|plugins/.*/build/lib/|swe/build/lib/|swe/dockerfiles/generated/.*|swe/dockerfiles/indexed/.*
ignore_missing_imports = True
[mypy-requests.*]