feat(vscode): feed docs into copilot integration (RAG) (#2404)
MaxKless authored Feb 20, 2025
1 parent f01befb commit a307f07
Showing 8 changed files with 209 additions and 81 deletions.
114 changes: 114 additions & 0 deletions libs/vscode/copilot/src/lib/context.ts
@@ -0,0 +1,114 @@
import { readNxJson } from '@nx-console/shared-npm';
import { GeneratorCollectionInfo } from '@nx-console/shared-schema';
import { withTimeout } from '@nx-console/shared-utils';
import { getGenerators, getNxWorkspace } from '@nx-console/vscode-nx-workspace';
import { getOutputChannel } from '@nx-console/vscode-output-channels';
import type { NxJsonConfiguration, ProjectGraph } from 'nx/src/devkit-exports';
import { xhr } from 'request-light';
import {
  ChatRequestTurn,
  ChatResponseStream,
  ChatResponseTurn,
  MarkdownString,
} from 'vscode';
import { chatResponseToString } from './prompts/history';

export async function getProjectGraph(
  stream: ChatResponseStream,
): Promise<ProjectGraph | undefined> {
  let projectGraph: ProjectGraph | undefined;
  try {
    await withTimeout<void>(async () => {
      const workspace = await getNxWorkspace();
      projectGraph = workspace?.projectGraph;
    }, 10000);
  } catch (e) {
    projectGraph = undefined;
  }
  if (
    projectGraph === undefined ||
    Object.keys(projectGraph.nodes).length === 0
  ) {
    const md = new MarkdownString();
    md.supportThemeIcons = true;
    md.appendMarkdown(
      '$(warning) Unable to retrieve workspace information. Proceeding without workspace data. ',
    );
    stream.markdown(md);
  }
  return projectGraph;
}

export async function getGeneratorNamesAndDescriptions(): Promise<
  {
    name: string;
    description: string;
  }[]
> {
  let generators: GeneratorCollectionInfo[];
  try {
    await withTimeout<void>(async () => {
      generators = await getGenerators();
    }, 3000);
  } catch (e) {
    generators = [];
  }

  return generators.map((generator) => ({
    name: generator.name,
    description: generator.data.description,
  }));
}

export async function tryReadNxJson(
  workspacePath: string,
): Promise<NxJsonConfiguration | undefined> {
  try {
    return await readNxJson(workspacePath);
  } catch (e) {
    return undefined;
  }
}

export type DocsPageSection = {
  heading: string;
  longer_heading: string;
  content: string;
  similarity: number;
};

export async function getDocsContext(
  prompt: string,
  history: ReadonlyArray<ChatRequestTurn | ChatResponseTurn>,
): Promise<DocsPageSection[]> {
  try {
    const messages = history.map((chatItem) => ({
      role: chatItem instanceof ChatRequestTurn ? 'user' : 'assistant',
      content:
        chatItem instanceof ChatRequestTurn
          ? chatItem.prompt
          : chatResponseToString(chatItem),
    }));
    messages.push({
      role: 'user',
      content: prompt,
    });

    const req = await xhr({
      url: 'https://nx.dev/api/query-ai-embeddings',
      type: 'POST',
      headers: { 'Content-Type': 'application/json' },
      data: JSON.stringify({
        messages,
      }),
    });

    const response = JSON.parse(req.responseText);
    return response.context.pageSections;
  } catch (error) {
    getOutputChannel().appendLine(
      `Error fetching AI context: ${JSON.stringify(error)}`,
    );
    return [];
  }
}
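
For orientation, here is a minimal usage sketch (not part of this commit) of how these helpers are combined inside a chat request handler, mirroring the handler in init-copilot.ts below; the function name collectRagContext is made up for illustration.

// Illustrative sketch only; assumes the VS Code chat API types already used in this commit.
import type { ChatContext, ChatRequest, ChatResponseStream } from 'vscode';
import { getDocsContext, getProjectGraph } from './context';

// Hypothetical helper name, not part of the commit.
async function collectRagContext(
  request: ChatRequest,
  context: ChatContext,
  stream: ChatResponseStream,
) {
  stream.progress('Retrieving workspace information...');
  // Resolves to undefined (and emits a warning into the chat stream) if the
  // workspace cannot be read within 10 seconds.
  const projectGraph = await getProjectGraph(stream);

  stream.progress('Retrieving relevant documentation...');
  // Posts the chat history plus the current prompt to the nx.dev embeddings
  // endpoint and returns matching docs sections, or [] on any failure.
  const docsPages = await getDocsContext(request.prompt, context.history);

  return { projectGraph, docsPages };
}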
98 changes: 25 additions & 73 deletions libs/vscode/copilot/src/lib/init-copilot.ts
@@ -33,58 +33,65 @@ import { GeneratePrompt } from './prompts/generate-prompt';
import { NxCopilotPrompt, NxCopilotPromptProps } from './prompts/prompt';
import { GeneratorDetailsTool } from './tools/generator-details-tool';
import yargs = require('yargs');
import { get } from 'http';
import {
  getDocsContext,
  getGeneratorNamesAndDescriptions,
  getProjectGraph,
  tryReadNxJson,
} from './context';

export function initCopilot(context: ExtensionContext) {
  const telemetry = getTelemetry();
  const nxParticipant = chat.createChatParticipant('nx-console.nx', handler);
  nxParticipant.iconPath = Uri.joinPath(
    context.extensionUri,
    'assets',
    'nx.png'
    'nx.png',
  );
  nxParticipant.onDidReceiveFeedback((feedback) => {
    telemetry.logUsage(
      feedback.kind === ChatResultFeedbackKind.Helpful
        ? 'ai.feedback-good'
        : 'ai.feedback-bad'
        : 'ai.feedback-bad',
    );
  });

  context.subscriptions.push(
    nxParticipant,
    lm.registerTool('nx_generator-details', new GeneratorDetailsTool())
    lm.registerTool('nx_generator-details', new GeneratorDetailsTool()),
  );

  context.subscriptions.push(
    commands.registerCommand(
      'nxConsole.adjustGeneratorInUI',
      adjustGeneratorInUI
      adjustGeneratorInUI,
    ),
    commands.registerCommand(
      'nxConsole.executeResponseCommand',
      executeResponseCommand
    )
      executeResponseCommand,
    ),
  );
}

const handler: ChatRequestHandler = async (
  request: ChatRequest,
  context: ChatContext,
  stream: ChatResponseStream,
  token: CancellationToken
  token: CancellationToken,
) => {
  const telemetry = getTelemetry();
  telemetry.logUsage('ai.chat-message');
  const intent = await determineIntent(request);

  const workspacePath = getNxWorkspacePath();

  stream.progress('Retrieving workspace information...');

  const projectGraph = await getProjectGraph(stream);

  const pmExec = (await getPackageManagerCommand(workspacePath)).exec;
  stream.progress('Retrieving relevant documentation...');
  const docsPages = await getDocsContext(request.prompt, context.history);

  const pmExec = (await getPackageManagerCommand(workspacePath)).exec;
  const nxJson = await tryReadNxJson(workspacePath);

  const generatorNamesAndDescriptions =
@@ -96,6 +103,7 @@ const handler: ChatRequestHandler = async (
    packageManagerExecCommand: pmExec,
    projectGraph,
    nxJson,
    docsPages,
  };

  let promptElementAndProps: PromptElementAndProps<
@@ -135,7 +143,7 @@ const handler: ChatRequestHandler = async (
      },
      tools,
    },
    token
    token,
  );

  const startMarker = new RegExp(`"""\\s*${pmExec}\\s+nx\\s*`);
@@ -200,7 +208,7 @@ const handler: ChatRequestHandler = async (
async function renderCommandSnippet(
  snippet: string,
  stream: ChatResponseStream,
  pmExec: string
  pmExec: string,
) {
  snippet = snippet.replace(/\s+/g, ' ');
  const parsedArgs = await yargs.parse(snippet);
@@ -233,55 +241,8 @@ async function renderCommandSnippet(
  }
}

async function getProjectGraph(
  stream: ChatResponseStream
): Promise<ProjectGraph | undefined> {
  let projectGraph: ProjectGraph | undefined;
  try {
    await withTimeout<void>(async () => {
      const workspace = await getNxWorkspace();
      projectGraph = workspace?.projectGraph;
    }, 10000);
  } catch (e) {
    projectGraph = undefined;
  }
  if (
    projectGraph === undefined ||
    Object.keys(projectGraph.nodes).length === 0
  ) {
    const md = new MarkdownString();
    md.supportThemeIcons = true;
    md.appendMarkdown(
      '$(warning) Unable to retrieve workspace information. Proceeding without workspace data. '
    );
    stream.markdown(md);
  }
  return projectGraph;
}

async function getGeneratorNamesAndDescriptions(): Promise<
  {
    name: string;
    description: string;
  }[]
> {
  let generators: GeneratorCollectionInfo[];
  try {
    await withTimeout<void>(async () => {
      generators = await getGenerators();
    }, 3000);
  } catch (e) {
    generators = [];
  }

  return generators.map((generator) => ({
    name: generator.name,
    description: generator.data.description,
  }));
}

async function adjustGeneratorInUI(
  parsedArgs: Awaited<ReturnType<typeof yargs.parse>>
  parsedArgs: Awaited<ReturnType<typeof yargs.parse>>,
) {
  getTelemetry().logUsage('ai.response-interaction', {
    kind: 'adjust-generator',
@@ -291,7 +252,7 @@ async function adjustGeneratorInUI(

function executeResponseCommand(
  snippet: string,
  parsedArgs: Awaited<ReturnType<typeof yargs.parse>>
  parsedArgs: Awaited<ReturnType<typeof yargs.parse>>,
) {
  const isGenerator = parsedArgs._.includes('generate');
  getTelemetry().logUsage('ai.response-interaction', {
@@ -300,21 +261,12 @@ function executeResponseCommand(
  commands.executeCommand(
    EXECUTE_ARBITRARY_COMMAND,
    snippet,
    parsedArgs['cwd']
    parsedArgs['cwd'],
  );
}

export async function tryReadNxJson(
  workspacePath: string
): Promise<NxJsonConfiguration | undefined> {
  try {
    return await readNxJson(workspacePath);
  } catch (e) {
    return undefined;
  }
}
async function determineIntent(
  request: ChatRequest
  request: ChatRequest,
): Promise<'generate' | 'other'> {
  const messages = [
    new LanguageModelChatMessage(
@@ -340,7 +292,7 @@ async function determineIntent(
If the user query is not clear, classify it as <other>. If you are unsure, classify it as <generate>.
Here is the user query: "${request.prompt}"
`
`,
),
];
let buffer = '';
42 changes: 42 additions & 0 deletions libs/vscode/copilot/src/lib/prompts/docs-pages-prompt.tsx
@@ -0,0 +1,42 @@
import {
  PromptElement,
  UserMessage,
  BasePromptElementProps,
} from '@vscode/prompt-tsx';
import { DocsPageSection } from '../context';

interface DocsPagesPromptProps extends BasePromptElementProps {
  docsPages: DocsPageSection[];
  passPriority: true;
}

export class DocsPagesPrompt extends PromptElement<DocsPagesPromptProps> {
  override render() {
    const pages = this.props.docsPages.slice(0, 4);
    return (
      <>
        {pages.length > 0 ? (
          <UserMessage priority={30}>
            Below are some documentation sections that could be relevant to the
            user request. Read through them carefully. You don't have to use
            them to answer the user query, but they might help. Do not assume
            knowledge about nx, its configuration and options. Instead, base
            your replies on the provided metadata and these documentation
            sections. <br />
            <br />
            {pages
              .map(
                (page, index) =>
                  `- ${page.longer_heading ?? page.heading ?? index} <br/> ${
                    page.content
                  }`,
              )
              .join('<br/>')}
          </UserMessage>
        ) : (
          <UserMessage></UserMessage>
        )}
      </>
    );
  }
}
6 changes: 6 additions & 0 deletions libs/vscode/copilot/src/lib/prompts/generate-prompt.tsx
@@ -7,6 +7,7 @@ import { History } from './history';
import { NxProjectGraphPrompt } from './project-graph-prompt';
import { BaseSystemPrompt, NxCopilotPromptProps } from './prompt';
import { NxJsonPrompt } from './nx-json-prompt';
import { DocsPagesPrompt } from './docs-pages-prompt';

interface GeneratePromptProps extends NxCopilotPromptProps {
  generators: { name: string; description: string }[];
@@ -63,6 +64,11 @@ export class GeneratePrompt extends PromptElement<GeneratePromptProps> {
          passPriority
        />
        <NxJsonPrompt nxJson={this.props.nxJson} flexGrow={3} passPriority />
        <DocsPagesPrompt
          docsPages={this.props.docsPages}
          flexGrow={5}
          passPriority
        />
        <History
          history={this.props.history}
          passPriority
4 changes: 2 additions & 2 deletions libs/vscode/copilot/src/lib/prompts/history.tsx
@@ -51,7 +51,7 @@ export class HistoryMessages extends PromptElement<IHistoryMessagesProps> {
        history.push(<UserMessage>{turn.prompt}</UserMessage>);
      } else if (turn instanceof ChatResponseTurn) {
        history.push(
          <AssistantMessage>{chatResponseToString(turn)}</AssistantMessage>
          <AssistantMessage>{chatResponseToString(turn)}</AssistantMessage>,
        );
      }
    }
@@ -66,7 +66,7 @@ export class HistoryMessages extends PromptElement<IHistoryMessagesProps> {
/**
 * Convert the stream of chat response parts into something that can be rendered in the prompt.
 */
function chatResponseToString(response: ChatResponseTurn): string {
export function chatResponseToString(response: ChatResponseTurn): string {
  return response.response
    .map((r) => {
      if (r instanceof ChatResponseMarkdownPart) {