diff --git a/lib/shared/src/index.ts b/lib/shared/src/index.ts
index 8833239dfd7b..03b42d9c814c 100644
--- a/lib/shared/src/index.ts
+++ b/lib/shared/src/index.ts
@@ -334,6 +334,7 @@ export {
     openCtxProviderMetadata,
     FILE_CONTEXT_MENTION_PROVIDER,
     SYMBOL_CONTEXT_MENTION_PROVIDER,
+    DIAGNOSTIC_CONTEXT_MENTION_PROVIDER,
     type ContextMentionProviderID,
     type ContextMentionProviderMetadata,
 } from './mentions/api'
diff --git a/lib/shared/src/mentions/api.ts b/lib/shared/src/mentions/api.ts
index e28bcfdd1833..99bc1d5ea4c7 100644
--- a/lib/shared/src/mentions/api.ts
+++ b/lib/shared/src/mentions/api.ts
@@ -51,13 +51,26 @@ export const SYMBOL_CONTEXT_MENTION_PROVIDER: ContextMentionProviderMetadata & {
     emptyLabel: 'No symbols found',
 }
 
+export const DIAGNOSTIC_CONTEXT_MENTION_PROVIDER: ContextMentionProviderMetadata & { id: 'diagnostic' } =
+    {
+        id: 'diagnostic',
+        title: 'Problems',
+        queryLabel: 'Search for a problem...',
+        emptyLabel: 'No problem found',
+    }
+
 export function mentionProvidersMetadata(options?: {
     disableProviders: ContextMentionProviderID[]
 }): Observable<ContextMentionProviderMetadata[]> {
     return openCtxMentionProviders().map(providers =>
-        [...[FILE_CONTEXT_MENTION_PROVIDER, SYMBOL_CONTEXT_MENTION_PROVIDER], ...providers].filter(
-            provider => !options?.disableProviders.includes(provider.id)
-        )
+        [
+            ...[
+                FILE_CONTEXT_MENTION_PROVIDER,
+                SYMBOL_CONTEXT_MENTION_PROVIDER,
+                DIAGNOSTIC_CONTEXT_MENTION_PROVIDER,
+            ],
+            ...providers,
+        ].filter(provider => !options?.disableProviders.includes(provider.id))
     )
 }
diff --git a/vscode/src/chat/agentic/DeepCody.ts b/vscode/src/chat/agentic/DeepCody.ts
index 316745de1f23..e3f6cf34e1bd 100644
--- a/vscode/src/chat/agentic/DeepCody.ts
+++ b/vscode/src/chat/agentic/DeepCody.ts
@@ -28,6 +28,20 @@ import { CodyToolProvider, type ToolStatusCallback } from './CodyToolProvider'
 import type { ProcessManager } from './ProcessManager'
 import { ACTIONS_TAGS, CODYAGENT_PROMPTS } from './prompts'
 
+export interface OmniboxAgentResponse {
+    next?: OmniboxNextStep
+    contextItems?: ContextItem[]
+    error?: Error
+    abort?: boolean
+}
+
+interface OmniboxNextStep {
+    mode: OmniboxModes
+    query?: string
+}
+
+export type OmniboxModes = 'edit' | 'search' | 'chat'
+
 /**
  * A DeepCodyAgent handles advanced context retrieval and analysis for chat interactions.
  * It uses a multi-step process to:
@@ -61,7 +75,7 @@ export class DeepCodyAgent {
      */
     private stats = { context: 0, loop: 0 }
 
-    public nextActionMode = { mode: 'chat', query: '' }
+    private nextStep: OmniboxNextStep = { mode: 'chat', query: undefined }
 
     constructor(
         protected readonly chatBuilder: ChatBuilder,
@@ -89,6 +103,17 @@
         }
     }
+
+    public async start(
+        requestID: string,
+        chatAbortSignal: AbortSignal,
+        context: ContextItem[]
+    ): Promise<OmniboxAgentResponse> {
+        return {
+            next: this.nextStep,
+            contextItems: await this.getContext(requestID, chatAbortSignal, context),
+        }
+    }
+
     /**
      * Register the tools with the multiplexer.
*/ @@ -143,46 +168,37 @@ export class DeepCodyAgent { maxLoops = 2 ): Promise { this.context = context - return wrapInActiveSpan('DeepCody.getContext', span => - this._getContext(requestID, span, chatAbortSignal, maxLoops) - ) - } - - private async _getContext( - requestID: string, - span: Span, - chatAbortSignal: AbortSignal, - maxLoops = 2 - ): Promise { - span.setAttribute('sampled', true) - const startTime = performance.now() - await this.reviewLoop(requestID, span, chatAbortSignal, maxLoops) - telemetryRecorder.recordEvent('cody.deep-cody.context', 'reviewed', { - privateMetadata: { - requestID, - model: DeepCodyAgent.model, - traceId: span.spanContext().traceId, - chatAgent: 'deep-cody', - }, - metadata: { - loop: this.stats.loop, // Number of loops run. - fetched: this.stats.context, // Number of context fetched. - context: this.context.length, // Number of context used. - durationMs: performance.now() - startTime, - }, - billingMetadata: { - product: 'cody', - category: 'billable', - }, - }) - const knownModes = ['search', 'edit'] - if (knownModes.includes(this.nextActionMode.mode)) { - this.statusCallback.onStream({ - title: `Switch to ${this.nextActionMode.mode} mode`, - content: 'New intent detected: ' + this.nextActionMode.mode, + return wrapInActiveSpan('DeepCody.getContext', async span => { + span.setAttribute('sampled', true) + const startTime = performance.now() + await this.reviewLoop(requestID, span, chatAbortSignal, maxLoops) + telemetryRecorder.recordEvent('cody.deep-cody.context', 'reviewed', { + privateMetadata: { + requestID, + model: DeepCodyAgent.model, + traceId: span.spanContext().traceId, + chatAgent: 'deep-cody', + }, + metadata: { + loop: this.stats.loop, // Number of loops run. + fetched: this.stats.context, // Number of context fetched. + context: this.context.length, // Number of context used. + durationMs: performance.now() - startTime, + }, + billingMetadata: { + product: 'cody', + category: 'billable', + }, }) - } - return this.context + const knownModes = ['search', 'edit'] + if (knownModes.includes(this.nextStep.mode)) { + this.statusCallback.onStream({ + title: `Switch to ${this.nextStep.mode} mode`, + content: 'New intent detected: ' + this.nextStep.mode, + }) + } + return this.context + }) } private async reviewLoop( @@ -234,10 +250,11 @@ export class DeepCodyAgent { const nextActionRes = nextMode(res)[0] || '' const [mode, query] = nextActionRes.split(':') - if (mode) { - this.nextActionMode.mode = mode - this.nextActionMode.query = query || '' - if (mode === 'search') { + const validatedMode = mode === 'edit' ? 'edit' : mode === 'search' ? 
'search' : undefined
+        if (validatedMode) {
+            this.nextStep.mode = validatedMode
+            this.nextStep.query = query
+            if (validatedMode === 'search') {
+                return []
+            }
+        }
diff --git a/vscode/src/chat/chat-view/handlers/AgenticEditHandler.ts b/vscode/src/chat/chat-view/handlers/AgenticEditHandler.ts
new file mode 100644
index 000000000000..923bbb65b1a6
--- /dev/null
+++ b/vscode/src/chat/chat-view/handlers/AgenticEditHandler.ts
@@ -0,0 +1,112 @@
+import { type ContextItem, PromptString, ps } from '@sourcegraph/cody-shared'
+import * as vscode from 'vscode'
+import { getDiagnosticsTextBlock, getUpdatedDiagnostics } from '../../../commands/context/diagnostic'
+import { executeEdit } from '../../../edit/execute'
+import { getEditor } from '../../../editor/active-editor'
+import { chatDiff } from '../../../non-stop/line-diff'
+import type { AgentHandler, AgentHandlerDelegate, AgentRequest } from './interfaces'
+
+export class AgenticEditHandler implements AgentHandler {
+    constructor(protected modelId: string) {}
+
+    public async handle(
+        req: AgentRequest,
+        delegate: AgentHandlerDelegate,
+        context?: ContextItem[]
+    ): Promise<void> {
+        const editor = getEditor()?.active
+        if (!editor?.document) {
+            delegate.postError(new Error('No active editor'), 'transcript')
+            delegate.postDone()
+            return
+        }
+        const abortSignal = req.signal
+        const postProgressToWebview = (msgs: string[]) => {
+            const message = msgs.join('\n\n')
+            delegate.postMessageInProgress({
+                speaker: 'assistant',
+                text: PromptString.unsafe_fromLLMResponse(message),
+                model: this.modelId,
+            })
+        }
+
+        const document = editor.document
+        const fullRange = document.validateRange(new vscode.Range(0, 0, document.lineCount, 0))
+        let currentDiagnostics = vscode.languages.getDiagnostics()
+
+        let attempts = 0
+        const MAX_ATTEMPTS = 5
+        let currentInstruction = req.inputText
+
+        const messageInProgress = []
+
+        while (attempts < MAX_ATTEMPTS) {
+            abortSignal.throwIfAborted()
+            attempts++
+            const task = await executeEdit({
+                configuration: {
+                    document,
+                    range: fullRange,
+                    userContextFiles: context,
+                    instruction: currentInstruction,
+                    mode: 'edit',
+                    intent: currentInstruction?.includes(ps`unit test`) ? 'edit' : 'edit',
+                },
+            })
+
+            if (!task) {
+                delegate.postError(new Error('Failed to execute edit command'), 'transcript')
+                delegate.postDone()
+                return
+            }
+
+            const diffs =
+                task.diff ||
+                (task.replacement
+                    ? [
+                          {
+                              type: 'insertion',
+                              text: task.replacement,
+                              range: task.originalRange,
+                          },
+                      ]
+                    : [])
+
+            messageInProgress.push(chatDiff(diffs, document, { showFullFile: false }))
+            postProgressToWebview(messageInProgress)
+
+            abortSignal.throwIfAborted()
+
+            // We need to give it time for the diagnostics to update
+            const latestDiagnostics = vscode.languages.getDiagnostics()
+            const problems = getUpdatedDiagnostics(currentDiagnostics, latestDiagnostics)
+
+            if (!problems.length) {
+                break // Success!
No more problems + } + + if (attempts < MAX_ATTEMPTS) { + const problemText = getDiagnosticsTextBlock(problems) + const diagnosticsBlock = PromptString.unsafe_fromLLMResponse(problemText) + const retryMessage = `Attempt ${attempts}/${MAX_ATTEMPTS}: Found issues, trying to fix:\n${problemText}` + messageInProgress.push(retryMessage) + postProgressToWebview(messageInProgress) + + // Update instruction with current problems for next attempt + currentInstruction = currentInstruction.concat( + ps`\nPrevious attempt resulted in these issues:\n${diagnosticsBlock}` + ) + currentDiagnostics = latestDiagnostics + } + } + + if (attempts === MAX_ATTEMPTS) { + messageInProgress.push( + `Reached maximum number of attempts (${MAX_ATTEMPTS}). Some issues may remain.` + ) + } + + postProgressToWebview(messageInProgress) + delegate.postDone() + } +} diff --git a/vscode/src/chat/chat-view/handlers/AgenticHandler.ts b/vscode/src/chat/chat-view/handlers/AgenticHandler.ts index e326b4a3055b..7e2f441ef62d 100644 --- a/vscode/src/chat/chat-view/handlers/AgenticHandler.ts +++ b/vscode/src/chat/chat-view/handlers/AgenticHandler.ts @@ -11,22 +11,17 @@ import { firstResultFromOperation, isAbortErrorOrSocketHangUp, modelsService, - ps, wrapInActiveSpan, } from '@sourcegraph/cody-shared' -import * as vscode from 'vscode' -import { getDiagnosticsTextBlock, getUpdatedDiagnostics } from '../../../commands/context/diagnostic' -import { executeEdit } from '../../../edit/execute' -import { getEditor } from '../../../editor/active-editor' -import type { Edit } from '../../../non-stop/line-diff' import { getCategorizedMentions } from '../../../prompt-builder/utils' -import { DeepCodyAgent } from '../../agentic/DeepCody' +import { DeepCodyAgent, type OmniboxAgentResponse } from '../../agentic/DeepCody' import { ProcessManager } from '../../agentic/ProcessManager' import { ChatBuilder } from '../ChatBuilder' import type { ChatControllerOptions } from '../ChatController' import type { ContextRetriever } from '../ContextRetriever' import type { HumanInput } from '../context' import { DefaultPrompter, type PromptInfo } from '../prompt' +import { AgenticEditHandler } from './AgenticEditHandler' import { computeContextAlternatives } from './ChatHandler' import { SearchHandler } from './SearchHandler' import type { AgentHandler, AgentHandlerDelegate, AgentRequest } from './interfaces' @@ -40,7 +35,7 @@ export class AgenticHandler implements AgentHandler { ) {} public async handle(req: AgentRequest, delegate: AgentHandlerDelegate): Promise { - const { requestID, inputText, mentions, editorState, signal, chatBuilder, recorder, span } = req + const { mentions, signal } = req const stepsManager = new ProcessManager( steps => delegate.postStatuses(steps), @@ -50,70 +45,77 @@ export class AgenticHandler implements AgentHandler { // forgot to set the source, assume it's from the user. req.mentions = mentions.map(m => (m.source ? 
m : { ...m, source: ContextItemSource.User }))
-        const contextAgent = new DeepCodyAgent(chatBuilder, this.chatClient, stepsManager)
+        const reflection = await this.reflection(req, delegate, stepsManager)
+        if (reflection.abort) {
+            delegate.postDone({ abort: reflection.abort })
+            return
+        }
+        if (reflection.error) {
+            delegate.postError(reflection.error, 'transcript')
+            return
+        }
+
+        signal.throwIfAborted()
+        this.processActionMode(reflection, req, delegate, stepsManager)
+    }
-        const contextResult = await this.agenticContext(
-            contextAgent,
+    private async reflection(
+        req: AgentRequest,
+        delegate: AgentHandlerDelegate,
+        stepsManager: ProcessManager
+    ): Promise<OmniboxAgentResponse> {
+        const { requestID, inputText, mentions, editorState, signal, chatBuilder } = req
+        const baseContextResult = await this.computeContext(
             requestID,
             { text: inputText, mentions },
             editorState,
             chatBuilder,
             delegate,
-            signal
+            signal,
+            true
         )
-
-        if (contextResult.abort) {
-            delegate.postDone({ abort: contextResult.abort })
-            return
-        }
-        if (contextResult.error) {
-            delegate.postError(contextResult.error, 'transcript')
-            return
+        const baseContext = baseContextResult.contextItems
+        // Early return if basic conditions aren't met.
+        if (baseContextResult.error || baseContextResult.abort || !baseContext) {
+            return { ...baseContextResult }
         }
-        signal.throwIfAborted()
+        const agent = new DeepCodyAgent(req.chatBuilder, this.chatClient, stepsManager)
-        const { mode, query } = contextAgent.nextActionMode
-        if (mode === 'search') {
+        return await agent.start(requestID, signal, baseContext)
+    }
+
+    private async processActionMode(
+        agentResponse: OmniboxAgentResponse,
+        req: AgentRequest,
+        delegate: AgentHandlerDelegate,
+        stepsManager: ProcessManager
+    ): Promise<void> {
+        const { mode, query } = agentResponse.next ?? {}
+        const corpusContext = agentResponse.contextItems ?? []
+        // Search mode
+        if (mode === 'search' && query) {
             const search = new SearchHandler()
-            await search.handle(
-                {
-                    ...req,
-                    inputText: PromptString.unsafe_fromLLMResponse(query),
-                },
-                delegate
-            )
+            req.inputText = PromptString.unsafe_fromLLMResponse(query)
+            await search.handle(req, delegate)
             delegate.postDone()
             return
         }
-
-        const corpusContext = contextResult.contextItems ??
[] - + // Edit mode if (mode === 'edit') { - chatBuilder.setLastMessageIntent('edit') + req.chatBuilder.setLastMessageIntent('edit') // const edit = new EditChatHandler(this.modelId, this.editor, this.chatClient, corpusContext) // await edit.handle(req, delegate) - this.edit(inputText, delegate, corpusContext) + await new AgenticEditHandler(this.modelId).handle(req, delegate) return } - + // Chat mode + req.signal.throwIfAborted() + req.recorder.recordChatQuestionExecuted(corpusContext, { addMetadata: true, current: req.span }) const { explicitMentions, implicitMentions } = getCategorizedMentions(corpusContext) const prompter = new DefaultPrompter(explicitMentions, implicitMentions, false) - const { prompt, context } = await this.buildPrompt(prompter, chatBuilder, signal, 8) - - signal.throwIfAborted() - - recorder.recordChatQuestionExecuted(corpusContext, { addMetadata: true, current: span }) - this.streamAssistantResponse( - requestID, - prompt, - this.modelId, - signal, - chatBuilder, - delegate, - stepsManager, - context?.used - ) + const { prompt } = await this.buildPrompt(prompter, req.chatBuilder, req.signal, 8) + this.streamAssistantResponse(req, prompt, this.modelId, delegate, stepsManager) } /** @@ -131,8 +133,7 @@ export class AgenticHandler implements AgentHandler { error: (completedResponse: string, error: Error) => void }, abortSignal: AbortSignal, - stepsManager: ProcessManager, - context?: ContextItem[] + stepsManager: ProcessManager ): Promise { let lastContent = '' const typewriter = new Typewriter({ @@ -188,21 +189,18 @@ export class AgenticHandler implements AgentHandler { } private streamAssistantResponse( - requestID: string, + req: AgentRequest, prompt: Message[], model: ChatModel, - abortSignal: AbortSignal, - chatBuilder: ChatBuilder, delegate: AgentHandlerDelegate, - stepsManager: ProcessManager, - context?: ContextItem[] + stepsManager: ProcessManager ): void { - abortSignal.throwIfAborted() + req.signal.throwIfAborted() this.sendLLMRequest( - requestID, + req.requestID, prompt, model, - chatBuilder, + req.chatBuilder, { update: content => { delegate.postMessageInProgress({ @@ -230,46 +228,13 @@ export class AgenticHandler implements AgentHandler { }) delegate.postDone() if (isAbortErrorOrSocketHangUp(error)) { - abortSignal.throwIfAborted() + req.signal.throwIfAborted() } }, }, - abortSignal, - stepsManager, - context - ) - } - - private async agenticContext( - contextAgent: DeepCodyAgent, - requestID: string, - { text, mentions }: HumanInput, - editorState: SerializedPromptEditorState | null, - chatBuilder: ChatBuilder, - delegate: AgentHandlerDelegate, - signal: AbortSignal - ): Promise<{ - contextItems?: ContextItem[] - error?: Error - abort?: boolean - }> { - const baseContextResult = await this.computeContext( - requestID, - { text, mentions }, - editorState, - chatBuilder, - delegate, - signal, - true + req.signal, + stepsManager ) - // Early return if basic conditions aren't met. - if (baseContextResult.error || baseContextResult.abort) { - return baseContextResult - } - - const baseContext = baseContextResult.contextItems ?? 
[] - const agenticContext = await contextAgent.getContext(requestID, signal, baseContext) - return { contextItems: agenticContext } } private async buildPrompt( @@ -318,170 +283,4 @@ export class AgenticHandler implements AgentHandler { return { error: new Error(`Unexpected error computing context, no context was used: ${e}`) } } } - - protected async edit( - instruction: PromptString, - delegate: AgentHandlerDelegate, - context: ContextItem[] = [] - ): Promise { - const editor = getEditor()?.active - if (!editor?.document) { - delegate.postError(new Error('No active editor'), 'transcript') - delegate.postDone() - return - } - - const postProgressToWebview = (msgs: string[]) => { - const message = msgs.join('\n\n') - delegate.postMessageInProgress({ - speaker: 'assistant', - text: PromptString.unsafe_fromLLMResponse(message), - model: this.modelId, - }) - } - - const document = editor.document - const fullRange = document.validateRange(new vscode.Range(0, 0, document.lineCount, 0)) - let currentDiagnostics = vscode.languages.getDiagnostics() - - let attempts = 0 - const MAX_ATTEMPTS = 5 - let currentInstruction = instruction - - const messageInProgress = [] - - while (attempts < MAX_ATTEMPTS) { - attempts++ - - const task = await executeEdit({ - configuration: { - document, - range: fullRange, - userContextFiles: context, - instruction: currentInstruction, - mode: 'edit', - intent: 'edit', - }, - }) - - if (!task) { - delegate.postError(new Error('Failed to execute edit command'), 'transcript') - delegate.postDone() - return - } - - const diffs = - task.diff || - (task.replacement - ? [ - { - type: 'insertion', - text: task.replacement, - range: task.originalRange, - }, - ] - : []) - - messageInProgress.push(this.generateDiffMessage(diffs, document)) - postProgressToWebview(messageInProgress) - - await editor.document.save() - - const latestDiagnostics = vscode.languages.getDiagnostics() - const problems = getUpdatedDiagnostics(currentDiagnostics, latestDiagnostics) - - if (!problems.length) { - break // Success! No more problems - } - - if (attempts < MAX_ATTEMPTS) { - const problemText = getDiagnosticsTextBlock(problems) - const diagnosticsBlock = PromptString.unsafe_fromLLMResponse(problemText) - const retryMessage = `Attempt ${attempts}/${MAX_ATTEMPTS}: Found issues, trying to fix:\n${problemText}` - messageInProgress.push(retryMessage) - postProgressToWebview(messageInProgress) - - // Update instruction with current problems for next attempt - currentInstruction = instruction.concat( - ps`\nPrevious attempt resulted in these issues:\n${diagnosticsBlock}` - ) - currentDiagnostics = latestDiagnostics - } - } - - if (attempts === MAX_ATTEMPTS) { - messageInProgress.push( - `Reached maximum number of attempts (${MAX_ATTEMPTS}). 
Some issues may remain.` - ) - } - - postProgressToWebview(messageInProgress) - delegate.postDone() - } - - // Helper method to generate diff message - private generateDiffMessage(diffs: Edit[], document: vscode.TextDocument): string { - const message = ['Here is the proposed change:\n\n```diff'] - const documentLines = document.getText().split('\n') - const modifiedLines = new Map() - - for (const diff of diffs) { - for (let line = diff.range.start.line; line <= diff.range.end.line; line++) { - modifiedLines.set(line, diff) - } - } - - for (let lineNumber = 0; lineNumber < documentLines.length; lineNumber++) { - const diff = modifiedLines.get(lineNumber) - if (!diff) { - message.push(` ${documentLines[lineNumber]}`) - continue - } - - switch (diff.type) { - case 'deletion': - if (lineNumber === diff.range.start.line) { - message.push( - document - .getText(diff.range) - .trimEnd() - .split('\n') - .map(line => `- ${line}`) - .join('\n') - ) - } - break - case 'decoratedReplacement': - if (lineNumber === diff.range.start.line) { - message.push( - diff.oldText - .trimEnd() - .split('\n') - .map(line => `- ${line}`) - .join('\n'), - diff.text - .trimEnd() - .split('\n') - .map(line => `+ ${line}`) - .join('\n') - ) - } - break - case 'insertion': - if (lineNumber === diff.range.start.line) { - message.push( - diff.text - .trimEnd() - .split('\n') - .map(line => `+ ${line}`) - .join('\n') - ) - } - break - } - } - - message.push('```') - return message.join('\n') - } } diff --git a/vscode/src/chat/chat-view/handlers/registry.ts b/vscode/src/chat/chat-view/handlers/registry.ts index a5a5251e10f7..c78a44209224 100644 --- a/vscode/src/chat/chat-view/handlers/registry.ts +++ b/vscode/src/chat/chat-view/handlers/registry.ts @@ -22,6 +22,7 @@ export function getAgent(id: string, modelId: string, tools: AgentTools): AgentH if (agentRegistry.has(id)) { return agentRegistry.get(id)!(id, tools) } + // If id is not found, assume it's a base model return new AgenticHandler(modelId, contextRetriever, editor, chatClient) } diff --git a/vscode/src/chat/context/chatContext.ts b/vscode/src/chat/context/chatContext.ts index dd0ac4f3363d..fe69f99c036e 100644 --- a/vscode/src/chat/context/chatContext.ts +++ b/vscode/src/chat/context/chatContext.ts @@ -4,6 +4,7 @@ import { type ContextItemOpenCtx, type ContextItemRepository, type ContextMentionProviderID, + DIAGNOSTIC_CONTEXT_MENTION_PROVIDER, FILE_CONTEXT_MENTION_PROVIDER, type MentionMenuData, type MentionQuery, @@ -28,6 +29,7 @@ import { LRUCache } from 'lru-cache' import { Observable, map } from 'observable-fns' import * as vscode from 'vscode' import { URI } from 'vscode-uri' +import { getContextFromDiagnostics } from '../../commands/context/diagnostic' import { getContextFileFromUri } from '../../commands/context/file-path' import { getFileContextFiles, @@ -151,7 +153,8 @@ export async function getChatContextItemsForMention( return files } - + case DIAGNOSTIC_CONTEXT_MENTION_PROVIDER.id: + return getContextFromDiagnostics() default: { const items = await currentOpenCtxController().mentions( { diff --git a/vscode/src/commands/context/diagnostic.ts b/vscode/src/commands/context/diagnostic.ts index e34cd29747c1..2e6f6a6a0629 100644 --- a/vscode/src/commands/context/diagnostic.ts +++ b/vscode/src/commands/context/diagnostic.ts @@ -17,10 +17,9 @@ export async function getContextFromDiagnostics(): Promise { try { const diagnostics = vscode.languages.getDiagnostics() - // Process diagnostics in parallel for better performance const processedItems = await 
Promise.all( Array.from(diagnostics).map(async ([uri, fileDiagnostics]) => { - const cacheKey = `${uri.toString()}-${fileDiagnostics.length}` + const cacheKey = `${displayPath(uri)}-${fileDiagnostics.length}` // Check cache first if (diagnosticsCache.has(cacheKey)) { @@ -40,9 +39,10 @@ export async function getContextFromDiagnostics(): Promise { const item = { type: 'file' as const, + title: errors[0]?.message, content, - uri, - source: ContextItemSource.User, + uri: uri, + source: ContextItemSource.Terminal, size, } @@ -82,9 +82,16 @@ const areDiagnosticsEquivalent = ( { code: code2, message: message2, severity: severity2, source: source2 }: vscode.Diagnostic ): boolean => code1 === code2 && message1 === message2 && severity1 === severity2 && source1 === source2 -export function getUpdatedDiagnostics(previous: Diagnostics[], current: Diagnostics[]): Diagnostics[] { +export function getUpdatedDiagnostics( + previous: Diagnostics[], + current: Diagnostics[], + doc?: vscode.Uri +): Diagnostics[] { const cache = new Map(previous) return current.flatMap(([uri, currentDiags]) => { + if (doc && doc.path !== uri.path) { + return [] + } const previousDiags = cache.get(uri) || [] const uniqueDiags = currentDiags.filter( cur => !previousDiags.some(prev => areDiagnosticsEquivalent(prev, cur)) @@ -99,7 +106,7 @@ export function getDiagnosticsTextBlock(diagnostics: Diagnostics[]): string { const diagnosticLines = fileDiagnostics .map(d => `[${d.severity}] Line ${d.range.start.line + 1}: ${d.message}`) .join('\n') - return `\`\`\`bash:${displayPath(uri)}\n${diagnosticLines}\n\`\`\`` + return `\`\`\`console:${displayPath(uri)}\n${diagnosticLines}\n\`\`\`` }) .join('\n') } diff --git a/vscode/src/non-stop/line-diff.ts b/vscode/src/non-stop/line-diff.ts index e12c6aea9475..998024c850d2 100644 --- a/vscode/src/non-stop/line-diff.ts +++ b/vscode/src/non-stop/line-diff.ts @@ -120,3 +120,89 @@ export function makeDiffEditBuilderCompatible(diff: Edit[]): Edit[] { return suitableEdit } + +export interface ChatDiffDisplayOptions { + showFullFile: boolean +} + +export function chatDiff( + diffs: Edit[], + document: vscode.TextDocument, + options: ChatDiffDisplayOptions = { showFullFile: true } +): string { + const message = ['Here is the proposed change:\n\n```diff'] + const documentLines = document.getText().split('\n') + const modifiedLines = new Map() + + // Find first and last modified lines for compact diff + let firstModifiedLine = documentLines.length + let lastModifiedLine = 0 + + // Build modified lines map and find boundaries + for (const diff of diffs) { + for (let line = diff.range.start.line; line <= diff.range.end.line; line++) { + modifiedLines.set(line, diff) + firstModifiedLine = Math.min(firstModifiedLine, line) + lastModifiedLine = Math.max(lastModifiedLine, line) + } + } + + // Determine the range of lines to process + const startLine = options.showFullFile ? 0 : Math.max(0, firstModifiedLine - 3) // Show 3 lines of context + const endLine = options.showFullFile + ? 
documentLines.length + : Math.min(documentLines.length, lastModifiedLine + 3) + + for (let lineNumber = startLine; lineNumber < endLine; lineNumber++) { + const diff = modifiedLines.get(lineNumber) + if (!diff) { + message.push(` ${documentLines[lineNumber]}`) + continue + } + + switch (diff.type) { + case 'deletion': + if (lineNumber === diff.range.start.line) { + message.push( + document + .getText(diff.range) + .trimEnd() + .split('\n') + .map(line => `- ${line}`) + .join('\n') + ) + } + break + case 'decoratedReplacement': + if (lineNumber === diff.range.start.line) { + message.push( + diff.oldText + .trimEnd() + .split('\n') + .map(line => `- ${line}`) + .join('\n'), + diff.text + .trimEnd() + .split('\n') + .map(line => `+ ${line}`) + .join('\n') + ) + } + break + case 'insertion': + if (lineNumber === diff.range.start.line) { + message.push( + diff.text + .trimEnd() + .split('\n') + .map(line => `+ ${line}`) + .join('\n') + ) + } + break + } + } + + message.push('```') + return message.join('\n') +} diff --git a/vscode/webviews/chat/Transcript.tsx b/vscode/webviews/chat/Transcript.tsx index 99856d832e82..51256fc285bd 100644 --- a/vscode/webviews/chat/Transcript.tsx +++ b/vscode/webviews/chat/Transcript.tsx @@ -39,7 +39,7 @@ import { import { HumanMessageCell } from './cells/messageCell/human/HumanMessageCell' import { type Context, type Span, context, trace } from '@opentelemetry/api' -import { DeepCodyAgentID, ToolCodyModelName } from '@sourcegraph/cody-shared/src/models/client' +import { ToolCodyModelName } from '@sourcegraph/cody-shared/src/models/client' import { isCodeSearchContextItem } from '../../src/context/openctx/codeSearch' import { AgenticContextCell } from './cells/agenticCell/AgenticContextCell' import ApprovalCell from './cells/agenticCell/ApprovalCell' @@ -568,7 +568,7 @@ const TranscriptInteraction: FC = memo(props => { model={assistantMessage?.model} isForFirstMessage={humanMessage.index === 0} isContextLoading={isContextLoading} - defaultOpen={isContextLoading && humanMessage.agent === DeepCodyAgentID} + defaultOpen={isContextLoading} agent={humanMessage?.agent ?? undefined} /> )}
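
The behavior this patch introduces in `AgenticEditHandler` (backed by `getUpdatedDiagnostics`) is a diagnostics-driven retry loop: snapshot the problem list, apply an edit, diff the diagnostics afterwards, and feed only the newly introduced problems back into the next instruction, up to a fixed number of attempts. The sketch below is illustrative only and is not part of this patch; `applyEdit` and `formatProblems` are hypothetical stand-ins for `executeEdit()` and `getDiagnosticsTextBlock()`, and the equivalence check mirrors the fields `areDiagnosticsEquivalent` compares.

```typescript
// Illustrative sketch only (not code from this patch). `applyEdit` and
// `formatProblems` are hypothetical callbacks standing in for executeEdit()
// and getDiagnosticsTextBlock().
import * as vscode from 'vscode'

type FileDiagnostics = [vscode.Uri, vscode.Diagnostic[]]

// Diagnostics present in `current` but not in `previous`, compared per file by
// code/message/severity/source (the same fields areDiagnosticsEquivalent uses).
function newProblems(previous: FileDiagnostics[], current: FileDiagnostics[]): FileDiagnostics[] {
    const prevByUri = new Map<string, vscode.Diagnostic[]>()
    for (const [uri, diags] of previous) {
        prevByUri.set(uri.toString(), diags)
    }
    return current
        .map(([uri, diags]): FileDiagnostics => {
            const seen = prevByUri.get(uri.toString()) ?? []
            const unseen = diags.filter(
                d =>
                    !seen.some(
                        p =>
                            p.code === d.code &&
                            p.message === d.message &&
                            p.severity === d.severity &&
                            p.source === d.source
                    )
            )
            return [uri, unseen]
        })
        .filter(([, diags]) => diags.length > 0)
}

// Apply an edit, re-check diagnostics, and retry with the new problems appended
// to the instruction, capped at maxAttempts.
async function editUntilClean(
    applyEdit: (instruction: string) => Promise<void>,
    formatProblems: (problems: FileDiagnostics[]) => string,
    instruction: string,
    maxAttempts = 5
): Promise<void> {
    let previous = vscode.languages.getDiagnostics()
    let current = instruction
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        await applyEdit(current)
        const latest = vscode.languages.getDiagnostics()
        const problems = newProblems(previous, latest)
        if (problems.length === 0) {
            return // the edit introduced no new problems
        }
        current = `${instruction}\nPrevious attempt resulted in these issues:\n${formatProblems(problems)}`
        previous = latest
    }
}
```

The shipped handler additionally streams a `chatDiff()` rendering of each attempt to the webview and posts a final notice once `MAX_ATTEMPTS` is reached.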