56 changes: 54 additions & 2 deletions workspaces/ballerina/ballerina-core/src/state-machine-types.ts
@@ -204,7 +204,8 @@ export type ChatNotify =
| EvalsToolResult
| UsageMetricsEvent
| TaskApprovalRequest
| GeneratedSourcesEvent;
| GeneratedSourcesEvent
| ConnectorGenerationNotification;

export interface ChatStart {
type: "start";
@@ -295,6 +296,31 @@ export interface GeneratedSourcesEvent {
fileArray: SourceFile[];
}

export interface ConnectorGenerationNotification {
    type: "connector_generation_notification";
    requestId: string;
    stage: "requesting_input" | "input_received" | "generating" | "generated" | "skipped" | "error";
    serviceName?: string;
    serviceDescription?: string;
    spec?: {
        version: string;
        title: string;
        description?: string;
        baseUrl?: string;
        endpointCount: number;
        methods: string[];
    };
    connector?: {
        moduleName: string;
        importStatement: string;
    };
    error?: {
        message: string;
        code: string;
    };
    message: string;
}
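// Illustrative sketch only: a hypothetical "generated"-stage notification that
// satisfies the interface above. All concrete values are made-up examples.
const sampleConnectorNotification: ConnectorGenerationNotification = {
    type: "connector_generation_notification",
    requestId: "req-123",
    stage: "generated",
    serviceName: "PetStore",
    spec: {
        version: "3.0.1",
        title: "Swagger Petstore",
        baseUrl: "https://petstore.example.com/v1",
        endpointCount: 4,
        methods: ["GET", "POST", "DELETE"],
    },
    connector: {
        moduleName: "petstore",
        importStatement: "import myproject.petstore;",
    },
    message: "Generated connector module 'petstore' from the provided OpenAPI spec.",
};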

export const stateChanged: NotificationType<MachineStateValue> = { method: 'stateChanged' };
export const onDownloadProgress: NotificationType<DownloadProgress> = { method: 'onDownloadProgress' };
export const onChatNotify: NotificationType<ChatNotify> = { method: 'onChatNotify' };
@@ -374,6 +400,7 @@ export type AIChatMachineStateValue =
| 'TaskReview'
| 'ApprovedTask'
| 'RejectedTask'
| 'WaitingForConnectorSpec'
| 'Completed'
| 'PartiallyCompleted'
| 'Error';
@@ -396,6 +423,9 @@ export enum AIChatMachineEventType {
RESTORE_STATE = 'RESTORE_STATE',
ERROR = 'ERROR',
RETRY = 'RETRY',
CONNECTOR_GENERATION_REQUESTED = 'CONNECTOR_GENERATION_REQUESTED',
PROVIDE_CONNECTOR_SPEC = 'PROVIDE_CONNECTOR_SPEC',
SKIP_CONNECTOR_GENERATION = 'SKIP_CONNECTOR_GENERATION',
}

export interface ChatMessage {
@@ -459,6 +489,14 @@ export interface AIChatMachineContext {
projectId?: string;
currentApproval?: UserApproval;
autoApproveEnabled?: boolean;
currentSpec?: {
    requestId: string;
    spec?: any;
    provided?: boolean;
    skipped?: boolean;
    comment?: string;
};
previousState?: AIChatMachineStateValue;
}

export type AIChatMachineSendableEvent =
@@ -478,7 +516,10 @@ export type AIChatMachineSendableEvent =
| { type: AIChatMachineEventType.RESET }
| { type: AIChatMachineEventType.RESTORE_STATE; payload: { state: AIChatMachineContext } }
| { type: AIChatMachineEventType.ERROR; payload: { message: string } }
| { type: AIChatMachineEventType.RETRY };
| { type: AIChatMachineEventType.RETRY }
| { type: AIChatMachineEventType.CONNECTOR_GENERATION_REQUESTED; payload: { requestId: string; serviceName?: string; serviceDescription?: string; fromState?: AIChatMachineStateValue } }
| { type: AIChatMachineEventType.PROVIDE_CONNECTOR_SPEC; payload: { requestId: string; spec: any; inputMethod: 'file' | 'paste' | 'url'; sourceIdentifier?: string } }
| { type: AIChatMachineEventType.SKIP_CONNECTOR_GENERATION; payload: { requestId: string; comment?: string } };
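// Minimal sketch of dispatching one of the new events (hypothetical requestId and
// spec values). A value shaped like this is accepted by the sendAIChatStateEvent
// request declared further down in this file.
const provideSpecEvent: AIChatMachineSendableEvent = {
    type: AIChatMachineEventType.PROVIDE_CONNECTOR_SPEC,
    payload: {
        requestId: "req-123",
        spec: { openapi: "3.0.1", info: { title: "Swagger Petstore", version: "1.0.0" }, paths: {} },
        inputMethod: "file",
        sourceIdentifier: "petstore-openapi.yaml",
    },
};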

export enum LoginMethod {
BI_INTEL = 'biIntel',
@@ -546,3 +587,14 @@ export const aiChatStateChanged: NotificationType<AIChatMachineStateValue> = { m
export const sendAIChatStateEvent: RequestType<AIChatMachineEventType | AIChatMachineSendableEvent, void> = { method: 'sendAIChatStateEvent' };
export const getAIChatContext: RequestType<void, AIChatMachineContext> = { method: 'getAIChatContext' };
export const getAIChatUIHistory: RequestType<void, UIChatHistoryMessage[]> = { method: 'getAIChatUIHistory' };

// Connector Generator RPC methods
export interface ConnectorGeneratorResponsePayload {
requestId: string;
action: 'provide' | 'skip';
spec?: any;
inputMethod?: 'file' | 'paste' | 'url';
sourceIdentifier?: string;
comment?: string;
}
export const sendConnectorGeneratorResponse: RequestType<ConnectorGeneratorResponsePayload, void> = { method: 'sendConnectorGeneratorResponse' };
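// Minimal sketch (hypothetical values) of a "skip" payload for the request above;
// a "provide" payload would additionally carry spec and inputMethod.
const skipPayload: ConnectorGeneratorResponsePayload = {
    requestId: "req-123",
    action: "skip",
    comment: "No OpenAPI spec is available for this service yet.",
};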
@@ -28,9 +28,9 @@ import { getLibraryProviderTool } from "../libs/libraryProviderTool";
import { GenerationType, getAllLibraries, LIBRARY_PROVIDER_TOOL } from "../libs/libs";
import { Library } from "../libs/libs_types";
import { AIChatStateMachine } from "../../../../views/ai-panel/aiChatMachine";
import { getTempProject, FileModificationInfo } from "../../utils/temp-project-utils";
import { formatCodebaseStructure } from "./utils";
import { getTempProject } from "../../utils/temp-project-utils";
import { getSystemPrompt, getUserPrompt } from "./prompts";
import { createConnectorGeneratorTool, CONNECTOR_GENERATOR_TOOL } from "../libs/connectorGeneratorTool";
import { LangfuseExporter } from 'langfuse-vercel';
import { NodeSDK } from '@opentelemetry/sdk-node';
import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
@@ -61,7 +61,7 @@ export async function generateDesignCore(params: GenerateAgentCodeRequest, event

const modifiedFiles: string[] = [];

const userMessageContent = getUserPrompt(params.usecase, hasHistory, tempProjectPath);
const userMessageContent = getUserPrompt(params.usecase, hasHistory, tempProjectPath, project.projectName);
const allMessages: ModelMessage[] = [
{
role: "system",
@@ -84,6 +84,7 @@ export async function generateDesignCore(params: GenerateAgentCodeRequest, event
const tools = {
[TASK_WRITE_TOOL_NAME]: createTaskWriteTool(eventHandler, tempProjectPath, modifiedFiles),
[LIBRARY_PROVIDER_TOOL]: getLibraryProviderTool(libraryDescriptions, GenerationType.CODE_GENERATION),
[CONNECTOR_GENERATOR_TOOL]: createConnectorGeneratorTool(eventHandler, tempProjectPath, project.projectName, modifiedFiles),
[FILE_WRITE_TOOL_NAME]: createWriteTool(createWriteExecute(tempProjectPath, modifiedFiles)),
[FILE_SINGLE_EDIT_TOOL_NAME]: createEditTool(createEditExecute(tempProjectPath, modifiedFiles)),
[FILE_BATCH_EDIT_TOOL_NAME]: createBatchEditTool(createMultiEditExecute(tempProjectPath, modifiedFiles)),
@@ -128,7 +129,14 @@ export async function generateDesignCore(params: GenerateAgentCodeRequest, event

if (toolName === "LibraryProviderTool") {
selectedLibraries = (part.input as any)?.libraryNames || [];
} else if ([FILE_WRITE_TOOL_NAME, FILE_SINGLE_EDIT_TOOL_NAME, FILE_BATCH_EDIT_TOOL_NAME, FILE_READ_TOOL_NAME].includes(toolName)) {
} else if (
[
FILE_WRITE_TOOL_NAME,
FILE_SINGLE_EDIT_TOOL_NAME,
FILE_BATCH_EDIT_TOOL_NAME,
FILE_READ_TOOL_NAME,
].includes(toolName)
) {
const input = part.input as any;
if (input && input.file_path) {
let fileName = input.file_path;
@@ -157,8 +165,14 @@ export async function generateDesignCore(params: GenerateAgentCodeRequest, event
} else if (toolName === "LibraryProviderTool") {
const libraryNames = (part.output as Library[]).map((lib) => lib.name);
const fetchedLibraries = libraryNames.filter((name) => selectedLibraries.includes(name));
}
else if ([FILE_WRITE_TOOL_NAME, FILE_SINGLE_EDIT_TOOL_NAME, FILE_BATCH_EDIT_TOOL_NAME, FILE_READ_TOOL_NAME].includes(toolName)) {
} else if (
[
FILE_WRITE_TOOL_NAME,
FILE_SINGLE_EDIT_TOOL_NAME,
FILE_BATCH_EDIT_TOOL_NAME,
FILE_READ_TOOL_NAME,
].includes(toolName)
) {
} else {
eventHandler({ type: "tool_result", toolName });
}
@@ -170,6 +184,10 @@ export async function generateDesignCore(params: GenerateAgentCodeRequest, event
eventHandler({ type: "error", content: getErrorMessage(error) });
break;
}
case "text-start": {
eventHandler({ type: "content_block", content: " \n" });
break;
}
case "abort": {
console.log("[Design] Aborted by user.");
let messagesToSave: any[] = [];
@@ -221,7 +239,7 @@ Generation stopped by user. The last in-progress task was not saved. Files have
await langfuseExporter.forceFlush();
break;
}
}
}
}
}

@@ -308,37 +326,3 @@ function saveToolResult(
}]
});
}

/**
* Formats file modifications into XML structure for Claude
* TODO: This function is currently not used. Can be removed if workspace modification
* tracking is not needed in the future.
*/
function formatModifications(modifications: FileModificationInfo[]): string {
if (modifications.length === 0) {
return '';
}

const modifiedFiles = modifications.filter(m => m.type === 'modified').map(m => m.filePath);
const newFiles = modifications.filter(m => m.type === 'new').map(m => m.filePath);
const deletedFiles = modifications.filter(m => m.type === 'deleted').map(m => m.filePath);

let text = '<workspace_changes>\n';
text += 'The following changes were detected in the workspace since the last session. ';
text += 'You do not need to acknowledge or repeat these changes in your response. ';
text += 'This information is provided for your awareness only.\n\n';

if (modifiedFiles.length > 0) {
text += '<modified_files>\n' + modifiedFiles.join('\n') + '\n</modified_files>\n\n';
}
if (newFiles.length > 0) {
text += '<new_files>\n' + newFiles.join('\n') + '\n</new_files>\n\n';
}
if (deletedFiles.length > 0) {
text += '<deleted_files>\n' + deletedFiles.join('\n') + '\n</deleted_files>\n\n';
}

text += '</workspace_changes>';
return text;
}

@@ -2,6 +2,7 @@ import { DIAGNOSTICS_TOOL_NAME } from "../libs/diagnostics_tool";
import { LIBRARY_PROVIDER_TOOL } from "../libs/libs";
import { TASK_WRITE_TOOL_NAME } from "../libs/task_write_tool";
import { FILE_BATCH_EDIT_TOOL_NAME, FILE_SINGLE_EDIT_TOOL_NAME, FILE_WRITE_TOOL_NAME } from "../libs/text_editor_tool";
import { CONNECTOR_GENERATOR_TOOL } from "../libs/connectorGeneratorTool";
import { formatCodebaseStructure } from "./utils";

/**
@@ -64,8 +65,11 @@ This plan will be visible to the user and the execution will be guided on the ta
5. Once plan is APPROVED (success: true in tool response), IMMEDIATELY start the execution cycle:

**For each task:**
- Mark task as in_progress using ${TASK_WRITE_TOOL_NAME} (send ALL tasks)
- Mark task as in_progress using ${TASK_WRITE_TOOL_NAME} and immediately start implementation in parallel (single message with multiple tool calls)
- Implement the task completely (write the Ballerina code)
- When implementing external API integrations:
- First check ${LIBRARY_PROVIDER_TOOL} for known services (Stripe, GitHub, etc.)
- If NOT available, call ${CONNECTOR_GENERATOR_TOOL} to generate connector from OpenAPI spec
- Before marking the task as completed, use the ${DIAGNOSTICS_TOOL_NAME} tool to check for compilation errors and fix them. Introduce a new subtask if needed to fix errors.
- Mark task as completed using ${TASK_WRITE_TOOL_NAME} (send ALL tasks)
- The tool will wait for TASK COMPLETION APPROVAL from the user
@@ -94,6 +98,8 @@ When generating Ballerina code strictly follow these syntax and structure guidel
- In the library API documentation, if the service type is specified as generic, adhere to the instructions specified there on writing the service.
- For GraphQL service related queries, if the user hasn't specified their own GraphQL Schema, write the proposed GraphQL schema for the user query right after the explanation before generating the Ballerina code. Use the same names as the GraphQL Schema when defining record types.

### Local Connectors
- If the codebase structure shows connector modules in generated/moduleName, import using: import packageName.moduleName

### Code Structure
- Define required configurables for the query. Use only string, int, decimal, boolean types in configurable variables.
@@ -117,7 +123,13 @@ When generating Ballerina code strictly follow these syntax and structure guidel
- To narrow down a union type (or optional type), always declare a separate variable and then use that variable in the if condition.

### File modifications
- You must apply changes to the existing source code using the provided ${[FILE_BATCH_EDIT_TOOL_NAME, FILE_SINGLE_EDIT_TOOL_NAME, FILE_WRITE_TOOL_NAME].join(", ")} tools. The complete existing source code will be provided in the <existing_code> section of the user prompt.
- You must apply changes to the existing source code using the provided ${[
FILE_BATCH_EDIT_TOOL_NAME,
FILE_SINGLE_EDIT_TOOL_NAME,
FILE_WRITE_TOOL_NAME,
].join(
", "
)} tools. The complete existing source code will be provided in the <existing_code> section of the user prompt.
- When making replacements inside an existing file, provide the **exact old string** and the **exact new string** with all newlines, spaces, and indentation, being mindful to replace nearby occurrences together to minimize the number of tool calls.
- Do not modify documentation such as .md files unless explicitly asked to be modified in the query.
- Do not add/modify toml files (Config.toml/Ballerina.toml/Dependencies.toml).
@@ -130,14 +142,15 @@ When generating Ballerina code strictly follow these syntax and structure guidel
* @param usecase User's query/requirement
* @param hasHistory Whether chat history exists
* @param tempProjectPath Path to temp project (used when hasHistory is false)
* @param packageName Name of the Ballerina package
*/
export function getUserPrompt(usecase: string, hasHistory: boolean, tempProjectPath: string) {
export function getUserPrompt(usecase: string, hasHistory: boolean, tempProjectPath: string, packageName: string) {
const content = [];

if (!hasHistory) {
content.push({
type: 'text' as const,
text: formatCodebaseStructure(tempProjectPath)
text: formatCodebaseStructure(tempProjectPath, packageName)
});
}
