// Copyright (c) Microsoft. All rights reserved.

// This sample shows how to use the Computer Use tool with AIProjectClient.AsAIAgent(...).
//
// Flow: upload pre-captured screenshots that stand in for live browser state, start an
// agent session with an initial screenshot, then loop: execute each computer_call the
// model emits (simulated via ComputerUseUtil), and send the resulting screenshot back
// as a computer_call_output until the model stops requesting actions.

using Azure.AI.Projects;
using Azure.Identity;
using Demo.ComputerUse;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Foundry;
using Microsoft.Extensions.AI;
using OpenAI.Responses;

// Required project endpoint; fail fast with a clear message if it is missing.
string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT") ?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_COMPUTER_USE_DEPLOYMENT_NAME") ?? "computer-use-preview";

// WARNING: DefaultAzureCredential is convenient for development but requires careful
// consideration in production; prefer a specific credential (e.g. ManagedIdentityCredential)
// to avoid latency from credential probing and unintended fallback mechanisms.
AIProjectClient projectClient = new(new Uri(endpoint), new DefaultAzureCredential());
using IHostedFileClient fileClient = projectClient.GetProjectOpenAIClient().AsIHostedFileClient();

// Create an AIAgent with the Computer Use tool (browser environment, 1026x769 viewport).
AIAgent agent = projectClient.AsAIAgent(
    model: deploymentName,
    name: "ComputerAgent",
    instructions: "You are a computer automation assistant.",
    tools: [FoundryAITool.CreateComputerTool(ComputerToolEnvironment.Browser, 1026, 769)]);

// Logical screenshot name -> hosted file ID. Declared (and initialized empty) outside the
// try block so the finally cleanup can always run, even if the upload itself fails.
Dictionary<string, string> screenshots = [];

try
{
    // Upload pre-captured screenshots that simulate browser state transitions.
    screenshots = await ComputerUseUtil.UploadScreenshotAssetsAsync(fileClient);

    // Enable auto-truncation for the Responses API so long sessions don't overflow context.
    ChatClientAgentRunOptions runOptions = new()
    {
        ChatOptions = new ChatOptions
        {
            RawRepresentationFactory = (_) => new CreateResponseOptions() { TruncationMode = ResponseTruncationMode.Auto },
        }
    };

    // Send the initial request with a screenshot of the browser, referenced by hosted file ID.
    ChatMessage message = new(ChatRole.User, [
        new TextContent("Search for 'OpenAI news'. Type it and submit. Once you see results, the task is complete."),
        new AIContent() { RawRepresentation = ResponseContentPart.CreateInputImagePart(imageFileId: screenshots["browser_search"], imageDetailLevel: ResponseImageDetailLevel.High) }
    ]);

    Console.WriteLine("Starting computer use session...");

    AgentSession session = await agent.CreateSessionAsync();
    AgentResponse response = await agent.RunAsync(message, session: session, options: runOptions);

    // Simulated browser state machine; advanced by ComputerUseUtil as actions are "executed".
    SearchState currentState = SearchState.Initial;

    // Main interaction loop, capped at 10 iterations as a safety limit.
    for (int i = 0; i < 10; i++)
    {
        // Find the next computer call action in the response, if any.
        ComputerCallResponseItem? computerCall = response.Messages
            .SelectMany(m => m.Contents)
            .Select(c => c.RawRepresentation as ComputerCallResponseItem)
            .FirstOrDefault(item => item is not null);

        if (computerCall is null)
        {
            // If we already pressed Enter, reaching "no more actions" means the task is done.
            if (currentState == SearchState.PressedEnter)
            {
                Console.WriteLine("No more computer actions. Done.");
                Console.WriteLine(response);
                break;
            }

            // Check if the agent is asking for confirmation to proceed, and if so,
            // respond affirmatively so the simulated task can continue.
            TextContent? textContent = response.Messages
                .Where(m => m.Role == ChatRole.Assistant)
                .SelectMany(m => m.Contents.OfType<TextContent>())
                .FirstOrDefault();

            if (textContent?.Text is { } text && (
                text.Contains("Would you like me") ||
                text.Contains("Should I") ||
                text.Contains("proceed") ||
                text.Contains('?')))
            {
                response = await agent.RunAsync("Please proceed.", session, runOptions);
                continue;
            }

            // No action, not done, and not a question: nothing further to do.
            break;
        }

        // computerCall is proven non-null by the check above; no null-forgiving '!' needed.
        Console.WriteLine($"[{i + 1}] Action: {computerCall.Action.Kind}");

        // Simulate the action and get the hosted file ID of the resulting screenshot.
        (currentState, string fileId) = await ComputerUseUtil.GetScreenshotAsync(computerCall.Action, currentState, screenshots);

        // Send the screenshot back as the computer call output; the session carries
        // conversation continuity, so only the new output item is sent.
        AIContent callOutput = new()
        {
            RawRepresentation = new ComputerCallOutputResponseItem(
                computerCall.CallId,
                output: ComputerCallOutput.CreateScreenshotOutput(screenshotImageFileId: fileId))
        };

        response = await agent.RunAsync([new ChatMessage(ChatRole.User, [callOutput])], session: session, options: runOptions);
    }
}
finally
{
    // Best-effort cleanup of the uploaded screenshot files.
    await ComputerUseUtil.EnsureDeleteScreenshotAssetsAsync(fileClient, screenshots);
}