 using Microsoft.Extensions.AI;
+using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities;
 using ModelContextProtocol.Client;
 using ModelContextProtocol.Protocol;
 
@@ -20,7 +21,6 @@ public async Task<ChatResponse> GetChatResponseAsync(IEnumerable<ChatMessage> ch
         var tools = await _mcpClient.ListToolsAsync();
         var result = new List<ChatResponseUpdate>();
         var toolsCalled = new HashSet<string>();
-
         var chatOptions =
             new ChatOptions
             {
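The `Tools = [.. tools]` initializer (visible in the new method below) works because `McpClientTool` derives from `Microsoft.Extensions.AI.AIFunction`, itself an `AITool`, so MCP tools can be handed straight to the chat client. A minimal sketch, assuming an already-connected `IMcpClient`:

```csharp
// Sketch only: _mcpClient is assumed to be an already-connected IMcpClient.
IList<McpClientTool> tools = await _mcpClient.ListToolsAsync();

var chatOptions = new ChatOptions
{
    Tools = [.. tools] // McpClientTool is an AIFunction, so it spreads into IList<AITool>
};
```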
@@ -29,23 +29,22 @@ public async Task<ChatResponse> GetChatResponseAsync(IEnumerable<ChatMessage> ch
 
         // GetResponseAsync allows the LLM to do too much. Limit it to the number of tools we expect,
         // not including calling a tool again.
-        await foreach (var message in _chatClient.GetStreamingResponseAsync(chat, chatOptions))
+        await foreach (var message in _chatClient.GetStreamingResponseAsync(chat, chatOptions))
         {
-            foreach (var content in message.Contents)
+            foreach (var content in message.Contents)
             {
                 if (content is FunctionCallContent func)
                 {
                     toolsCalled.Add(func.Name);
                 }
             }
 
-            if (message.Contents.Any())
+            if (message.Contents.Any())
             {
                 result.Add(message);
             }
-
-
-            if (toolsCalled.Count >= maxToolCalls)
+
+            if (toolsCalled.Count >= maxToolCalls)
             {
                 break;
             }
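Note that `toolsCalled` is a `HashSet<string>`, so repeat invocations of the same tool do not advance the cap; only distinct tool names count toward `maxToolCalls`. A quick illustration of the counting behaviour (the tool names are made up):

```csharp
var toolsCalled = new HashSet<string>();
toolsCalled.Add("get_weather");
toolsCalled.Add("get_weather"); // duplicate name: the HashSet keeps a single entry
toolsCalled.Add("get_forecast");

Console.WriteLine(toolsCalled.Count); // 2 -> two distinct tools count toward maxToolCalls
```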
@@ -54,6 +53,66 @@ public async Task<ChatResponse> GetChatResponseAsync(IEnumerable<ChatMessage> ch
         return result.ToChatResponse();
     }
 
+    public async Task<ChatResponse> GetChatResponseWithExpectedResponseAsync(IEnumerable<ChatMessage> chat, Dictionary<string, ChatMessage> expectedToolResults)
+    {
+        var tools = await _mcpClient.ListToolsAsync();
+        var conversationMessages = chat.ToList();
+        var chatOptions = new ChatOptions
+        {
+            Tools = [.. tools]
+        };
+        var response = await _chatClient.GetResponseAsync(chat, chatOptions);
+        var chatInitialIndex = conversationMessages.Count;
+
+        while (response.FinishReason == ChatFinishReason.ToolCalls)
+        {
+            // There is only going to be one message because functions are not auto-invoked;
+            // however, one message can contain several AIContent types.
+            var message = response.Messages.FirstOrDefault();
+
+            // No message to process; exit.
+            if (message == null)
+            {
+                break;
+            }
+
+            conversationMessages.Add(message);
+            var functionCalls = message.Contents.OfType<FunctionCallContent>();
+
+            foreach (var functionCall in functionCalls)
+            {
+                // Use the expected tool result if we have it.
+                if (expectedToolResults.TryGetValue(functionCall.Name, out var expectedToolResult))
+                {
+                    var toolCall = expectedToolResult.Contents.OfType<FunctionResultContent>().First();
+                    var toolResponseMessage = new ChatMessage()
+                    {
+                        Role = ChatRole.Tool,
+                        // The tool result needs a matching call id.
+                        Contents = [new FunctionResultContent(functionCall.CallId, toolCall.Result)]
+                    };
+
+                    conversationMessages.Add(toolResponseMessage);
+                }
+                // The tool wasn't expected; try stopping the LLM here.
+                else
+                {
+                    var errorResponseMessage = new ChatMessage()
+                    {
+                        Role = ChatRole.Tool,
+                        Contents = [new FunctionResultContent(functionCall.CallId, $"Error: Tool '{functionCall.Name}' was not expected. Stop conversation here.")]
+                    };
+
+                    conversationMessages.Add(errorResponseMessage);
+                }
+            }
+
+            response = await _chatClient.GetResponseAsync(conversationMessages, chatOptions);
+        }
+
+        return new ChatResponse([.. conversationMessages.Skip(chatInitialIndex)]);
+    }
+
     public async Task<ChatResponse> GetChatResponseAsync(IEnumerable<ChatMessage> chat)
     {
         var tools = await _mcpClient.ListToolsAsync();
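A sketch of how the new method might be driven from a test. The tool name, canned result, and `client` instance are made-up assumptions; only `GetChatResponseWithExpectedResponseAsync` itself comes from the diff:

```csharp
// Hypothetical test setup; "get_weather" and the canned result are assumptions.
var expectedToolResults = new Dictionary<string, ChatMessage>
{
    ["get_weather"] = new ChatMessage
    {
        Role = ChatRole.Tool,
        // The call id here is a placeholder; the method re-wraps the result with the
        // actual FunctionCallContent.CallId it receives from the model.
        Contents = [new FunctionResultContent("placeholder", "Sunny, 21 C")]
    }
};

var chat = new List<ChatMessage> { new(ChatRole.User, "What's the weather in Oslo?") };
var response = await client.GetChatResponseWithExpectedResponseAsync(chat, expectedToolResults);

// The returned ChatResponse contains only the messages added after the initial chat:
// the model's tool-call message(s) plus the injected tool results.
foreach (var message in response.Messages)
{
    Console.WriteLine($"{message.Role}: {message.Text}");
}
```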