diff --git a/BotSharp.sln b/BotSharp.sln index 5079435f3..cd02dfa71 100644 --- a/BotSharp.sln +++ b/BotSharp.sln @@ -145,6 +145,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.ChartHandle EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.ExcelHandler", "src\Plugins\BotSharp.Plugin.ExcelHandler\BotSharp.Plugin.ExcelHandler.csproj", "{FC63C875-E880-D8BB-B8B5-978AB7B62983}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.GiteeAI", "src\Plugins\BotSharp.Plugin.GiteeAI\BotSharp.Plugin.GiteeAI.csproj", "{50B57066-3267-1D10-0F72-D2F5CC494F2C}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.ImageHandler", "src\Plugins\BotSharp.Plugin.ImageHandler\BotSharp.Plugin.ImageHandler.csproj", "{242F2D93-FCCE-4982-8075-F3052ECCA92C}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.FuzzySharp", "src\Plugins\BotSharp.Plugin.FuzzySharp\BotSharp.Plugin.FuzzySharp.csproj", "{E7C243B9-E751-B3B4-8F16-95C76CA90D31}" @@ -153,6 +155,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.MMPEmbeddin EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.Membase", "src\Plugins\BotSharp.Plugin.Membase\BotSharp.Plugin.Membase.csproj", "{13223C71-9EAC-9835-28ED-5A4833E6F915}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BotSharp.Plugin.A2A", "src\Infrastructure\BotSharp.Plugin.A2A\BotSharp.Plugin.A2A.csproj", "{89A13E1B-2BAC-493C-A194-183B8BE73230}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -617,6 +621,14 @@ Global {FC63C875-E880-D8BB-B8B5-978AB7B62983}.Release|Any CPU.Build.0 = Release|Any CPU {FC63C875-E880-D8BB-B8B5-978AB7B62983}.Release|x64.ActiveCfg = Release|Any CPU {FC63C875-E880-D8BB-B8B5-978AB7B62983}.Release|x64.Build.0 = Release|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Debug|x64.ActiveCfg = Debug|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Debug|x64.Build.0 = Debug|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Release|Any CPU.Build.0 = Release|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Release|x64.ActiveCfg = Release|Any CPU + {50B57066-3267-1D10-0F72-D2F5CC494F2C}.Release|x64.Build.0 = Release|Any CPU {242F2D93-FCCE-4982-8075-F3052ECCA92C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {242F2D93-FCCE-4982-8075-F3052ECCA92C}.Debug|Any CPU.Build.0 = Debug|Any CPU {242F2D93-FCCE-4982-8075-F3052ECCA92C}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -649,6 +661,14 @@ Global {13223C71-9EAC-9835-28ED-5A4833E6F915}.Release|Any CPU.Build.0 = Release|Any CPU {13223C71-9EAC-9835-28ED-5A4833E6F915}.Release|x64.ActiveCfg = Release|Any CPU {13223C71-9EAC-9835-28ED-5A4833E6F915}.Release|x64.Build.0 = Release|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Debug|Any CPU.Build.0 = Debug|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Debug|x64.ActiveCfg = Debug|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Debug|x64.Build.0 = Debug|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Release|Any CPU.ActiveCfg = Release|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Release|Any CPU.Build.0 = Release|Any CPU + 
{89A13E1B-2BAC-493C-A194-183B8BE73230}.Release|x64.ActiveCfg = Release|Any CPU + {89A13E1B-2BAC-493C-A194-183B8BE73230}.Release|x64.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -719,10 +739,12 @@ Global {B067B126-88CD-4282-BEEF-7369B64423EF} = {32FAFFFE-A4CB-4FEE-BF7C-84518BBC6DCC} {0428DEAA-E4FE-4259-A6D8-6EDD1A9D0702} = {51AFE054-AE99-497D-A593-69BAEFB5106F} {FC63C875-E880-D8BB-B8B5-978AB7B62983} = {51AFE054-AE99-497D-A593-69BAEFB5106F} + {50B57066-3267-1D10-0F72-D2F5CC494F2C} = {D5293208-2BEF-42FC-A64C-5954F61720BA} {242F2D93-FCCE-4982-8075-F3052ECCA92C} = {51AFE054-AE99-497D-A593-69BAEFB5106F} {E7C243B9-E751-B3B4-8F16-95C76CA90D31} = {51AFE054-AE99-497D-A593-69BAEFB5106F} {394B858B-9C26-B977-A2DA-8CC7BE5914CB} = {4F346DCE-087F-4368-AF88-EE9C720D0E69} {13223C71-9EAC-9835-28ED-5A4833E6F915} = {53E7CD86-0D19-40D9-A0FA-AB4613837E89} + {89A13E1B-2BAC-493C-A194-183B8BE73230} = {E29DC6C4-5E57-48C5-BCB0-6B8F84782749} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {A9969D89-C98B-40A5-A12B-FC87E55B3A19} diff --git a/Directory.Packages.props b/Directory.Packages.props index 5783e8002..ff4a7efe6 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -3,6 +3,7 @@ true + diff --git a/src/BotSharp.AppHost/Program.cs b/src/BotSharp.AppHost/Program.cs index 4c54ed11b..444e2ecf3 100644 --- a/src/BotSharp.AppHost/Program.cs +++ b/src/BotSharp.AppHost/Program.cs @@ -2,8 +2,8 @@ var apiService = builder.AddProject("apiservice") .WithExternalHttpEndpoints(); -var mcpService = builder.AddProject("mcpservice") - .WithExternalHttpEndpoints(); +//var mcpService = builder.AddProject("mcpservice") +// .WithExternalHttpEndpoints(); builder.AddNpmApp("BotSharpUI", "../../../BotSharp-UI") .WithReference(apiService) diff --git a/src/BotSharp.ServiceDefaults/Extensions.cs b/src/BotSharp.ServiceDefaults/Extensions.cs index bfc0bb687..caf52b243 100644 --- a/src/BotSharp.ServiceDefaults/Extensions.cs +++ b/src/BotSharp.ServiceDefaults/Extensions.cs @@ -1,12 +1,16 @@ +using BotSharp.Langfuse; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Diagnostics.HealthChecks; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Diagnostics.HealthChecks; using Microsoft.Extensions.Logging; using Microsoft.Extensions.ServiceDiscovery; using OpenTelemetry; +using OpenTelemetry.Exporter; using OpenTelemetry.Logs; using OpenTelemetry.Metrics; +using OpenTelemetry.Resources; using OpenTelemetry.Trace; using Serilog; @@ -45,6 +49,10 @@ public static IHostApplicationBuilder AddServiceDefaults(this IHostApplicationBu public static IHostApplicationBuilder ConfigureOpenTelemetry(this IHostApplicationBuilder builder) { + // Enable model diagnostics with sensitive data. 
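+        // These switches are read through AppContextSwitchHelper, so the equivalent environment variables
+        // BOTSHARP_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS and BOTSHARP_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE
+        // can be used instead of the AppContext switches. The sensitive switch attaches prompt and completion
+        // content to traces, so it should only be enabled where that is acceptable.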
+ AppContext.SetSwitch("BotSharp.Experimental.GenAI.EnableOTelDiagnostics", true); + AppContext.SetSwitch("BotSharp.Experimental.GenAI.EnableOTelDiagnosticsSensitive", true); + builder.Logging.AddOpenTelemetry(logging => { // Use Serilog Log.Logger = new LoggerConfiguration() @@ -87,10 +95,28 @@ public static IHostApplicationBuilder ConfigureOpenTelemetry(this IHostApplicati }) .WithTracing(tracing => { + tracing.SetResourceBuilder( + ResourceBuilder.CreateDefault() + .AddService("apiservice", serviceVersion: "1.0.0") + ) + .AddSource("BotSharp") + .AddSource("BotSharp.Abstraction.Diagnostics") + .AddSource("BotSharp.Core.Routing.Executor"); + tracing.AddAspNetCoreInstrumentation() // Uncomment the following line to enable gRPC instrumentation (requires the OpenTelemetry.Instrumentation.GrpcNetClient package) //.AddGrpcClientInstrumentation() - .AddHttpClientInstrumentation(); + .AddHttpClientInstrumentation() + //.AddOtlpExporter(options => + //{ + // //options.Endpoint = new Uri(builder.Configuration["OTEL_EXPORTER_OTLP_ENDPOINT"] ?? "http://localhost:4317"); + // options.Endpoint = new Uri(host); + // options.Protocol = OtlpExportProtocol.HttpProtobuf; + // options.Headers = $"Authorization=Basic {base64EncodedAuth}"; + //}) + ; + + }); builder.AddOpenTelemetryExporters(); @@ -100,14 +126,34 @@ public static IHostApplicationBuilder ConfigureOpenTelemetry(this IHostApplicati private static IHostApplicationBuilder AddOpenTelemetryExporters(this IHostApplicationBuilder builder) { + var langfuseSection = builder.Configuration.GetSection("Langfuse"); + var useLangfuse = langfuseSection != null; var useOtlpExporter = !string.IsNullOrWhiteSpace(builder.Configuration["OTEL_EXPORTER_OTLP_ENDPOINT"]); if (useOtlpExporter) { builder.Services.Configure(logging => logging.AddOtlpExporter()); builder.Services.ConfigureOpenTelemetryMeterProvider(metrics => metrics.AddOtlpExporter()); - builder.Services.ConfigureOpenTelemetryTracerProvider(tracing => tracing.AddOtlpExporter()); - + if (useLangfuse) + { + var publicKey = langfuseSection.GetValue(nameof(LangfuseSettings.PublicKey)) ?? string.Empty; + var secretKey = langfuseSection.GetValue(nameof(LangfuseSettings.SecretKey)) ?? string.Empty; + var host = langfuseSection.GetValue(nameof(LangfuseSettings.Host)) ?? 
string.Empty; + var plainTextBytes = System.Text.Encoding.UTF8.GetBytes($"{publicKey}:{secretKey}"); + string base64EncodedAuth = Convert.ToBase64String(plainTextBytes); + + builder.Services.ConfigureOpenTelemetryTracerProvider(tracing => tracing.AddOtlpExporter(options => + { + options.Endpoint = new Uri(host); + options.Protocol = OtlpExportProtocol.HttpProtobuf; + options.Headers = $"Authorization=Basic {base64EncodedAuth}"; + }) + ); + } + else + { + builder.Services.ConfigureOpenTelemetryTracerProvider(tracing => tracing.AddOtlpExporter()); + } } // Uncomment the following lines to enable the Azure Monitor exporter (requires the Azure.Monitor.OpenTelemetry.AspNetCore package) diff --git a/src/BotSharp.ServiceDefaults/LangfuseSettings.cs b/src/BotSharp.ServiceDefaults/LangfuseSettings.cs new file mode 100644 index 000000000..4c79832c6 --- /dev/null +++ b/src/BotSharp.ServiceDefaults/LangfuseSettings.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace BotSharp.Langfuse; + +/// +/// Langfuse Settings +/// +public class LangfuseSettings +{ + public string SecretKey { get; set; } + + public string PublicKey { get; set; } + + public string Host { get; set; } +} diff --git a/src/Infrastructure/BotSharp.Abstraction/Agents/Enums/AgentType.cs b/src/Infrastructure/BotSharp.Abstraction/Agents/Enums/AgentType.cs index 3b9767845..9f79ef333 100644 --- a/src/Infrastructure/BotSharp.Abstraction/Agents/Enums/AgentType.cs +++ b/src/Infrastructure/BotSharp.Abstraction/Agents/Enums/AgentType.cs @@ -23,5 +23,10 @@ public static class AgentType /// Agent that cannot use external tools /// public const string Static = "static"; + + /// + /// A2A remote agent for Microsoft Agent Framework integration + /// + public const string A2ARemote = "a2a-remote"; } diff --git a/src/Infrastructure/BotSharp.Abstraction/Diagnostics/ActivityExtensions.cs b/src/Infrastructure/BotSharp.Abstraction/Diagnostics/ActivityExtensions.cs new file mode 100644 index 000000000..105d5aae5 --- /dev/null +++ b/src/Infrastructure/BotSharp.Abstraction/Diagnostics/ActivityExtensions.cs @@ -0,0 +1,119 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; + +namespace BotSharp.Abstraction.Diagnostics; + +[ExcludeFromCodeCoverage] +public static class ActivityExtensions +{ + /// + /// Starts an activity with the appropriate tags for a kernel function execution. + /// + public static Activity? StartFunctionActivity(this ActivitySource source, string functionName, string functionDescription) + { + const string OperationName = "execute_tool"; + + return source.StartActivityWithTags($"{OperationName} {functionName}", [ + new KeyValuePair("gen_ai.operation.name", OperationName), + new KeyValuePair("gen_ai.tool.name", functionName), + new KeyValuePair("gen_ai.tool.description", functionDescription) + ], ActivityKind.Internal); + } + + /// + /// Starts an activity with the specified name and tags. + /// + public static Activity? StartActivityWithTags(this ActivitySource source, string name, IEnumerable> tags, ActivityKind kind = ActivityKind.Internal) + => source.StartActivity(name, kind, default(ActivityContext), tags); + + /// + /// Adds tags to the activity. 
+    /// </summary>
+    public static Activity SetTags(this Activity activity, ReadOnlySpan<KeyValuePair<string, object?>> tags)
+    {
+        foreach (var tag in tags)
+        {
+            activity.SetTag(tag.Key, tag.Value);
+        }
+
+        return activity;
+    }
+
+    /// <summary>
+    /// Adds an event to the activity. Should only be used for events that contain sensitive data.
+    /// </summary>
+    public static Activity AttachSensitiveDataAsEvent(this Activity activity, string name, IEnumerable<KeyValuePair<string, object?>> tags)
+    {
+        activity.AddEvent(new ActivityEvent(
+            name,
+            tags: [.. tags]
+        ));
+
+        return activity;
+    }
+
+    /// <summary>
+    /// Sets the error status and type on the activity.
+    /// </summary>
+    public static Activity SetError(this Activity activity, Exception exception)
+    {
+        activity.SetTag("error.type", exception.GetType().FullName);
+        activity.SetStatus(ActivityStatusCode.Error, exception.Message);
+        return activity;
+    }
+
+    public static async IAsyncEnumerable<T> RunWithActivityAsync<T>(
+        Func<Activity?> getActivity,
+        Func<IAsyncEnumerable<T>> operation,
+        [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        using var activity = getActivity();
+
+        ConfiguredCancelableAsyncEnumerable<T> result;
+
+        try
+        {
+            result = operation().WithCancellation(cancellationToken).ConfigureAwait(false);
+        }
+        catch (Exception ex) when (activity is not null)
+        {
+            activity.SetError(ex);
+            throw;
+        }
+
+        var resultEnumerator = result.ConfigureAwait(false).GetAsyncEnumerator();
+
+        try
+        {
+            while (true)
+            {
+                try
+                {
+                    if (!await resultEnumerator.MoveNextAsync())
+                    {
+                        break;
+                    }
+                }
+                catch (Exception ex) when (activity is not null)
+                {
+                    activity.SetError(ex);
+                    throw;
+                }
+
+                yield return resultEnumerator.Current;
+            }
+        }
+        finally
+        {
+            await resultEnumerator.DisposeAsync();
+        }
+    }
+}
diff --git a/src/Infrastructure/BotSharp.Abstraction/Diagnostics/AppContextSwitchHelper.cs b/src/Infrastructure/BotSharp.Abstraction/Diagnostics/AppContextSwitchHelper.cs
new file mode 100644
index 000000000..64e5806be
--- /dev/null
+++ b/src/Infrastructure/BotSharp.Abstraction/Diagnostics/AppContextSwitchHelper.cs
@@ -0,0 +1,35 @@
+using System;
+using System.Diagnostics.CodeAnalysis;
+
+namespace BotSharp.Abstraction.Diagnostics;
+
+/// <summary>
+/// Helper class to get app context switch value
+/// </summary>
+[ExcludeFromCodeCoverage]
+internal static class AppContextSwitchHelper
+{
+    /// <summary>
+    /// Returns the value of the specified app switch or environment variable if it is set.
+    /// If the switch or environment variable is not set, return false.
+    /// The app switch value takes precedence over the environment variable.
+    /// </summary>
+    /// <param name="appContextSwitchName">The name of the app switch.</param>
+    /// <param name="envVarName">The name of the environment variable.</param>
+    /// <returns>The value of the app switch or environment variable if it is set; otherwise, false.</returns>
+    public static bool GetConfigValue(string appContextSwitchName, string envVarName)
+    {
+        if (AppContext.TryGetSwitch(appContextSwitchName, out bool value))
+        {
+            return value;
+        }
+
+        string?
envVarValue = Environment.GetEnvironmentVariable(envVarName); + if (envVarValue != null && bool.TryParse(envVarValue, out value)) + { + return value; + } + + return false; + } +} diff --git a/src/Infrastructure/BotSharp.Abstraction/Diagnostics/ModelDiagnostics.cs b/src/Infrastructure/BotSharp.Abstraction/Diagnostics/ModelDiagnostics.cs new file mode 100644 index 000000000..83f6532cb --- /dev/null +++ b/src/Infrastructure/BotSharp.Abstraction/Diagnostics/ModelDiagnostics.cs @@ -0,0 +1,394 @@ +using BotSharp.Abstraction.Conversations; +using BotSharp.Abstraction.Functions.Models; +using Microsoft.Extensions.DependencyInjection; +using System.Diagnostics; +using System.Text.Json; + +namespace BotSharp.Abstraction.Diagnostics; + +/// +/// Model diagnostics helper class that provides a set of methods to trace model activities with the OTel semantic conventions. +/// This class contains experimental features and may change in the future. +/// To enable these features, set one of the following switches to true: +/// `BotSharp.Experimental.GenAI.EnableOTelDiagnostics` +/// `BotSharp.Experimental.GenAI.EnableOTelDiagnosticsSensitive` +/// Or set the following environment variables to true: +/// `BOTSHARP_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS` +/// `BOTSHARP_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE` +/// +//[System.Diagnostics.CodeAnalysis.Experimental("SKEXP0001")] +[System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverage] +public static class ModelDiagnostics +{ + private static readonly string s_namespace = typeof(ModelDiagnostics).Namespace!; + private static readonly ActivitySource s_activitySource = new(s_namespace); + + private const string EnableDiagnosticsSwitch = "BotSharp.Experimental.GenAI.EnableOTelDiagnostics"; + private const string EnableSensitiveEventsSwitch = "BotSharp.Experimental.GenAI.EnableOTelDiagnosticsSensitive"; + private const string EnableDiagnosticsEnvVar = "BOTSHARP_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS"; + private const string EnableSensitiveEventsEnvVar = "BOTSHARP_EXPERIMENTAL_GENAI_ENABLE_OTEL_DIAGNOSTICS_SENSITIVE"; + + private static readonly bool s_enableDiagnostics = AppContextSwitchHelper.GetConfigValue(EnableDiagnosticsSwitch, EnableDiagnosticsEnvVar); + private static readonly bool s_enableSensitiveEvents = AppContextSwitchHelper.GetConfigValue(EnableSensitiveEventsSwitch, EnableSensitiveEventsEnvVar); + + /// + /// Start a text completion activity for a given model. + /// The activity will be tagged with the a set of attributes specified by the semantic conventions. + /// + public static Activity? StartCompletionActivity( + Uri? 
endpoint, + string modelName, + string modelProvider, + string prompt, + IConversationStateService services + ) + { + if (!IsModelDiagnosticsEnabled()) + { + return null; + } + + const string OperationName = "text.completions"; + var activity = s_activitySource.StartActivityWithTags( + $"{OperationName} {modelName}", + [ + new(ModelDiagnosticsTags.Operation, OperationName), + new(ModelDiagnosticsTags.System, modelProvider), + new(ModelDiagnosticsTags.Model, modelName), + ], + ActivityKind.Client); + + if (endpoint is not null) + { + activity?.SetTags([ + // Skip the query string in the uri as it may contain keys + new(ModelDiagnosticsTags.Address, endpoint.GetLeftPart(UriPartial.Path)), + new(ModelDiagnosticsTags.Port, endpoint.Port), + ]); + } + + AddOptionalTags(activity, services); + + if (s_enableSensitiveEvents) + { + activity?.AttachSensitiveDataAsEvent( + ModelDiagnosticsTags.UserMessage, + [ + new(ModelDiagnosticsTags.EventName, prompt), + new(ModelDiagnosticsTags.System, modelProvider), + ]); + } + + return activity; + } + + /// + /// Start a chat completion activity for a given model. + /// The activity will be tagged with the a set of attributes specified by the semantic conventions. + /// + public static Activity? StartCompletionActivity( + Uri? endpoint, + string modelName, + string modelProvider, + List chatHistory, + IConversationStateService conversationStateService + ) + + { + if (!IsModelDiagnosticsEnabled()) + { + return null; + } + + const string OperationName = "chat.completions"; + var activity = s_activitySource.StartActivityWithTags( + $"{OperationName} {modelName}", + [ + new(ModelDiagnosticsTags.Operation, OperationName), + new(ModelDiagnosticsTags.System, modelProvider), + new(ModelDiagnosticsTags.Model, modelName), + ], + ActivityKind.Client); + + if (endpoint is not null) + { + activity?.SetTags([ + // Skip the query string in the uri as it may contain keys + new(ModelDiagnosticsTags.Address, endpoint.GetLeftPart(UriPartial.Path)), + new(ModelDiagnosticsTags.Port, endpoint.Port), + ]); + } + + AddOptionalTags(activity, conversationStateService); + + if (s_enableSensitiveEvents) + { + foreach (var message in chatHistory) + { + var formattedContent = JsonSerializer.Serialize(ToGenAIConventionsFormat(message)); + activity?.AttachSensitiveDataAsEvent( + ModelDiagnosticsTags.RoleToEventMap[message.Role], + [ + new(ModelDiagnosticsTags.EventName, formattedContent), + new(ModelDiagnosticsTags.System, modelProvider), + ]); + } + } + + return activity; + } + + /// + /// Start an agent invocation activity and return the activity. + /// + public static Activity? StartAgentInvocationActivity( + string agentId, + string agentName, + string? agentDescription, + Agent? 
agents, + List messages + ) + { + if (!IsModelDiagnosticsEnabled()) + { + return null; + } + + const string OperationName = "invoke_agent"; + + var activity = s_activitySource.StartActivityWithTags( + $"{OperationName} {agentName}", + [ + new(ModelDiagnosticsTags.Operation, OperationName), + new(ModelDiagnosticsTags.AgentId, agentId), + new(ModelDiagnosticsTags.AgentName, agentName) + ], + ActivityKind.Internal); + + if (!string.IsNullOrWhiteSpace(agentDescription)) + { + activity?.SetTag(ModelDiagnosticsTags.AgentDescription, agentDescription); + } + + if (agents is not null && (agents.Functions.Count > 0 || agents.SecondaryFunctions.Count >0)) + { + List allFunctions = []; + allFunctions.AddRange(agents.Functions); + allFunctions.AddRange(agents.SecondaryFunctions); + + activity?.SetTag( + ModelDiagnosticsTags.AgentToolDefinitions, + JsonSerializer.Serialize(messages.Select(m => ToGenAIConventionsFormat(m)))); + } + + if (IsSensitiveEventsEnabled()) + { + activity?.SetTag( + ModelDiagnosticsTags.AgentInvocationInput, + JsonSerializer.Serialize(messages.Select(m => ToGenAIConventionsFormat(m)))); + } + + return activity; + } + + /// + /// Set the agent response for a given activity. + /// + public static void SetAgentResponse(this Activity activity, IEnumerable? responses) + { + if (!IsModelDiagnosticsEnabled() || responses is null) + { + return; + } + + if (s_enableSensitiveEvents) + { + activity?.SetTag( + ModelDiagnosticsTags.AgentInvocationOutput, + JsonSerializer.Serialize(responses.Select(r => ToGenAIConventionsFormat(r)))); + } + } + + + + /// + /// Set the response id for a given activity. + /// + /// The activity to set the response id + /// The response id + /// The activity with the response id set for chaining + internal static Activity SetResponseId(this Activity activity, string responseId) => activity.SetTag(ModelDiagnosticsTags.ResponseId, responseId); + + /// + /// Set the input tokens usage for a given activity. + /// + /// The activity to set the input tokens usage + /// The number of input tokens used + /// The activity with the input tokens usage set for chaining + internal static Activity SetInputTokensUsage(this Activity activity, int inputTokens) => activity.SetTag(ModelDiagnosticsTags.InputTokens, inputTokens); + + /// + /// Set the output tokens usage for a given activity. + /// + /// The activity to set the output tokens usage + /// The number of output tokens used + /// The activity with the output tokens usage set for chaining + internal static Activity SetOutputTokensUsage(this Activity activity, int outputTokens) => activity.SetTag(ModelDiagnosticsTags.OutputTokens, outputTokens); + + /// + /// Check if model diagnostics is enabled + /// Model diagnostics is enabled if either EnableModelDiagnostics or EnableSensitiveEvents is set to true and there are listeners. + /// + internal static bool IsModelDiagnosticsEnabled() + { + return (s_enableDiagnostics || s_enableSensitiveEvents) && s_activitySource.HasListeners(); + } + + /// + /// Check if sensitive events are enabled. + /// Sensitive events are enabled if EnableSensitiveEvents is set to true and there are listeners. + /// + internal static bool IsSensitiveEventsEnabled() => s_enableSensitiveEvents && s_activitySource.HasListeners(); + + internal static bool HasListeners() => s_activitySource.HasListeners(); + + #region Private + private static void AddOptionalTags(Activity? 
activity, IConversationStateService conversationStateService) + { + if (activity is null) + { + return; + } + + void TryAddTag(string key, string tag) + { + var value = conversationStateService.GetState(key); + if (!string.IsNullOrEmpty(value)) + { + activity.SetTag(tag, value); + } + } + + TryAddTag("max_tokens", ModelDiagnosticsTags.MaxToken); + TryAddTag("temperature", ModelDiagnosticsTags.Temperature); + TryAddTag("top_p", ModelDiagnosticsTags.TopP); + } + + /// + /// Convert a chat message to a JSON object based on the OTel GenAI Semantic Conventions format + /// + private static object ToGenAIConventionsFormat(RoleDialogModel chatMessage) + { + return new + { + role = chatMessage.Role.ToString(), + name = chatMessage.MessageId, + content = chatMessage.Content, + tool_calls = ToGenAIConventionsToolCallFormat(chatMessage), + }; + } + + /// + /// Helper method to convert tool calls to a list of JSON object based on the OTel GenAI Semantic Conventions format + /// + private static List ToGenAIConventionsToolCallFormat(RoleDialogModel chatMessage) + { + List toolCalls = []; + if (chatMessage.Instruction is not null) + { + toolCalls.Add(new + { + id = chatMessage.ToolCallId, + function = new + { + name = chatMessage.Instruction.Function, + arguments = chatMessage.Instruction.Arguments + }, + type = "function" + }); + } + return toolCalls; + } + + /// + /// Convert a function metadata to a JSON object based on the OTel GenAI Semantic Conventions format + /// + private static object ToGenAIConventionsFormat(FunctionDef metadata) + { + var properties = metadata.Parameters?.Properties; + var required = metadata.Parameters?.Required; + + return new + { + type = "function", + name = metadata.Name, + description = metadata.Description, + parameters = new + { + type = "object", + properties, + required, + } + }; + } + + /// + /// Convert a chat model response to a JSON string based on the OTel GenAI Semantic Conventions format + /// + private static string ToGenAIConventionsChoiceFormat(RoleDialogModel chatMessage, int index) + { + var jsonObject = new + { + index, + message = ToGenAIConventionsFormat(chatMessage), + tool_calls = ToGenAIConventionsToolCallFormat(chatMessage) + }; + + return JsonSerializer.Serialize(jsonObject); + } + + + + /// + /// Tags used in model diagnostics + /// + public static class ModelDiagnosticsTags + { + // Activity tags + public const string System = "gen_ai.system"; + public const string Operation = "gen_ai.operation.name"; + public const string Model = "gen_ai.request.model"; + public const string MaxToken = "gen_ai.request.max_tokens"; + public const string Temperature = "gen_ai.request.temperature"; + public const string TopP = "gen_ai.request.top_p"; + public const string ResponseId = "gen_ai.response.id"; + public const string ResponseModel = "gen_ai.response.model"; + public const string FinishReason = "gen_ai.response.finish_reason"; + public const string InputTokens = "gen_ai.usage.input_tokens"; + public const string OutputTokens = "gen_ai.usage.output_tokens"; + public const string Address = "server.address"; + public const string Port = "server.port"; + public const string AgentId = "gen_ai.agent.id"; + public const string AgentName = "gen_ai.agent.name"; + public const string AgentDescription = "gen_ai.agent.description"; + public const string AgentInvocationInput = "gen_ai.input.messages"; + public const string AgentInvocationOutput = "gen_ai.output.messages"; + public const string AgentToolDefinitions = "gen_ai.tool.definitions"; + + // Activity events 
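+        // These events carry prompt/completion content and are attached via AttachSensitiveDataAsEvent
+        // only when sensitive-data diagnostics are enabled.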
+ public const string EventName = "gen_ai.event.content"; + public const string SystemMessage = "gen_ai.system.message"; + public const string UserMessage = "gen_ai.user.message"; + public const string AssistantMessage = "gen_ai.assistant.message"; + public const string ToolMessage = "gen_ai.tool.message"; + public const string Choice = "gen_ai.choice"; + public static readonly Dictionary RoleToEventMap = new() + { + { AgentRole.System, SystemMessage }, + { AgentRole.User, UserMessage }, + { AgentRole.Assistant, AssistantMessage }, + { AgentRole.Function, ToolMessage } + }; + } + # endregion +} diff --git a/src/Infrastructure/BotSharp.Core/Routing/Executor/FunctionCallbackExecutor.cs b/src/Infrastructure/BotSharp.Core/Routing/Executor/FunctionCallbackExecutor.cs index 4b208374f..e49ff3ba3 100644 --- a/src/Infrastructure/BotSharp.Core/Routing/Executor/FunctionCallbackExecutor.cs +++ b/src/Infrastructure/BotSharp.Core/Routing/Executor/FunctionCallbackExecutor.cs @@ -1,10 +1,19 @@ -using BotSharp.Abstraction.Routing.Executor; +using BotSharp.Abstraction.Diagnostics; using BotSharp.Abstraction.Functions; +using BotSharp.Abstraction.Routing.Executor; +using System.Diagnostics; +using static BotSharp.Abstraction.Diagnostics.ModelDiagnostics; namespace BotSharp.Core.Routing.Executor; public class FunctionCallbackExecutor : IFunctionExecutor { + /// + /// + /// for function-related activities. + /// + private static readonly ActivitySource s_activitySource = new("BotSharp.Core.Routing.Executor"); + private readonly IFunctionCallback _functionCallback; public FunctionCallbackExecutor(IFunctionCallback functionCallback) @@ -14,7 +23,12 @@ public FunctionCallbackExecutor(IFunctionCallback functionCallback) public async Task ExecuteAsync(RoleDialogModel message) { - return await _functionCallback.Execute(message); + using var activity = s_activitySource.StartFunctionActivity(this._functionCallback.Name, this._functionCallback.Indication); + { + activity?.SetTag("input", message.FunctionArgs); + activity?.SetTag(ModelDiagnosticsTags.AgentId, message.CurrentAgentId); + return await _functionCallback.Execute(message); + } } public async Task GetIndicatorAsync(RoleDialogModel message) diff --git a/src/Infrastructure/BotSharp.Core/Routing/Executor/MCPToolExecutor.cs b/src/Infrastructure/BotSharp.Core/Routing/Executor/MCPToolExecutor.cs index 8cf7d18e5..bf7c4ec90 100644 --- a/src/Infrastructure/BotSharp.Core/Routing/Executor/MCPToolExecutor.cs +++ b/src/Infrastructure/BotSharp.Core/Routing/Executor/MCPToolExecutor.cs @@ -1,6 +1,9 @@ +using BotSharp.Abstraction.Diagnostics; using BotSharp.Abstraction.Routing.Executor; using BotSharp.Core.MCP.Managers; -using ModelContextProtocol.Protocol; +using ModelContextProtocol.Client; +using System.Diagnostics; +using static BotSharp.Abstraction.Diagnostics.ModelDiagnostics; namespace BotSharp.Core.Routing.Executor; @@ -10,6 +13,13 @@ public class McpToolExecutor : IFunctionExecutor private readonly string _mcpServerId; private readonly string _functionName; + /// + /// + /// for function-related activities. 
+ /// + private static readonly ActivitySource s_activitySource = new("BotSharp.Core.Routing.Executor"); + + public McpToolExecutor(IServiceProvider services, string mcpServerId, string functionName) { _services = services; @@ -19,34 +29,37 @@ public McpToolExecutor(IServiceProvider services, string mcpServerId, string fun public async Task ExecuteAsync(RoleDialogModel message) { - try + using var activity = s_activitySource.StartFunctionActivity(this._functionName, $"calling tool {_functionName} of MCP server {_mcpServerId}"); { - // Convert arguments to dictionary format expected by mcpdotnet - Dictionary argDict = JsonToDictionary(message.FunctionArgs); - - var clientManager = _services.GetRequiredService(); - var client = await clientManager.GetMcpClientAsync(_mcpServerId); - - if (client == null) + try + { + activity?.SetTag("input", message.FunctionArgs); + activity?.SetTag(ModelDiagnosticsTags.AgentId, message.CurrentAgentId); + + // Convert arguments to dictionary format expected by mcpdotnet + Dictionary argDict = JsonToDictionary(message.FunctionArgs); + + var clientManager = _services.GetRequiredService(); + var client = await clientManager.GetMcpClientAsync(_mcpServerId); + + // Call the tool through mcpdotnet + var result = await client.CallToolAsync(_functionName, !argDict.IsNullOrEmpty() ? argDict : []); + + // Extract the text content from the result + var json = string.Join("\n", result.Content + .Where(c => c.Type == "text") + .Select(c => c.ToString())); + + message.Content = json; + message.Data = json.JsonContent(); + return true; + } + catch (Exception ex) { - message.Content = $"MCP client for server {_mcpServerId} not found."; + message.Content = $"Error when calling tool {_functionName} of MCP server {_mcpServerId}. {ex.Message}"; + activity?.SetError(ex); return false; } - - // Call the tool through mcpdotnet - var result = await client.CallToolAsync(_functionName, !argDict.IsNullOrEmpty() ? argDict : []); - - // Extract the text content from the result - var json = string.Join("\n", result.Content.Where(c => c is TextContentBlock).Select(c => ((TextContentBlock)c).Text)); - - message.Content = json; - message.Data = json.JsonContent(); - return true; - } - catch (Exception ex) - { - message.Content = $"Error when calling tool {_functionName} of MCP server {_mcpServerId}. 
{ex.Message}"; - return false; } } diff --git a/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeAgent.cs b/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeAgent.cs index fdaddc85e..a3d8261d3 100644 --- a/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeAgent.cs +++ b/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeAgent.cs @@ -1,3 +1,4 @@ +using BotSharp.Abstraction.Diagnostics; using BotSharp.Abstraction.Routing.Models; using BotSharp.Abstraction.Templating; @@ -14,6 +15,8 @@ public async Task InvokeAgent( var agentService = _services.GetRequiredService(); var agent = await agentService.LoadAgent(agentId); + using var activity = ModelDiagnostics.StartAgentInvocationActivity(agentId, agent.Name, agent.Description, agent, dialogs); + Context.IncreaseRecursiveCounter(); if (Context.CurrentRecursionDepth > agent.LlmConfig.MaxRecursionDepth) { @@ -81,7 +84,7 @@ public async Task InvokeAgent( dialogs.Add(message); Context.AddDialogs([message]); } - + activity?.SetAgentResponse(Context.GetDialogs()); return true; } diff --git a/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeFunction.cs b/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeFunction.cs index 3850dcc13..17cd180a3 100644 --- a/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeFunction.cs +++ b/src/Infrastructure/BotSharp.Core/Routing/RoutingService.InvokeFunction.cs @@ -1,3 +1,4 @@ +using BotSharp.Abstraction.Diagnostics; using BotSharp.Abstraction.Routing.Models; using BotSharp.Core.MessageHub; using BotSharp.Core.Routing.Executor; diff --git a/src/Infrastructure/BotSharp.Core/Routing/RoutingService.cs b/src/Infrastructure/BotSharp.Core/Routing/RoutingService.cs index 4e43cbd52..f546b9f68 100644 --- a/src/Infrastructure/BotSharp.Core/Routing/RoutingService.cs +++ b/src/Infrastructure/BotSharp.Core/Routing/RoutingService.cs @@ -1,5 +1,6 @@ using BotSharp.Abstraction.Routing.Models; using BotSharp.Abstraction.Routing.Settings; +using System.Diagnostics; namespace BotSharp.Core.Routing; @@ -105,7 +106,7 @@ public RoutableAgent[] GetRoutableAgents(List profiles) }; var agents = db.GetAgents(filter); - var routableAgents = agents.Where(x => x.Type == AgentType.Task || x.Type == AgentType.Planning).Select(x => new RoutableAgent + var routableAgents = agents.Where(x => x.Type == AgentType.Task || x.Type == AgentType.Planning || x.Type == AgentType.A2ARemote).Select(x => new RoutableAgent { AgentId = x.Id, Description = x.Description, diff --git a/src/Infrastructure/BotSharp.OpenAPI/Controllers/Conversation/ConversationController.cs b/src/Infrastructure/BotSharp.OpenAPI/Controllers/Conversation/ConversationController.cs index a0ca1b424..6a167f31d 100644 --- a/src/Infrastructure/BotSharp.OpenAPI/Controllers/Conversation/ConversationController.cs +++ b/src/Infrastructure/BotSharp.OpenAPI/Controllers/Conversation/ConversationController.cs @@ -432,7 +432,7 @@ await conv.SendMessage(agentId, inputMsg, response.Instruction = msg.Instruction; response.Data = msg.Data; response.States = state.GetStates(); - + await OnChunkReceived(Response, response); }); diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/A2APlugin.cs b/src/Infrastructure/BotSharp.Plugin.A2A/A2APlugin.cs new file mode 100644 index 000000000..78edb6b49 --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/A2APlugin.cs @@ -0,0 +1,37 @@ +using BotSharp.Abstraction.Agents; +using BotSharp.Abstraction.Conversations; +using BotSharp.Abstraction.Functions; +using 
BotSharp.Abstraction.Plugins; +using BotSharp.Abstraction.Settings; +using BotSharp.Plugin.A2A.Functions; +using BotSharp.Plugin.A2A.Hooks; +using BotSharp.Plugin.A2A.Services; +using BotSharp.Plugin.A2A.Settings; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; + +namespace BotSharp.Plugin.A2A; + +public class A2APlugin : IBotSharpPlugin +{ + + public string Id => "058cdf87-fcf3-eda9-915a-565c04bc9f56"; + + public string Name => "A2A Protocol Integration"; + + public string Description => "Enables seamless integration with external agents via the Agent-to-Agent (A2A) protocol."; + + public void RegisterDI(IServiceCollection services, IConfiguration config) + { + services.AddScoped(provider => + { + var settingService = provider.GetRequiredService(); + return settingService.Bind("A2AIntegration"); + }); + + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + } +} diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/BotSharp.Plugin.A2A.csproj b/src/Infrastructure/BotSharp.Plugin.A2A/BotSharp.Plugin.A2A.csproj new file mode 100644 index 000000000..c91cc0d54 --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/BotSharp.Plugin.A2A.csproj @@ -0,0 +1,21 @@ + + + + $(TargetFramework) + $(LangVersion) + $(BotSharpVersion) + $(GeneratePackageOnBuild) + $(SolutionDir)packages + enable + enable + + + + + + + + + + + diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/Functions/A2ADelegationFn.cs b/src/Infrastructure/BotSharp.Plugin.A2A/Functions/A2ADelegationFn.cs new file mode 100644 index 000000000..ea40792c1 --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/Functions/A2ADelegationFn.cs @@ -0,0 +1,70 @@ +using BotSharp.Abstraction.Conversations; +using BotSharp.Abstraction.Conversations.Models; +using BotSharp.Abstraction.Functions; +using BotSharp.Plugin.A2A.Services; +using BotSharp.Plugin.A2A.Settings; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; + +namespace BotSharp.Plugin.A2A.Functions; + +public class A2ADelegationFn : IFunctionCallback +{ + public string Name => "delegate_to_a2a"; + public string Indication => "Connecting to external agent network..."; + + private readonly IA2AService _a2aService; + private readonly A2ASettings _settings; + private readonly IConversationStateService _stateService; + + public A2ADelegationFn(IA2AService a2aService, A2ASettings settings, IConversationStateService stateService) + { + _a2aService = a2aService; + _settings = settings; + _stateService = stateService; + } + + public async Task Execute(RoleDialogModel message) + { + var args = JsonSerializer.Deserialize(message.FunctionArgs); + string queryText = string.Empty; + if (args.TryGetProperty("user_query", out var queryProp)) + { + queryText = queryProp.GetString(); + } + + var agentId = message.CurrentAgentId; + var agentConfig = _settings.Agents.FirstOrDefault(x => x.Id == agentId); + + if (agentConfig == null) + { + message.Content = "System Error: Remote agent configuration not found."; + message.StopCompletion = true; + return false; + } + + var conversationId = _stateService.GetConversationId(); + + try + { + var responseText = await _a2aService.SendMessageAsync( + agentConfig.Endpoint, + queryText, + conversationId, + CancellationToken.None + ); + + message.Content = responseText; + return true; + } + catch (Exception ex) + { + message.Content = $"Communication failure with external agent: 
{ex.Message}"; + return false; + } + } +} diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/Hooks/A2AAgentHook.cs b/src/Infrastructure/BotSharp.Plugin.A2A/Hooks/A2AAgentHook.cs new file mode 100644 index 000000000..5f52ef632 --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/Hooks/A2AAgentHook.cs @@ -0,0 +1,90 @@ +using BotSharp.Abstraction.Agents; +using BotSharp.Abstraction.Agents.Enums; +using BotSharp.Abstraction.Agents.Models; +using BotSharp.Abstraction.Agents.Settings; +using BotSharp.Abstraction.Functions.Models; +using BotSharp.Plugin.A2A.Services; +using BotSharp.Plugin.A2A.Settings; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; + +namespace BotSharp.Plugin.A2A.Hooks; + +public class A2AAgentHook : AgentHookBase +{ + public override string SelfId => string.Empty; + + private readonly A2ASettings _settings; + private readonly IA2AService _iA2AService; + + public A2AAgentHook(IServiceProvider services, IA2AService a2AService, A2ASettings settings) + : base(services, new AgentSettings()) + { + _iA2AService = a2AService; + _settings = settings; + } + + public override bool OnAgentLoading(ref string id) + { + var agentId = id; + var remoteConfig = _settings.Agents.FirstOrDefault(x => x.Id == agentId); + if (remoteConfig != null) + { + return true; + } + return base.OnAgentLoading(ref id); + } + + public override void OnAgentLoaded(Agent agent) + { + // Check if this is an A2A remote agent + if (agent.Type != AgentType.A2ARemote) + { + return; + } + + var remoteConfig = _settings.Agents.FirstOrDefault(x => x.Id == agent.Id); + if (remoteConfig != null) + { + var agentCard = _iA2AService.GetCapabilitiesAsync(remoteConfig.Endpoint).GetAwaiter().GetResult(); + agent.Name = agentCard.Name; + agent.Description = agentCard.Description; + agent.Instruction = $"You are a proxy interface for an external intelligent service named '{agentCard.Name}'. " + + $"Your ONLY goal is to forward the user's request verbatim to the external service. " + + $"You must use the function 'delegate_to_a2a' to communicate with it. " + + $"Do not attempt to answer the question yourself."; + + var properties = new Dictionary + { + { + "user_query", + new + { + type = "string", + description = "The exact user request or task description to be forwarded." 
+ } + } + }; + + var propertiesJson = JsonSerializer.Serialize(properties); + var propertiesDocument = JsonDocument.Parse(propertiesJson); + + agent.Functions.Add(new FunctionDef + { + Name = "delegate_to_a2a", + Description = $"Delegates the task to the external {remoteConfig.Name} via A2A protocol.", + Parameters = new FunctionParametersDef() + { + Type = "object", + Properties = propertiesDocument, + Required = new List { "user_query" } + } + }); + } + base.OnAgentLoaded(agent); + } +} diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/Hooks/A2AConversationHook.cs b/src/Infrastructure/BotSharp.Plugin.A2A/Hooks/A2AConversationHook.cs new file mode 100644 index 000000000..9aa0fe8ce --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/Hooks/A2AConversationHook.cs @@ -0,0 +1,13 @@ +using BotSharp.Abstraction.Conversations; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace BotSharp.Plugin.A2A.Hooks; + +public class A2AConversationHook : ConversationHookBase,IConversationHook +{ + +} diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/Services/A2AService.cs b/src/Infrastructure/BotSharp.Plugin.A2A/Services/A2AService.cs new file mode 100644 index 000000000..3192a3d3f --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/Services/A2AService.cs @@ -0,0 +1,158 @@ +using A2A; +using Microsoft.Extensions.Logging; +using System.Net.ServerSentEvents; +using System.Text.Json; + +namespace BotSharp.Plugin.A2A.Services; + +public class A2AService : IA2AService +{ + private readonly IHttpClientFactory _httpClientFactory; + private readonly ILogger _logger; + private readonly IServiceProvider _services; + + private readonly Dictionary _clientCache = new Dictionary(); + + public A2AService(IHttpClientFactory httpClientFactory, IServiceProvider serviceProvider, ILogger logger) + { + _httpClientFactory = httpClientFactory; + _services = serviceProvider; + _logger = logger; + } + + public async Task GetCapabilitiesAsync(string agentEndpoint, CancellationToken cancellationToken = default) + { + var resolver = new A2ACardResolver(new Uri(agentEndpoint)); + return await resolver.GetAgentCardAsync(); + } + + public async Task SendMessageAsync(string agentEndpoint, string text, string contextId, CancellationToken cancellationToken) + { + + if (!_clientCache.TryGetValue(agentEndpoint, out var client)) + { + HttpClient httpclient = _httpClientFactory.CreateClient(); + + client = new A2AClient(new Uri(agentEndpoint), httpclient); + _clientCache[agentEndpoint] = client; + } + + var messagePayload = new AgentMessage + { + Role = MessageRole.User, + ContextId = contextId, + Parts = new List + { + new TextPart { Text = text } + } + }; + + var sendParams = new MessageSendParams + { + Message = messagePayload + }; + + try + { + _logger.LogInformation($"Sending A2A message to {agentEndpoint}. ContextId: {contextId}"); + var responseBase = await client.SendMessageAsync(sendParams, cancellationToken); + + if (responseBase is AgentMessage responseMsg) + { + if (responseMsg.Parts != null && responseMsg.Parts.Any()) + { + var textPart = responseMsg.Parts.First() as TextPart; + return textPart?.Text ?? 
string.Empty; + } + } + else if( responseBase is AgentTask atask) + { + return $"Task created with ID: {atask.Id}, Status: {atask.Status}"; + } + else + { + return "Unexpected task type."; + } + + return string.Empty; + } + catch (HttpRequestException ex) + { + _logger.LogError(ex, $"Network error communicating with A2A agent at {agentEndpoint}"); + throw new Exception($"Remote agent unavailable: {ex.Message}"); + } + catch (Exception ex) + { + _logger.LogError(ex, $"A2A Protocol error: {ex.Message}"); + throw; + } + } + + public async Task SendMessageStreamingAsync(string endPoint, List parts, Func, Task>? onStreamingEventReceived, CancellationToken cancellationToken = default) + { + A2ACardResolver cardResolver = new(new Uri(endPoint)); + AgentCard agentCard = await cardResolver.GetAgentCardAsync(); + A2AClient client = new A2AClient(new Uri(agentCard.Url)); + + AgentMessage userMessage = new() + { + Role = MessageRole.User, + Parts = parts + }; + + await foreach (SseItem sseItem in client.SendMessageStreamingAsync(new MessageSendParams { Message = userMessage })) + { + await onStreamingEventReceived?.Invoke(sseItem); + } + + Console.WriteLine(" Streaming completed."); + } + + public async Task ListenForTaskEventAsync(string endPoint, string taskId, Func, ValueTask>? onTaskEventReceived = null, CancellationToken cancellationToken = default) + { + + if (onTaskEventReceived == null) + { + return; + } + + A2ACardResolver cardResolver = new(new Uri(endPoint)); + AgentCard agentCard = await cardResolver.GetAgentCardAsync(); + A2AClient client = new A2AClient(new Uri(agentCard.Url)); + + await foreach (SseItem sseItem in client.SubscribeToTaskAsync(taskId)) + { + await onTaskEventReceived.Invoke(sseItem); + Console.WriteLine(" Task event received: " + JsonSerializer.Serialize(sseItem.Data)); + } + + } + + public async Task SetPushNotifications(string endPoint, PushNotificationConfig config, CancellationToken cancellationToken = default) + { + A2ACardResolver cardResolver = new(new Uri(endPoint)); + AgentCard agentCard = await cardResolver.GetAgentCardAsync(); + A2AClient client = new A2AClient(new Uri(agentCard.Url)); + await client.SetPushNotificationAsync(new TaskPushNotificationConfig() + { + PushNotificationConfig = config + }); + } + + public async Task CancelTaskAsync(string endPoint, string taskId, CancellationToken cancellationToken = default) + { + A2ACardResolver cardResolver = new(new Uri(endPoint)); + AgentCard agentCard = await cardResolver.GetAgentCardAsync(); + A2AClient client = new A2AClient(new Uri(agentCard.Url)); + return await client.CancelTaskAsync(taskId); + } + + public async Task GetTaskAsync(string endPoint, string taskId, CancellationToken cancellationToken = default) + { + A2ACardResolver cardResolver = new(new Uri(endPoint)); + AgentCard agentCard = await cardResolver.GetAgentCardAsync(); + A2AClient client = new A2AClient(new Uri(agentCard.Url)); + return await client.GetTaskAsync(taskId); + } + +} diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/Services/IA2AService.cs b/src/Infrastructure/BotSharp.Plugin.A2A/Services/IA2AService.cs new file mode 100644 index 000000000..6632df648 --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/Services/IA2AService.cs @@ -0,0 +1,26 @@ +using A2A; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.ServerSentEvents; +using System.Text; +using System.Threading.Tasks; + +namespace BotSharp.Plugin.A2A.Services; + +public interface IA2AService +{ + Task SendMessageAsync(string 
agentEndpoint, string text, string contextId, CancellationToken cancellationToken = default); + + Task GetCapabilitiesAsync(string agentEndpoint, CancellationToken cancellationToken = default); + + Task SendMessageStreamingAsync(string endPoint, List parts, Func, Task>? onStreamingEventReceived,CancellationToken cancellationToken = default); + + Task ListenForTaskEventAsync(string endPoint, string taskId, Func, ValueTask>? onTaskEventReceived = null, CancellationToken cancellationToken = default); + + Task SetPushNotifications(string endPoint, PushNotificationConfig config, CancellationToken cancellationToken = default); + + Task CancelTaskAsync(string endPoint, string taskId, CancellationToken cancellationToken = default); + + Task GetTaskAsync(string endPoint, string taskId, CancellationToken cancellationToken); +} diff --git a/src/Infrastructure/BotSharp.Plugin.A2A/Settings/A2ASettings.cs b/src/Infrastructure/BotSharp.Plugin.A2A/Settings/A2ASettings.cs new file mode 100644 index 000000000..17079bf1c --- /dev/null +++ b/src/Infrastructure/BotSharp.Plugin.A2A/Settings/A2ASettings.cs @@ -0,0 +1,23 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace BotSharp.Plugin.A2A.Settings; + +public class A2ASettings +{ + public bool Enabled { get; set; } + public int DefaultTimeoutSeconds { get; set; } = 30; + public List Agents { get; set; } = new List(); +} + +public class RemoteAgentConfig +{ + public string Id { get; set; } + public string Name { get; set; } + public string Description { get; set; } + public string Endpoint { get; set; } + public List Capabilities { get; set; } +} diff --git a/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs index dc9a0fbc5..d8f1c8671 100644 --- a/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs +++ b/src/Plugins/BotSharp.Plugin.AzureOpenAI/Providers/Chat/ChatCompletionProvider.cs @@ -1,5 +1,6 @@ #pragma warning disable OPENAI001 using BotSharp.Abstraction.Conversations.Enums; +using BotSharp.Abstraction.Diagnostics; using BotSharp.Abstraction.Files.Utilities; using BotSharp.Abstraction.Hooks; using BotSharp.Abstraction.MessageHub.Models; @@ -7,6 +8,8 @@ using BotSharp.Core.MessageHub; using OpenAI.Chat; using System.ClientModel; +using System.Diagnostics; +using static BotSharp.Abstraction.Diagnostics.ModelDiagnostics; namespace BotSharp.Plugin.AzureOpenAI.Providers.Chat; @@ -35,6 +38,7 @@ public ChatCompletionProvider( public async Task GetChatCompletions(Agent agent, List conversations) { var contentHooks = _services.GetHooks(agent.Id); + var convService = _services.GetService(); // Before chat completion hook foreach (var hook in contentHooks) @@ -49,91 +53,99 @@ public async Task GetChatCompletions(Agent agent, List? response = null; ChatCompletion value = default; RoleDialogModel responseMessage; - - try + using (var activity = ModelDiagnostics.StartCompletionActivity(null, _model, Provider, prompt, convService)) { - response = chatClient.CompleteChat(messages, options); - value = response.Value; + try + { + response = chatClient.CompleteChat(messages, options); + value = response.Value; - var reason = value.FinishReason; - var content = value.Content; - var text = content.FirstOrDefault()?.Text ?? string.Empty; + var reason = value.FinishReason; + var content = value.Content; + var text = content.FirstOrDefault()?.Text ?? 
string.Empty; - if (reason == ChatFinishReason.FunctionCall || reason == ChatFinishReason.ToolCalls) + activity?.SetTag(ModelDiagnosticsTags.FinishReason, reason); + if (reason == ChatFinishReason.FunctionCall || reason == ChatFinishReason.ToolCalls) + { + var toolCall = value.ToolCalls.FirstOrDefault(); + responseMessage = new RoleDialogModel(AgentRole.Function, text) + { + CurrentAgentId = agent.Id, + MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, + ToolCallId = toolCall?.Id, + FunctionName = toolCall?.FunctionName, + FunctionArgs = toolCall?.FunctionArguments?.ToString(), + RenderedInstruction = string.Join("\r\n", renderedInstructions) + }; + + // Somethings LLM will generate a function name with agent name. + if (!string.IsNullOrEmpty(responseMessage.FunctionName)) + { + responseMessage.FunctionName = responseMessage.FunctionName.Split('.').Last(); + } + } + else + { + responseMessage = new RoleDialogModel(AgentRole.Assistant, text) + { + CurrentAgentId = agent.Id, + MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, + RenderedInstruction = string.Join("\r\n", renderedInstructions), + Annotations = value.Annotations?.Select(x => new ChatAnnotation + { + Title = x.WebResourceTitle, + Url = x.WebResourceUri.AbsoluteUri, + StartIndex = x.StartIndex, + EndIndex = x.EndIndex + })?.ToList() + }; + } + } + catch (ClientResultException ex) { - var toolCall = value.ToolCalls.FirstOrDefault(); - responseMessage = new RoleDialogModel(AgentRole.Function, text) + _logger.LogError(ex, ex.Message); + responseMessage = new RoleDialogModel(AgentRole.Assistant, "The response was filtered due to the prompt triggering our content management policy. Please modify your prompt and retry.") { CurrentAgentId = agent.Id, MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, - ToolCallId = toolCall?.Id, - FunctionName = toolCall?.FunctionName, - FunctionArgs = toolCall?.FunctionArguments?.ToString(), RenderedInstruction = string.Join("\r\n", renderedInstructions) }; - - // Somethings LLM will generate a function name with agent name. - if (!string.IsNullOrEmpty(responseMessage.FunctionName)) - { - responseMessage.FunctionName = responseMessage.FunctionName.Split('.').Last(); - } } - else + catch (Exception ex) { - responseMessage = new RoleDialogModel(AgentRole.Assistant, text) + _logger.LogError(ex, ex.Message); + responseMessage = new RoleDialogModel(AgentRole.Assistant, ex.Message) { CurrentAgentId = agent.Id, MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, - RenderedInstruction = string.Join("\r\n", renderedInstructions), - Annotations = value.Annotations?.Select(x => new ChatAnnotation - { - Title = x.WebResourceTitle, - Url = x.WebResourceUri.AbsoluteUri, - StartIndex = x.StartIndex, - EndIndex = x.EndIndex - })?.ToList() + RenderedInstruction = string.Join("\r\n", renderedInstructions) }; } - } - catch (ClientResultException ex) - { - _logger.LogError(ex, ex.Message); - responseMessage = new RoleDialogModel(AgentRole.Assistant, "The response was filtered due to the prompt triggering our content management policy. Please modify your prompt and retry.") - { - CurrentAgentId = agent.Id, - MessageId = conversations.LastOrDefault()?.MessageId ?? 
string.Empty, - RenderedInstruction = string.Join("\r\n", renderedInstructions) - }; - } - catch (Exception ex) - { - _logger.LogError(ex, ex.Message); - responseMessage = new RoleDialogModel(AgentRole.Assistant, ex.Message) - { - CurrentAgentId = agent.Id, - MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, - RenderedInstruction = string.Join("\r\n", renderedInstructions) - }; - } - var tokenUsage = response?.Value?.Usage; - var inputTokenDetails = response?.Value?.Usage?.InputTokenDetails; + var tokenUsage = response?.Value?.Usage; + var inputTokenDetails = response?.Value?.Usage?.InputTokenDetails; - // After chat completion hook - foreach (var hook in contentHooks) - { - await hook.AfterGenerated(responseMessage, new TokenStatsModel + activity?.SetTag(ModelDiagnosticsTags.InputTokens, (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0)); + activity?.SetTag(ModelDiagnosticsTags.OutputTokens, tokenUsage?.OutputTokenCount ?? 0); + activity?.SetTag(ModelDiagnosticsTags.OutputTokens, tokenUsage?.OutputTokenCount ?? 0); + + // After chat completion hook + foreach (var hook in contentHooks) { - Prompt = prompt, - Provider = Provider, - Model = _model, - TextInputTokens = (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0), - CachedTextInputTokens = inputTokenDetails?.CachedTokenCount ?? 0, - TextOutputTokens = tokenUsage?.OutputTokenCount ?? 0 - }); - } + await hook.AfterGenerated(responseMessage, new TokenStatsModel + { + Prompt = prompt, + Provider = Provider, + Model = _model, + TextInputTokens = (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0), + CachedTextInputTokens = inputTokenDetails?.CachedTokenCount ?? 0, + TextOutputTokens = tokenUsage?.OutputTokenCount ?? 
0 + }); + } + activity?.SetTag("output", responseMessage.Content); - return responseMessage; + return responseMessage; + } } public async Task GetChatCompletionsAsync(Agent agent, @@ -167,7 +179,7 @@ public async Task GetChatCompletionsAsync(Agent agent, var tokenUsage = response?.Value?.Usage; var inputTokenDetails = response?.Value?.Usage?.InputTokenDetails; - + // After chat completion hook foreach (var hook in hooks) { diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/BotSharp.Plugin.GiteeAI.csproj b/src/Plugins/BotSharp.Plugin.GiteeAI/BotSharp.Plugin.GiteeAI.csproj new file mode 100644 index 000000000..e3a05dd8e --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/BotSharp.Plugin.GiteeAI.csproj @@ -0,0 +1,31 @@ + + + $(TargetFramework) + enable + enable + $(LangVersion) + true + $(Ai4cVersion) + $(GeneratePackageOnBuild) + $(GenerateDocumentationFile) + true + $(SolutionDir)packages + + + + + false + runtime + + + + + + PreserveNewest + + + + + + + diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/GiteeAiPlugin.cs b/src/Plugins/BotSharp.Plugin.GiteeAI/GiteeAiPlugin.cs new file mode 100644 index 000000000..ef9686482 --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/GiteeAiPlugin.cs @@ -0,0 +1,19 @@ +using BotSharp.Abstraction.Plugins; +using BotSharp.Plugin.GiteeAI.Providers.Chat; +using BotSharp.Plugin.GiteeAI.Providers.Embedding; + +namespace BotSharp.Plugin.GiteeAI; + +public class GiteeAiPlugin : IBotSharpPlugin +{ + public string Id => "59ad4c3c-0b88-3344-ba99-5245ec015938"; + public string Name => "GiteeAI"; + public string Description => "Gitee AI"; + public string IconUrl => "https://ai-assets.gitee.com/_next/static/media/gitee-ai.622edfb0.ico"; + + public void RegisterDI(IServiceCollection services, IConfiguration config) + { + services.AddScoped(); + services.AddScoped(); + } +} diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/Chat/ChatCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/Chat/ChatCompletionProvider.cs new file mode 100644 index 000000000..2b46e83fc --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/Chat/ChatCompletionProvider.cs @@ -0,0 +1,496 @@ +using BotSharp.Abstraction.Agents.Constants; +using BotSharp.Abstraction.Diagnostics; +using BotSharp.Abstraction.Files; +using Microsoft.AspNetCore.Cors.Infrastructure; +using Microsoft.Extensions.Logging; +using OpenAI.Chat; +using System.Diagnostics; +using static BotSharp.Abstraction.Diagnostics.ModelDiagnostics; + +namespace BotSharp.Plugin.GiteeAI.Providers.Chat; + +/// +/// Text chat completion for Model Ark (Gitee AI). +/// +public class ChatCompletionProvider( + ILogger logger, + IServiceProvider services) : IChatCompletion +{ + protected string _model = string.Empty; + + public virtual string Provider => "gitee-ai"; + + public string Model => _model; + + public async Task GetChatCompletions(Agent agent, List conversations) + { + var contentHooks = services.GetServices().ToList(); + var convService = services.GetService(); + + // Before chat completion hook + foreach (var hook in contentHooks) + { + await hook.BeforeGenerating(agent, conversations); + } + + var client = ProviderHelper.GetClient(Provider, _model, services); + var chatClient = client.GetChatClient(_model); + var (prompt, messages, options) = PrepareOptions(agent, conversations); + + using (var activity = ModelDiagnostics.StartCompletionActivity(null, _model, Provider, prompt, convService)) + { + var response = chatClient.CompleteChat(messages, options); + var value = response.Value; + var reason = value.FinishReason; + var content
= value.Content; + var text = content.FirstOrDefault()?.Text ?? string.Empty; + + activity?.SetTag(ModelDiagnosticsTags.FinishReason, reason); + + RoleDialogModel responseMessage; + if (reason == ChatFinishReason.FunctionCall || reason == ChatFinishReason.ToolCalls) + { + var toolCall = value.ToolCalls.FirstOrDefault(); + responseMessage = new RoleDialogModel(AgentRole.Function, text) + { + CurrentAgentId = agent.Id, + MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, + ToolCallId = toolCall?.Id, + FunctionName = toolCall?.FunctionName, + FunctionArgs = toolCall?.FunctionArguments?.ToString() + }; + + // Sometimes the LLM will generate a function name with the agent name. + if (!string.IsNullOrEmpty(responseMessage.FunctionName)) + { + responseMessage.FunctionName = responseMessage.FunctionName.Split('.').Last(); + } + } + else + { + responseMessage = new RoleDialogModel(AgentRole.Assistant, text) + { + CurrentAgentId = agent.Id, + MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, + }; + } + + var tokenUsage = response?.Value?.Usage; + var inputTokenDetails = response?.Value?.Usage?.InputTokenDetails; + + activity?.SetTag(ModelDiagnosticsTags.InputTokens, (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0)); + activity?.SetTag(ModelDiagnosticsTags.OutputTokens, tokenUsage?.OutputTokenCount ?? 0); + + // After chat completion hook + foreach (var hook in contentHooks) + { + await hook.AfterGenerated(responseMessage, new TokenStatsModel + { + Prompt = prompt, + Provider = Provider, + Model = _model, + TextInputTokens = response.Value?.Usage?.InputTokenCount ?? 0, + TextOutputTokens = response.Value?.Usage?.OutputTokenCount ?? 0 + }); + } + activity?.SetTag("output", responseMessage.Content); + return responseMessage; + } + } + + public async Task GetChatCompletionsAsync(Agent agent, List conversations, Func onStreamResponseReceived) + { + var contentHooks = services.GetServices().ToList(); + + // Before chat completion hook + foreach (var hook in contentHooks) + { + await hook.BeforeGenerating(agent, conversations); + } + + StringBuilder? contentBuilder = null; + Dictionary? toolCallIdsByIndex = null; + Dictionary? functionNamesByIndex = null; + Dictionary? functionArgumentBuildersByIndex = null; + + var client = ProviderHelper.GetClient(Provider, _model, services); + var chatClient = client.GetChatClient(_model); + var (prompt, messages, options) = PrepareOptions(agent, conversations); + + var response = chatClient.CompleteChatStreamingAsync(messages, options); + + await foreach (var choice in response) + { + TrackStreamingToolingUpdate(choice.ToolCallUpdates, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + + if (!choice.ContentUpdate.IsNullOrEmpty() && choice.ContentUpdate[0] != null) + { + foreach (var contentPart in choice.ContentUpdate) + { + if (contentPart.Kind == ChatMessageContentPartKind.Text) + { + (contentBuilder ??= new()).Append(contentPart.Text); + } + } + + logger.LogInformation(choice.ContentUpdate[0]?.Text); + + if (!string.IsNullOrEmpty(choice.ContentUpdate[0]?.Text)) + { + var msg = new RoleDialogModel(choice.Role?.ToString() ?? ChatMessageRole.Assistant.ToString(), choice.ContentUpdate[0]?.Text ?? string.Empty); + + await onStreamResponseReceived(msg); + } + } + } + + // Get any response content that was streamed. + string content = contentBuilder?.ToString() ??
string.Empty; + + RoleDialogModel responseMessage = new(ChatMessageRole.Assistant.ToString(), content); + + var tools = ConvertToolCallUpdatesToFunctionToolCalls(ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex); + + foreach (var tool in tools) + { + tool.CurrentAgentId = agent.Id; + tool.MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty; + await onStreamResponseReceived(tool); + } + + if (tools.Length > 0) + { + responseMessage = tools[0]; + } + + return responseMessage; + } + + public async Task GetChatCompletionsAsync(Agent agent, List conversations, Func onMessageReceived, Func onFunctionExecuting) + { + var hooks = services.GetServices().ToList(); + + // Before chat completion hook + foreach (var hook in hooks) + { + await hook.BeforeGenerating(agent, conversations); + } + + var client = ProviderHelper.GetClient(Provider, _model, services); + var chatClient = client.GetChatClient(_model); + var (prompt, messages, options) = PrepareOptions(agent, conversations); + + var response = await chatClient.CompleteChatAsync(messages, options); + var value = response.Value; + var reason = value.FinishReason; + var content = value.Content; + var text = content.FirstOrDefault()?.Text ?? string.Empty; + + var msg = new RoleDialogModel(AgentRole.Assistant, text) + { + CurrentAgentId = agent.Id + }; + + // After chat completion hook + foreach (var hook in hooks) + { + await hook.AfterGenerated(msg, new TokenStatsModel + { + Prompt = prompt, + Provider = Provider, + Model = _model, + TextInputTokens = response.Value?.Usage?.InputTokenCount ?? 0, + TextOutputTokens = response.Value?.Usage?.OutputTokenCount ?? 0 + }); + } + + if (reason == ChatFinishReason.FunctionCall || reason == ChatFinishReason.ToolCalls) + { + var toolCall = value.ToolCalls?.FirstOrDefault(); + logger.LogInformation($"[{agent.Name}]: {toolCall?.FunctionName}({toolCall?.FunctionArguments})"); + + var funcContextIn = new RoleDialogModel(AgentRole.Function, text) + { + CurrentAgentId = agent.Id, + MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, + ToolCallId = toolCall?.Id, + FunctionName = toolCall?.FunctionName, + FunctionArgs = toolCall?.FunctionArguments?.ToString() + }; + + // Sometimes the LLM will generate a function name with the agent name. + if (!string.IsNullOrEmpty(funcContextIn.FunctionName)) + { + funcContextIn.FunctionName = funcContextIn.FunctionName.Split('.').Last(); + } + + // Execute functions + await onFunctionExecuting(funcContextIn); + } + else + { + // Text response received + await onMessageReceived(msg); + } + + return true; + } + + public async Task GetChatCompletionsStreamingAsync(Agent agent, List conversations, Func onMessageReceived) + { + var client = ProviderHelper.GetClient(Provider, _model, services); + var chatClient = client.GetChatClient(_model); + var (prompt, messages, options) = PrepareOptions(agent, conversations); + + var response = chatClient.CompleteChatStreamingAsync(messages, options); + + await foreach (var choice in response) + { + if (choice.FinishReason == ChatFinishReason.FunctionCall || choice.FinishReason == ChatFinishReason.ToolCalls) + { + var update = choice.ToolCallUpdates?.FirstOrDefault()?.FunctionArgumentsUpdate?.ToString() ??
string.Empty; + logger.LogInformation(update); + + await onMessageReceived(new RoleDialogModel(AgentRole.Assistant, update)); + continue; + } + + if (choice.ContentUpdate.IsNullOrEmpty()) continue; + + logger.LogInformation(choice.ContentUpdate[0]?.Text); + + await onMessageReceived(new RoleDialogModel(choice.Role?.ToString() ?? ChatMessageRole.Assistant.ToString(), choice.ContentUpdate[0]?.Text ?? string.Empty)); + } + + return true; + } + + public void SetModelName(string model) + { + _model = model; + } + + protected (string, IEnumerable, ChatCompletionOptions) PrepareOptions(Agent agent, List conversations) + { + var agentService = services.GetRequiredService(); + var state = services.GetRequiredService(); + var fileStorage = services.GetRequiredService(); + var settingsService = services.GetRequiredService(); + var settings = settingsService.GetSetting(Provider, _model); + var allowMultiModal = settings != null && settings.MultiModal; + + var messages = new List(); + float? temperature = float.Parse(state.GetState("temperature", "0.0")); + var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens) + ? tokens + : agent.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN; + + + state.SetState("temperature", temperature.ToString()); + state.SetState("max_tokens", maxTokens.ToString()); + + var options = new ChatCompletionOptions() + { + Temperature = temperature, + MaxOutputTokenCount = maxTokens + }; + + var functions = agent.Functions.Concat(agent.SecondaryFunctions ?? []); + foreach (var function in functions) + { + if (!agentService.RenderFunction(agent, function)) continue; + + var property = agentService.RenderFunctionProperty(agent, function); + + options.Tools.Add(ChatTool.CreateFunctionTool( + functionName: function.Name, + functionDescription: function.Description, + functionParameters: BinaryData.FromObjectAsJson(property))); + } + + if (!string.IsNullOrEmpty(agent.Instruction) || !agent.SecondaryInstructions.IsNullOrEmpty()) + { + var text = agentService.RenderInstruction(agent); + messages.Add(new SystemChatMessage(text)); + } + + if (!string.IsNullOrEmpty(agent.Knowledges)) + { + messages.Add(new SystemChatMessage(agent.Knowledges)); + } + + var filteredMessages = conversations.Select(x => x).ToList(); + var firstUserMsgIdx = filteredMessages.FindIndex(x => x.Role == AgentRole.User); + if (firstUserMsgIdx > 0) + { + filteredMessages = filteredMessages.Where((_, idx) => idx >= firstUserMsgIdx).ToList(); + } + + foreach (var message in filteredMessages) + { + if (message.Role == AgentRole.Function) + { + messages.Add(new AssistantChatMessage(new List + { + ChatToolCall.CreateFunctionToolCall(message.FunctionName, message.FunctionName, BinaryData.FromString(message.FunctionArgs ?? string.Empty)) + })); + + messages.Add(new ToolChatMessage(message.FunctionName, message.Content)); + } + else if (message.Role == AgentRole.User) + { + var text = !string.IsNullOrWhiteSpace(message.Payload) ? 
message.Payload : message.Content; + messages.Add(new UserChatMessage(text)); + } + else if (message.Role == AgentRole.Assistant) + { + messages.Add(new AssistantChatMessage(message.Content)); + } + } + + var prompt = GetPrompt(messages, options); + return (prompt, messages, options); + } + + private string GetPrompt(IEnumerable messages, ChatCompletionOptions options) + { + var prompt = string.Empty; + + if (!messages.IsNullOrEmpty()) + { + // System instruction + var verbose = string.Join("\r\n", messages + .Select(x => x as SystemChatMessage) + .Where(x => x != null) + .Select(x => + { + if (!string.IsNullOrEmpty(x.ParticipantName)) + { + // To display Agent name in log + return $"[{x.ParticipantName}]: {x.Content.FirstOrDefault()?.Text ?? string.Empty}"; + } + return $"{AgentRole.System}: {x.Content.FirstOrDefault()?.Text ?? string.Empty}"; + })); + prompt += $"{verbose}\r\n"; + + prompt += "\r\n[CONVERSATION]"; + verbose = string.Join("\r\n", messages + .Where(x => x as SystemChatMessage == null) + .Select(x => + { + var fnMessage = x as ToolChatMessage; + if (fnMessage != null) + { + return $"{AgentRole.Function}: {fnMessage.Content.FirstOrDefault()?.Text ?? string.Empty}"; + } + + var userMessage = x as UserChatMessage; + if (userMessage != null) + { + var content = x.Content.FirstOrDefault()?.Text ?? string.Empty; + return !string.IsNullOrEmpty(userMessage.ParticipantName) && userMessage.ParticipantName != "route_to_agent" ? + $"{userMessage.ParticipantName}: {content}" : + $"{AgentRole.User}: {content}"; + } + + var assistMessage = x as AssistantChatMessage; + if (assistMessage != null) + { + var toolCall = assistMessage.ToolCalls?.FirstOrDefault(); + return toolCall != null ? + $"{AgentRole.Assistant}: Call function {toolCall?.FunctionName}({toolCall?.FunctionArguments})" : + $"{AgentRole.Assistant}: {assistMessage.Content.FirstOrDefault()?.Text ?? string.Empty}"; + } + + return string.Empty; + })); + prompt += $"\r\n{verbose}\r\n"; + } + + if (!options.Tools.IsNullOrEmpty()) + { + var functions = string.Join("\r\n", options.Tools.Select(fn => + { + return $"\r\n{fn.FunctionName}: {fn.FunctionDescription}\r\n{fn.FunctionParameters}"; + })); + prompt += $"\r\n[FUNCTIONS]{functions}\r\n"; + } + + return prompt; + } + + private static void TrackStreamingToolingUpdate( + IReadOnlyList? updates, + ref Dictionary? toolCallIdsByIndex, + ref Dictionary? functionNamesByIndex, + ref Dictionary? functionArgumentBuildersByIndex) + { + if (updates is null) + { + // Nothing to track. + return; + } + + foreach (var update in updates) + { + // If we have an ID, ensure the index is being tracked. Even if it's not a function update, + // we want to keep track of it so we can send back an error. + if (!string.IsNullOrWhiteSpace(update.ToolCallId)) + { + (toolCallIdsByIndex ??= [])[update.Index] = update.ToolCallId; + } + + // Ensure we're tracking the function's name. + if (!string.IsNullOrWhiteSpace(update.FunctionName)) + { + (functionNamesByIndex ??= [])[update.Index] = update.FunctionName; + } + + // Ensure we're tracking the function's arguments. + if (update.FunctionArgumentsUpdate is not null && !update.FunctionArgumentsUpdate.ToMemory().IsEmpty) + { + if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(update.Index, out StringBuilder? 
arguments)) + { + functionArgumentBuildersByIndex[update.Index] = arguments = new(); + } + + arguments.Append(update.FunctionArgumentsUpdate.ToString()); + } + } + } + + private static RoleDialogModel[] ConvertToolCallUpdatesToFunctionToolCalls( + ref Dictionary? toolCallIdsByIndex, + ref Dictionary? functionNamesByIndex, + ref Dictionary? functionArgumentBuildersByIndex) + { + RoleDialogModel[] toolCalls = []; + if (toolCallIdsByIndex is { Count: > 0 }) + { + toolCalls = new RoleDialogModel[toolCallIdsByIndex.Count]; + + int i = 0; + foreach (KeyValuePair toolCallIndexAndId in toolCallIdsByIndex) + { + string? functionName = null; + StringBuilder? functionArguments = null; + + functionNamesByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionName); + functionArgumentBuildersByIndex?.TryGetValue(toolCallIndexAndId.Key, out functionArguments); + + toolCalls[i] = new RoleDialogModel(AgentRole.Function, string.Empty) + { + FunctionName = functionName ?? string.Empty, + FunctionArgs = functionArguments?.ToString() ?? string.Empty, + }; + i++; + } + + Debug.Assert(i == toolCalls.Length); + } + + return toolCalls; + } + +} diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/Embedding/TextEmbeddingProvider.cs b/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/Embedding/TextEmbeddingProvider.cs new file mode 100644 index 000000000..80a8dbd71 --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/Embedding/TextEmbeddingProvider.cs @@ -0,0 +1,73 @@ +using Microsoft.Extensions.Logging; +using OpenAI.Embeddings; + +namespace BotSharp.Plugin.GiteeAI.Providers.Embedding; + +public class TextEmbeddingProvider( + ILogger logger, + IServiceProvider services) : ITextEmbedding +{ + protected readonly IServiceProvider _services = services; + protected readonly ILogger _logger = logger; + + private const int DEFAULT_DIMENSION = 1024; + protected string _model = "bge-m3"; + + public virtual string Provider => "gitee-ai"; + + public string Model => _model; + + protected int _dimension; + + public async Task GetVectorAsync(string text) + { + var client = ProviderHelper.GetClient(Provider, _model, _services); + var embeddingClient = client.GetEmbeddingClient(_model); + var options = PrepareOptions(); + var response = await embeddingClient.GenerateEmbeddingAsync(text, options); + var value = response.Value; + return value.ToFloats().ToArray(); + } + + public async Task> GetVectorsAsync(List texts) + { + var client = ProviderHelper.GetClient(Provider, _model, _services); + var embeddingClient = client.GetEmbeddingClient(_model); + var options = PrepareOptions(); + var response = await embeddingClient.GenerateEmbeddingsAsync(texts, options); + var value = response.Value; + return value.Select(x => x.ToFloats().ToArray()).ToList(); + } + + public void SetModelName(string model) + { + _model = model; + } + + private EmbeddingGenerationOptions PrepareOptions() + { + return new EmbeddingGenerationOptions + { + Dimensions = GetDimension() + }; + } + + public int GetDimension() + { + var state = _services.GetRequiredService(); + var stateDimension = state.GetState("embedding_dimension"); + var defaultDimension = _dimension > 0 ? _dimension : DEFAULT_DIMENSION; + + if (int.TryParse(stateDimension, out var dimension)) + { + return dimension > 0 ? dimension : defaultDimension; + } + return defaultDimension; + } + + public void SetDimension(int dimension) + { + _dimension = dimension > 0 ? 
dimension : DEFAULT_DIMENSION; + } + +} \ No newline at end of file diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/ProviderHelper.cs b/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/ProviderHelper.cs new file mode 100644 index 000000000..b532e834c --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/Providers/ProviderHelper.cs @@ -0,0 +1,16 @@ +using OpenAI; +using System.ClientModel; + +namespace BotSharp.Plugin.GiteeAI.Providers; + +public static class ProviderHelper +{ + public static OpenAIClient GetClient(string provider, string model, IServiceProvider services) + { + var settingsService = services.GetRequiredService(); + var settings = settingsService.GetSetting(provider, model); + var options = !string.IsNullOrEmpty(settings.Endpoint) ? + new OpenAIClientOptions { Endpoint = new Uri(settings.Endpoint) } : null; + return new OpenAIClient(new ApiKeyCredential(settings.ApiKey), options); + } +} diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/README.md b/src/Plugins/BotSharp.Plugin.GiteeAI/README.md new file mode 100644 index 000000000..5b4d00ff4 --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/README.md @@ -0,0 +1,8 @@ +Model Ark (Gitee AI), hereinafter referred to as Gitee AI, aggregates the latest and most popular AI models, providing a one-stop service for model experience, inference, fine-tuning, and application deployment. It offers a diverse range of computing power options, aiming to help enterprises and developers build AI applications more easily. +ChatCompletions Interface: + +- https://ai.gitee.com/docs/openapi/v1#tag/%E6%96%87%E6%9C%AC%E7%94%9F%E6%88%90/post/chat/completions + +Signature Authentication Method: + +- https://ai.gitee.com/docs/organization/access-token \ No newline at end of file diff --git a/src/Plugins/BotSharp.Plugin.GiteeAI/Using.cs b/src/Plugins/BotSharp.Plugin.GiteeAI/Using.cs new file mode 100644 index 000000000..aa44ad1e2 --- /dev/null +++ b/src/Plugins/BotSharp.Plugin.GiteeAI/Using.cs @@ -0,0 +1,15 @@ +global using BotSharp.Abstraction.Agents; +global using BotSharp.Abstraction.Agents.Enums; +global using BotSharp.Abstraction.Agents.Models; +global using BotSharp.Abstraction.Conversations; +global using BotSharp.Abstraction.Conversations.Models; +global using BotSharp.Abstraction.Loggers; +global using BotSharp.Abstraction.MLTasks; +global using BotSharp.Abstraction.Utilities; +global using Microsoft.Extensions.Configuration; +global using Microsoft.Extensions.DependencyInjection; +global using System; +global using System.Collections.Generic; +global using System.Linq; +global using System.Text; +global using System.Threading.Tasks; diff --git a/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs b/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs index efa12572b..72a0da231 100644 --- a/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs +++ b/src/Plugins/BotSharp.Plugin.OpenAI/Providers/Chat/ChatCompletionProvider.cs @@ -1,8 +1,12 @@ #pragma warning disable OPENAI001 +using BotSharp.Abstraction.Diagnostics; +using BotSharp.Abstraction.Hooks; using BotSharp.Abstraction.MessageHub.Models; using BotSharp.Core.Infrastructures.Streams; using BotSharp.Core.MessageHub; +using Microsoft.AspNetCore.Cors.Infrastructure; using OpenAI.Chat; +using static BotSharp.Abstraction.Diagnostics.ModelDiagnostics; namespace BotSharp.Plugin.OpenAI.Providers.Chat; @@ -34,6 +38,7 @@ public ChatCompletionProvider( public async Task GetChatCompletions(Agent agent, List
conversations) { var contentHooks = _services.GetHooks(agent.Id); + var convService = _services.GetService(); // Before chat completion hook foreach (var hook in contentHooks) @@ -44,76 +49,77 @@ public async Task GetChatCompletions(Agent agent, List new ChatAnnotation + responseMessage = new RoleDialogModel(AgentRole.Assistant, text) { - Title = x.WebResourceTitle, - Url = x.WebResourceUri.AbsoluteUri, - StartIndex = x.StartIndex, - EndIndex = x.EndIndex - })?.ToList() - }; - } + CurrentAgentId = agent.Id, + MessageId = conversations.LastOrDefault()?.MessageId ?? string.Empty, + RenderedInstruction = string.Join("\r\n", renderedInstructions), + Annotations = value.Annotations?.Select(x => new ChatAnnotation + { + Title = x.WebResourceTitle, + Url = x.WebResourceUri.AbsoluteUri, + StartIndex = x.StartIndex, + EndIndex = x.EndIndex + })?.ToList() + }; + } - var tokenUsage = response.Value?.Usage; - var inputTokenDetails = response.Value?.Usage?.InputTokenDetails; + var tokenUsage = response.Value?.Usage; + var inputTokenDetails = response.Value?.Usage?.InputTokenDetails; - // After chat completion hook - foreach (var hook in contentHooks) - { - await hook.AfterGenerated(responseMessage, new TokenStatsModel + activity?.SetTag(ModelDiagnosticsTags.InputTokens, (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0)); + activity?.SetTag(ModelDiagnosticsTags.OutputTokens, tokenUsage?.OutputTokenCount ?? 0); + + // After chat completion hook + foreach (var hook in contentHooks) { - Prompt = prompt, - Provider = Provider, - Model = _model, - TextInputTokens = (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0), - CachedTextInputTokens = inputTokenDetails?.CachedTokenCount ?? 0, - TextOutputTokens = tokenUsage?.OutputTokenCount ?? 0 - }); - } + await hook.AfterGenerated(responseMessage, new TokenStatsModel + { + Prompt = prompt, + Provider = Provider, + Model = _model, + TextInputTokens = (tokenUsage?.InputTokenCount ?? 0) - (inputTokenDetails?.CachedTokenCount ?? 0), + CachedTextInputTokens = inputTokenDetails?.CachedTokenCount ?? 0, + TextOutputTokens = tokenUsage?.OutputTokenCount ??
0 + }); + } + activity?.SetTag("output", responseMessage.Content); - return responseMessage; + return responseMessage; + } } public async Task GetChatCompletionsAsync(Agent agent, diff --git a/src/WebStarter/Program.cs b/src/WebStarter/Program.cs index 2c9c073c2..09a7344c5 100644 --- a/src/WebStarter/Program.cs +++ b/src/WebStarter/Program.cs @@ -1,11 +1,10 @@ +using BotSharp.Abstraction.Messaging.JsonConverters; using BotSharp.Core; using BotSharp.Core.MCP; -using BotSharp.OpenAPI; using BotSharp.Logger; +using BotSharp.OpenAPI; using BotSharp.Plugin.ChatHub; using Serilog; -using BotSharp.Abstraction.Messaging.JsonConverters; -using StackExchange.Redis; var builder = WebApplication.CreateBuilder(args); diff --git a/src/WebStarter/WebStarter.csproj b/src/WebStarter/WebStarter.csproj index c49e28cfc..dda3fc3d8 100644 --- a/src/WebStarter/WebStarter.csproj +++ b/src/WebStarter/WebStarter.csproj @@ -35,7 +35,9 @@ + + diff --git a/src/WebStarter/appsettings.json b/src/WebStarter/appsettings.json index 24aeb5e2e..2fd3536ea 100644 --- a/src/WebStarter/appsettings.json +++ b/src/WebStarter/appsettings.json @@ -6,6 +6,8 @@ } }, "AllowedHosts": "*", + "OTEL_EXPORTER_OTLP_ENDPOINT": "http://localhost:4317", + "OTEL_SERVICE_NAME": "apiservice", "AllowedOrigins": [ "http://localhost:5015", "http://0.0.0.0:5015", @@ -630,6 +632,43 @@ } } ] + }, + { + "Provider": "gitee-ai", + "Models": [ + { + "Name": "DeepSeek-V3_1", + "ApiKey": " ", + "Endpoint": "https://ai.gitee.com/v1/", + "Type": "chat", + "PromptCost": 0.0015, + "CompletionCost": 0.002, + "MaxTokens": 1024, + "Temperature": 0.6 + }, + { + "Name": "GLM-4_5", + "ApiKey": " ", + "Endpoint": "https://ai.gitee.com/v1/", + "Type": "chat", + "PromptCost": 0.0015, + "CompletionCost": 0.002, + "MaxTokens": 1024, + "Temperature": 0.6 + }, + { + "Id": "bge-m3", + "Name": "bge-m3", + "ApiKey": " ", + "Endpoint": "https://ai.gitee.com/v1/embeddings/", + "Type": "embedding", + "Dimension": 1024, + "PromptCost": 0.0015, + "CompletionCost": 0.002, + "MaxTokens": null, + "Temperature": 1.0 + } + ] } ], @@ -1006,7 +1045,24 @@ "Language": "en" } }, + "Langfuse": { + "SecretKey": "sk-lf- ", + "PublicKey": "pk-lf-", + "Host": "https://us.cloud.langfuse.com/api/public/otel/v1/traces" + }, + "A2AIntegration": { + "Enabled": true, + "DefaultTimeoutSeconds": 30, + "Agents": [ + { + "Id": "cdd9023f-a371-407a-43bf-f36ddccce340", + "Name": "SportKiosk", + "Description": "test", + "Endpoint": "http://localhost:5020/" + } + ] + }, "PluginLoader": { "Assemblies": [ "BotSharp.Core", @@ -1014,6 +1070,7 @@ "BotSharp.Core.Crontab", "BotSharp.Core.Realtime", "BotSharp.Logger", + "BotSharp.Plugin.A2A", "BotSharp.Plugin.MongoStorage", "BotSharp.Plugin.Dashboard", "BotSharp.Plugin.OpenAI", @@ -1022,13 +1079,13 @@ "BotSharp.Plugin.GoogleAI", "BotSharp.Plugin.MetaAI", "BotSharp.Plugin.DeepSeekAI", + "BotSharp.Plugin.GiteeAI", "BotSharp.Plugin.MetaMessenger", "BotSharp.Plugin.HuggingFace", "BotSharp.Plugin.KnowledgeBase", "BotSharp.Plugin.Planner", "BotSharp.Plugin.Graph", "BotSharp.Plugin.Qdrant", - "BotSharp.Plugin.Membase", "BotSharp.Plugin.ChatHub", "BotSharp.Plugin.WeChat", "BotSharp.Plugin.PizzaBot", @@ -1042,7 +1099,6 @@ "BotSharp.Plugin.EmailHandler", "BotSharp.Plugin.AudioHandler", "BotSharp.Plugin.ChartHandler", - "BotSharp.Plugin.AudioHandler", "BotSharp.Plugin.ExcelHandler", "BotSharp.Plugin.SqlDriver", "BotSharp.Plugin.TencentCos", diff --git a/tests/BotSharp.Plugin.PizzaBot/BotSharp.Plugin.PizzaBot.csproj b/tests/BotSharp.Plugin.PizzaBot/BotSharp.Plugin.PizzaBot.csproj 
index c056982df..fe2219ba4 100644 --- a/tests/BotSharp.Plugin.PizzaBot/BotSharp.Plugin.PizzaBot.csproj +++ b/tests/BotSharp.Plugin.PizzaBot/BotSharp.Plugin.PizzaBot.csproj @@ -61,6 +61,9 @@ PreserveNewest + + PreserveNewest + PreserveNewest diff --git a/tests/BotSharp.Plugin.PizzaBot/PizzaBotPlugin.cs b/tests/BotSharp.Plugin.PizzaBot/PizzaBotPlugin.cs index 6fa37ce67..15345b56f 100644 --- a/tests/BotSharp.Plugin.PizzaBot/PizzaBotPlugin.cs +++ b/tests/BotSharp.Plugin.PizzaBot/PizzaBotPlugin.cs @@ -15,7 +15,8 @@ public class PizzaBotPlugin : IBotSharpPlugin "b284db86-e9c2-4c25-a59e-4649797dd130", "c2b57a74-ae4e-4c81-b3ad-9ac5bff982bd", "dfd9b46d-d00c-40af-8a75-3fbdc2b89869", - "fe8c60aa-b114-4ef3-93cb-a8efeac80f75" + "fe8c60aa-b114-4ef3-93cb-a8efeac80f75", + "cdd9023f-a371-407a-43bf-f36ddccce340" }; public void RegisterDI(IServiceCollection services, IConfiguration config) diff --git a/tests/BotSharp.Plugin.PizzaBot/data/agents/cdd9023f-a371-407a-43bf-f36ddccce340/agent.json b/tests/BotSharp.Plugin.PizzaBot/data/agents/cdd9023f-a371-407a-43bf-f36ddccce340/agent.json new file mode 100644 index 000000000..ce963da82 --- /dev/null +++ b/tests/BotSharp.Plugin.PizzaBot/data/agents/cdd9023f-a371-407a-43bf-f36ddccce340/agent.json @@ -0,0 +1,14 @@ +{ + "id": "cdd9023f-a371-407a-43bf-f36ddccce340", + "name": "SportKiosk", + "description": "Answers questions about sport events", + "type": "a2a-remote", + "disabled": false, + "isPublic": true, + "profiles": [ "pizza" ], + "labels": [ "experiment" ], + "llmConfig": { + "provider": "openai", + "model": "gpt-5-nano" + } +} \ No newline at end of file
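For reference, a minimal usage sketch of the new gitee-ai chat provider follows, assuming BotSharp.Plugin.GiteeAI is listed under PluginLoader and a "gitee-ai" chat model such as DeepSeek-V3_1 is configured in appsettings.json as above. The IChatCompletion members used here (Provider, SetModelName, GetChatCompletions) come from the provider code in this patch; the helper class name and the Agent object-initializer shape are illustrative assumptions, not part of the patch.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using BotSharp.Abstraction.Agents.Enums;
using BotSharp.Abstraction.Agents.Models;
using BotSharp.Abstraction.Conversations.Models;
using BotSharp.Abstraction.MLTasks;
using Microsoft.Extensions.DependencyInjection;

public static class GiteeAiChatSketch
{
    // Hypothetical helper, not shipped with this patch.
    public static async Task<string> AskAsync(IServiceProvider services)
    {
        // Resolve the chat provider registered by GiteeAiPlugin and select a model
        // declared under the "gitee-ai" provider section of appsettings.json.
        var chat = services.GetServices<IChatCompletion>()
                           .First(x => x.Provider == "gitee-ai");
        chat.SetModelName("DeepSeek-V3_1");

        // Assumed object shapes, mirroring the usages in the provider code above.
        var agent = new Agent
        {
            Id = Guid.NewGuid().ToString(),
            Name = "demo",
            Instruction = "You are a helpful assistant."
        };
        var dialogs = new List<RoleDialogModel>
        {
            new(AgentRole.User, "Hello, Gitee AI!")
        };

        var reply = await chat.GetChatCompletions(agent, dialogs);
        return reply.Content;
    }
}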
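Similarly, a sketch for the bge-m3 embedding model registered by TextEmbeddingProvider. It assumes GetVectorAsync returns a float[] (consistent with the ToFloats().ToArray() call in the provider) and relies on SetDimension falling back to the 1024 default for non-positive values; the helper class name is hypothetical.

using System;
using System.Linq;
using System.Threading.Tasks;
using BotSharp.Abstraction.MLTasks;
using Microsoft.Extensions.DependencyInjection;

public static class GiteeAiEmbeddingSketch
{
    // Hypothetical helper, not shipped with this patch.
    public static async Task<float[]> EmbedAsync(IServiceProvider services, string text)
    {
        // Resolve the embedding provider registered by GiteeAiPlugin.
        var embedding = services.GetServices<ITextEmbedding>()
                                .First(x => x.Provider == "gitee-ai");
        embedding.SetModelName("bge-m3");
        embedding.SetDimension(1024); // non-positive values fall back to DEFAULT_DIMENSION (1024)
        return await embedding.GetVectorAsync(text);
    }
}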