Merge branch 'master' into v0.20-json-schema
Oceania2018 committed Dec 1, 2023
2 parents 56f99c2 + 5bb1765 commit 9cc9839
Showing 8 changed files with 55 additions and 29 deletions.
24 changes: 24 additions & 0 deletions docs/architecture/logging.md
@@ -0,0 +1,24 @@
# Logging

## Settings
To configure logging, set the following flags under the `Conversation` section. Each flag controls whether specific content is displayed or recorded during a conversation (see the JSON example and the settings sketch below).

* `ShowVerboseLog`: print conversation details, such as the prompt, to the console.
* `EnableLlmCompletionLog`: log LLM completion results, e.g., the real-time prompt sent to the LLM and the response generated by the LLM.
* `EnableExecutionLog`: log details after events, e.g., receiving a message, executing a function, generating a response, etc.


```json
"Conversation": {
"ShowVerboseLog": false,
"EnableLlmCompletionLog": false,
"EnableExecutionLog": true
}
```
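
These flags bind to the `ConversationSetting` options that hooks such as `VerboseLogHook` (see the diff further down) read. A minimal sketch of how that settings class might look; the exact shape is an assumption, since only `ShowVerboseLog` appears in the code changed by this commit:

```csharp
// Hypothetical sketch only: the real class lives in BotSharp.Abstraction and
// may contain more members. EnableLlmCompletionLog and EnableExecutionLog are
// assumed from the JSON keys above.
public class ConversationSetting
{
    public bool ShowVerboseLog { get; set; } = false;
    public bool EnableLlmCompletionLog { get; set; } = false;
    public bool EnableExecutionLog { get; set; } = true;
}
```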

### Usage
To enable logging, add the following line to `Program.cs`.

```csharp
builder.Services.AddBotSharpLogger(builder.Configuration);
```
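
For context, this call is typically placed with the other service registrations in `Program.cs`. A minimal sketch assuming a standard ASP.NET Core host; the surrounding lines are illustrative and not part of this commit:

```csharp
var builder = WebApplication.CreateBuilder(args);

// Other BotSharp service registrations would normally appear here (assumed).

// Registers the logging hooks (e.g. VerboseLogHook) against the configuration,
// so the Conversation flags above take effect.
builder.Services.AddBotSharpLogger(builder.Configuration);

var app = builder.Build();
app.Run();
```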
1 change: 1 addition & 0 deletions docs/index.rst
@@ -102,6 +102,7 @@ The main documentation for the site is organized into the following sections:
architecture/plugin
architecture/hooks
architecture/routing
architecture/logging
architecture/data-persistence

If you feel that this project is helpful to you, please Star us on the project, we will be very grateful.

This file was deleted.

@@ -6,7 +6,7 @@ public static IServiceCollection AddBotSharpLogger(this IServiceCollection servi
{
services.AddScoped<IContentGeneratingHook, CommonContentGeneratingHook>();
services.AddScoped<IContentGeneratingHook, TokenStatsConversationHook>();
services.AddScoped<IVerboseLogHook, VerboseLogHook>();
services.AddScoped<IContentGeneratingHook, VerboseLogHook>();
return services;
}
}
@@ -7,10 +7,6 @@ public CommonContentGeneratingHook(IServiceProvider services)
_services = services;
}

/// <summary>
/// After content generated.
/// </summary>
/// <returns></returns>
public async Task AfterGenerated(RoleDialogModel message, TokenStatsModel tokenStats)
{
SaveLlmCompletionLog(message, tokenStats);
33 changes: 29 additions & 4 deletions src/Infrastructure/BotSharp.Logger/Hooks/VerboseLogHook.cs
@@ -1,20 +1,45 @@
using BotSharp.Abstraction.Agents;
using BotSharp.Abstraction.Agents.Enums;

namespace BotSharp.Logger.Hooks;

public class VerboseLogHook : IVerboseLogHook
public class VerboseLogHook : IContentGeneratingHook
{
private readonly ConversationSetting _convSettings;
private readonly ILogger<VerboseLogHook> _logger;
private readonly IServiceProvider _services;

public VerboseLogHook(ConversationSetting convSettings, ILogger<VerboseLogHook> logger)
public VerboseLogHook(
ConversationSetting convSettings,
IServiceProvider services,
ILogger<VerboseLogHook> logger)
{
_convSettings = convSettings;
_services = services;
_logger = logger;
}

public void GenerateLog(string text)
public async Task BeforeGenerating(Agent agent, List<RoleDialogModel> conversations)
{
if (!_convSettings.ShowVerboseLog) return;

var dialog = conversations.Last();
var log = $"{dialog.Role}: {dialog.Content}";
_logger.LogInformation(log);
}

public async Task AfterGenerated(RoleDialogModel message, TokenStatsModel tokenStats)
{
if (!_convSettings.ShowVerboseLog) return;

_logger.LogInformation(text);
var agentService = _services.GetRequiredService<IAgentService>();
var agent = await agentService.LoadAgent(message.CurrentAgentId);

var log = message.Role == AgentRole.Function ?
$"[{agent.Name}]: {message.FunctionName}({message.FunctionArgs})" :
$"[{agent.Name}]: {message.Content}";

_logger.LogInformation(tokenStats.Prompt);
_logger.LogInformation(log);
}
}
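
The change above retargets `VerboseLogHook` from the removed `IVerboseLogHook` interface to the shared `IContentGeneratingHook`. Judging by the methods implemented here and in `CommonContentGeneratingHook`, the hook surface looks roughly like the sketch below; this is inferred from usage in this diff, and the real interface may declare more members or default implementations:

```csharp
// Inferred sketch, not the actual BotSharp.Abstraction definition.
public interface IContentGeneratingHook
{
    // Invoked with the target agent and dialog history before the LLM call.
    Task BeforeGenerating(Agent agent, List<RoleDialogModel> conversations);

    // Invoked with the generated message and token usage after the LLM call.
    Task AfterGenerated(RoleDialogModel message, TokenStatsModel tokenStats);
}
```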
@@ -4,7 +4,6 @@
using BotSharp.Abstraction.Agents.Models;
using BotSharp.Abstraction.Conversations;
using BotSharp.Abstraction.Conversations.Models;
using BotSharp.Abstraction.Conversations.Settings;
using BotSharp.Abstraction.Loggers;
using BotSharp.Abstraction.MLTasks;
using BotSharp.Plugin.AzureOpenAI.Settings;
@@ -39,7 +38,6 @@ public ChatCompletionProvider(AzureOpenAiSettings settings,
public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> conversations)
{
var contentHooks = _services.GetServices<IContentGeneratingHook>().ToList();
var logHook = _services.GetService<IVerboseLogHook>();

// Before chat completion hook
Task.WaitAll(contentHooks.Select(hook =>
@@ -75,11 +73,6 @@ public RoleDialogModel GetChatCompletions(Agent agent, List<RoleDialogModel> con
}
}

var log = responseMessage.Role == AgentRole.Function ?
$"[{agent.Name}]: {responseMessage.FunctionName}({responseMessage.FunctionArgs})" :
$"[{agent.Name}]: {responseMessage.Content}";
logHook?.GenerateLog(log);

// After chat completion hook
Task.WaitAll(contentHooks.Select(hook =>
hook.AfterGenerated(responseMessage, new TokenStatsModel
@@ -192,7 +185,6 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
protected (string, ChatCompletionsOptions) PrepareOptions(Agent agent, List<RoleDialogModel> conversations)
{
var agentService = _services.GetRequiredService<IAgentService>();
var logHook = _services.GetService<IVerboseLogHook>();

var chatCompletionsOptions = new ChatCompletionsOptions();

@@ -248,7 +240,6 @@ public async Task<bool> GetChatCompletionsStreamingAsync(Agent agent, List<RoleD
// chatCompletionsOptions.PresencePenalty = 0;

var prompt = GetPrompt(chatCompletionsOptions);
logHook?.GenerateLog(prompt);

return (prompt, chatCompletionsOptions);
}
@@ -3,7 +3,6 @@
using System;
using System.Threading.Tasks;
using BotSharp.Plugin.AzureOpenAI.Settings;
using Microsoft.Extensions.Logging;
using BotSharp.Abstraction.Conversations;
using Microsoft.Extensions.DependencyInjection;
using BotSharp.Abstraction.Conversations.Models;
@@ -32,7 +31,6 @@ public TextCompletionProvider(IServiceProvider services,
public async Task<string> GetCompletion(string text, string agentId, string messageId)
{
var contentHooks = _services.GetServices<IContentGeneratingHook>().ToList();
var logHook = _services.GetService<IVerboseLogHook>();

// Before chat completion hook
var agent = new Agent()
@@ -63,7 +61,6 @@ public async Task<string> GetCompletion(string text, string agentId, string mess
MaxTokens = 256,
};
completionsOptions.StopSequences.Add($"{AgentRole.Assistant}:");
logHook?.GenerateLog(text);

var state = _services.GetRequiredService<IConversationStateService>();
var temperature = float.Parse(state.GetState("temperature", "0.5"));
@@ -80,8 +77,6 @@ public async Task<string> GetCompletion(string text, string agentId, string mess
completion += t.Text;
};

logHook?.GenerateLog(completion);

// After chat completion hook
var responseMessage = new RoleDialogModel(AgentRole.Assistant, completion)
{
