Skip to content

Commit fdbb504

Browse files
committed
Presidio sample working with Analyzer + Anonymizer
1 parent 3cd48fe commit fdbb504

File tree

5 files changed

+80
-11
lines changed

5 files changed

+80
-11
lines changed

Applying-Responsible-Secure-AI/Samples/PromptFilteringPresidio/Core/Models/PresidioAnalyzerConfig.cs

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,5 +2,13 @@
22

33
internal class PresidioAnalyzerConfig
{
    /// <summary>
    /// Turns the analyzer filter on or off.
    /// </summary>
    public bool Enabled { get; set; }

    /// <summary>
    /// Minimum confidence score at which a detection is treated as PII;
    /// <c>null</c> means no threshold is applied.
    /// </summary>
    public double? ScoreThreshold { get; set; }
}
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
namespace Sample;

internal class PresidioAnonymizerConfig
{
    /// <summary>
    /// Turns the anonymizer filter on or off.
    /// </summary>
    public bool Enabled { get; set; }

    /// <summary>
    /// Per-entity anonymizer rules applied to the rendered prompt,
    /// keyed by analyzer entity type (e.g. person, phone number).
    /// </summary>
    public Dictionary<string, PresidioTextAnonymizer> Anonymizers { get; set; } = [];
}

Applying-Responsible-Secure-AI/Samples/PromptFilteringPresidio/Filters/PresidioPromptAnalyzerFilter.cs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@ public async Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRe
1313
{
1414
await next(context);
1515

16+
if (!config.Enabled) { return; }
17+
1618
// Get rendered prompt
1719
var prompt = context.RenderedPrompt!;
1820

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,41 +1,41 @@
1-
using Microsoft.Extensions.Logging;
21
using Microsoft.SemanticKernel;
32
using Sample;
43

54
/// <summary>
/// Filter which uses the Presidio Text Analyzer and Text Anonymizer to detect PII in the
/// rendered prompt and rewrite it according to the configured anonymizer rules before the
/// prompt is sent to the LLM.
/// </summary>
internal sealed class PromptAnonymizerFilter(
    PresidioTextAnalyzerService analyzerService,
    PresidioTextAnonymizerService anonymizerService,
    PresidioAnonymizerConfig config) : IPromptRenderFilter
{
    /// <summary>
    /// Runs after prompt rendering; when the filter is enabled, replaces
    /// <see cref="PromptRenderContext.RenderedPrompt"/> with its anonymized form.
    /// </summary>
    public async Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
    {
        await next(context);

        // Anonymization can be toggled at runtime through the shared config instance.
        if (!config.Enabled) { return; }

        // Get rendered prompt
        var prompt = context.RenderedPrompt!;

        Console.ForegroundColor = ConsoleColor.Cyan;

        try
        {
            // Call analyzer to detect PII
            var analyzerResults = await analyzerService.AnalyzeAsync(new PresidioTextAnalyzerRequest { Text = prompt });

            // Call anonymizer to update the prompt by following specified anonymizer rules.
            // Pass analyzer results received on previous step.
            var anonymizerResult = await anonymizerService.AnonymizeAsync(new PresidioTextAnonymizerRequest
            {
                Text = prompt,
                AnalyzerResults = analyzerResults,
                Anonymizers = config.Anonymizers
            });

            Console.WriteLine($"Anonymized prompt: \n{anonymizerResult.Text}\n");

            // Update prompt in context to override the prompt without PII before it goes to LLM
            context.RenderedPrompt = anonymizerResult.Text;
        }
        finally
        {
            // Fix: restore the console color even when one of the Presidio service calls
            // throws; previously an exception left the console stuck on cyan.
            Console.ResetColor();
        }
    }
}

Applying-Responsible-Secure-AI/Samples/PromptFilteringPresidio/Program.cs

Lines changed: 46 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,40 @@
11
using Microsoft.Extensions.Configuration;
22
using Microsoft.Extensions.DependencyInjection;
33
using Microsoft.SemanticKernel;
4+
using Microsoft.SemanticKernel.Connectors.OpenAI;
45
using Sample;
56

67
var apiKey = new ConfigurationBuilder().AddUserSecrets<Program>().Build()["OpenAI:ApiKey"]!;
78
var builder = Kernel.CreateBuilder();
89
var presidioAnalyzerEndpoint = new Uri("http://localhost:5002");
910
var presidioAnonymizerEndpoint = new Uri("http://localhost:5001");
11+
var analyzerConfig = new PresidioAnalyzerConfig() { Enabled = true, ScoreThreshold = 0.9 };
12+
var anonymizerConfig = new PresidioAnonymizerConfig()
13+
{
14+
Enabled = false,
15+
Anonymizers =
16+
{
17+
[PresidioAnalyzerEntityType.PhoneNumber] = new PresidioTextAnonymizer { Type = PresidioAnonymizerType.Redact },
18+
[PresidioAnalyzerEntityType.Person] = new PresidioTextAnonymizer { Type = PresidioAnonymizerType.Replace, NewValue = "ANONYMIZED" }
19+
}
20+
};
1021

22+
// Add Presidio Text Analyzer service and configure HTTP client for it
1123
builder.Services.AddHttpClient<PresidioTextAnalyzerService>(client => { client.BaseAddress = presidioAnalyzerEndpoint; });
1224

25+
// Add Presidio Text Anonymizer service and configure HTTP client for it
26+
builder.Services.AddHttpClient<PresidioTextAnonymizerService>(client => { client.BaseAddress = presidioAnonymizerEndpoint; });
27+
1328
// Change confidence score threshold ratio value from 0 to 1 during testing to see how the logic will behave.
14-
builder.Services.AddSingleton(new PresidioAnalyzerConfig() { ScoreThreshold = 0.9 });
29+
builder.Services.AddSingleton(anonymizerConfig);
30+
builder.Services.AddSingleton(analyzerConfig);
1531

1632
// Add prompt filter to analyze rendered prompt for PII before sending it to LLM.
1733
builder.Services.AddSingleton<IPromptRenderFilter, PromptAnalyzerFilter>();
1834

35+
// Add prompt filter to anonymize rendered prompt before sending it to LLM.
36+
builder.Services.AddSingleton<IPromptRenderFilter, PromptAnonymizerFilter>();
37+
1938
// Add OpenAI chat completion service
2039
builder.AddOpenAIChatCompletion("gpt-4o-mini", apiKey);
2140

@@ -41,4 +60,30 @@
4160
var prompt = "Hi, can you help me?";
4261
Console.WriteLine($"User > {prompt}");
4362
var result = await kernel.InvokePromptAsync(prompt);
63+
Console.WriteLine($"Assistant > {result}");
64+
65+
analyzerConfig.Enabled = false;
66+
67+
Console.WriteLine("\n=== Using prompt anonymizer ===\n\n");
68+
69+
anonymizerConfig.Enabled = true;
70+
71+
// Define instructions for LLM how to react when certain conditions are met for demonstration purposes
72+
var executionSettings = new OpenAIPromptExecutionSettings
73+
{
74+
ChatSystemPrompt = "If prompt does not contain first and last names - return 'true'."
75+
};
76+
77+
prompt = """
78+
79+
| Name | Phone number | Position |
80+
|---|---|---|
81+
| John Smith | +1 (123) 456-7890 | Developer |
82+
| Alice Doe | +1 (987) 654-3120 | Manager |
83+
| Emily Davis | +1 (555) 555-5555 | Designer |
84+
"""
85+
;
86+
87+
Console.WriteLine($"User > {prompt}\n");
88+
result = await kernel.InvokePromptAsync(prompt, new(executionSettings));
4489
Console.WriteLine($"Assistant > {result}");

0 commit comments

Comments
 (0)