.Net: Memory Plugin extraction - Part 2 #3092

Merged
merged 9 commits on Oct 9, 2023
Changes from 4 commits
@@ -149,7 +149,7 @@ private static async Task EmailSamplesWithRecallAsync()
Console.WriteLine("======== Sequential Planner - Find and Execute Saved Plan ========");

// Save the plan for future use
var semanticMemory = GetMemory();
var semanticMemory = InitializeMemory();
await semanticMemory.SaveInformationAsync(
"plans",
id: Guid.NewGuid().ToString(),
@@ -228,7 +228,8 @@ private static async Task MemorySampleAsync()
{
Console.WriteLine("======== Sequential Planner - Create and Execute Plan using Memory ========");

var kernel = InitializeKernelWithMemory();
var kernel = InitializeKernel();
var memory = InitializeMemory();

string folder = RepoFiles.SamplePluginsPath();
kernel.ImportSemanticFunctionsFromDirectory(folder,
@@ -251,7 +252,7 @@ private static async Task MemorySampleAsync()
var goal = "Create a book with 3 chapters about a group of kids in a club called 'The Thinking Caps.'";

// IMPORTANT: To use memory and embeddings to find relevant plugins in the planner, set the 'Memory' property on the planner config.
var planner = new SequentialPlanner(kernel, new SequentialPlannerConfig { SemanticMemoryConfig = new() { RelevancyThreshold = 0.5, Memory = kernel.Memory } });
var planner = new SequentialPlanner(kernel, new SequentialPlannerConfig { SemanticMemoryConfig = new() { RelevancyThreshold = 0.5, Memory = memory } });

var plan = await planner.CreatePlanAsync(goal);
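With memory decoupled from the kernel, a plan created here can be persisted through the same standalone memory object that the "plans" collection earlier in this file uses. A minimal sketch, assuming plan.ToJson() for serialization; only the memory call itself appears in this diff:

// Sketch: save the new plan so the "Find and Execute Saved Plan" sample above can recall it.
// plan.ToJson() is an assumption for illustration.
await memory.SaveInformationAsync(
    "plans",
    text: plan.ToJson(),
    id: Guid.NewGuid().ToString());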

@@ -274,7 +275,7 @@ private static IKernel InitializeKernelAndPlanner(out SequentialPlanner planner,
return kernel;
}

private static IKernel InitializeKernelWithMemory()
private static IKernel InitializeKernel()
{
// IMPORTANT: Register an embedding generation service and a memory store. The Planner will
// use these to generate and store embeddings for the function descriptions.
@@ -288,24 +289,22 @@ private static IKernel InitializeKernelWithMemory()
TestConfiguration.AzureOpenAIEmbeddings.DeploymentName,
TestConfiguration.AzureOpenAIEmbeddings.Endpoint,
TestConfiguration.AzureOpenAIEmbeddings.ApiKey)
.WithMemoryStorage(new VolatileMemoryStore())
.Build();

return kernel;
}

private static ISemanticTextMemory GetMemory(IKernel? kernel = null)
private static SemanticTextMemory InitializeMemory()
{
if (kernel is not null)
{
return kernel.Memory;
}
var memoryStorage = new VolatileMemoryStore();

var textEmbeddingGenerator = new Microsoft.SemanticKernel.Connectors.AI.OpenAI.TextEmbedding.AzureTextEmbeddingGeneration(
modelId: TestConfiguration.AzureOpenAIEmbeddings.DeploymentName,
endpoint: TestConfiguration.AzureOpenAIEmbeddings.Endpoint,
apiKey: TestConfiguration.AzureOpenAIEmbeddings.ApiKey);

var memory = new SemanticTextMemory(memoryStorage, textEmbeddingGenerator);

return memory;
}
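InitializeMemory() now assembles a SemanticTextMemory from its own embedding generator and store rather than reading kernel.Memory. A minimal usage sketch built only from the SaveInformationAsync/SearchAsync calls already shown in this diff; the collection name and text are illustrative:

// Usage sketch for the standalone memory (collection name and content are illustrative).
var memory = InitializeMemory();
await memory.SaveInformationAsync("sample-notes", text: "Planners can locate relevant plugins via embeddings.", id: "note-1");
var matches = memory.SearchAsync("sample-notes", "How do planners find plugins?", limit: 1, minRelevanceScore: 0.5);
await foreach (var match in matches)
{
    Console.WriteLine(match.Metadata.Text);
}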

47 changes: 27 additions & 20 deletions dotnet/samples/KernelSyntaxExamples/Example14_SemanticMemory.cs
@@ -4,6 +4,7 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.AI.Embeddings;
using Microsoft.SemanticKernel.Connectors.Memory.AzureCognitiveSearch;
using Microsoft.SemanticKernel.Memory;
using Microsoft.SemanticKernel.Plugins.Memory;
@@ -35,13 +36,19 @@ public static async Task RunAsync()
* need to worry about embedding generation.
*/

var kernelWithACS = Kernel.Builder
.WithLoggerFactory(ConsoleLogger.LoggerFactory)
var kernel = Kernel.Builder
.WithOpenAITextEmbeddingGenerationService("text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey)
.WithMemoryStorage(new AzureCognitiveSearchMemoryStore(TestConfiguration.ACS.Endpoint, TestConfiguration.ACS.ApiKey))
.Build();

await RunExampleAsync(kernelWithACS);
var embeddingGeneration = kernel.GetService<ITextEmbeddingGeneration>();

var memoryWithACS = new MemoryBuilder()
.WithLoggerFactory(ConsoleLogger.LoggerFactory)
.WithTextEmbeddingGeneration(embeddingGeneration)
.WithMemoryStore(new AzureCognitiveSearchMemoryStore(TestConfiguration.ACS.Endpoint, TestConfiguration.ACS.ApiKey))
.Build();

await RunExampleAsync(memoryWithACS);

Console.WriteLine("====================================================");
Console.WriteLine("======== Semantic Memory (volatile, in RAM) ========");
@@ -56,20 +63,20 @@ public static async Task RunAsync()
* or implement your connectors for Pinecone, Vespa, Postgres + pgvector, SQLite VSS, etc.
*/

var kernelWithCustomDb = Kernel.Builder
var memoryWithCustomDb = new MemoryBuilder()
.WithLoggerFactory(ConsoleLogger.LoggerFactory)
.WithOpenAITextEmbeddingGenerationService("ada", "text-embedding-ada-002", TestConfiguration.OpenAI.ApiKey)
.WithMemoryStorage(new VolatileMemoryStore())
.WithTextEmbeddingGeneration(embeddingGeneration)
.WithMemoryStore(new VolatileMemoryStore())
.Build();

await RunExampleAsync(kernelWithCustomDb);
await RunExampleAsync(memoryWithCustomDb);
}
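The MemoryBuilder pipeline accepts any IMemoryStore, so switching the backing database is a one-line change. A minimal sketch in which VolatileMemoryStore stands in for a real connector (Pinecone, Postgres + pgvector, SQLite VSS, ...); the variable names are illustrative:

// Sketch: same builder, different store. Replace VolatileMemoryStore with any
// IMemoryStore implementation to target a real vector database.
IMemoryStore customStore = new VolatileMemoryStore();
var memoryWithSwappedStore = new MemoryBuilder()
    .WithLoggerFactory(ConsoleLogger.LoggerFactory)
    .WithTextEmbeddingGeneration(embeddingGeneration)
    .WithMemoryStore(customStore)
    .Build();
await RunExampleAsync(memoryWithSwappedStore);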

public static async Task RunExampleAsync(IKernel kernel)
public static async Task RunExampleAsync(ISemanticTextMemory memory)
{
await StoreMemoryAsync(kernel);
await StoreMemoryAsync(memory);

await SearchMemoryAsync(kernel, "How do I get started?");
await SearchMemoryAsync(memory, "How do I get started?");

/*
Output:
@@ -86,7 +93,7 @@ public static async Task RunExampleAsync(IKernel kernel)

*/

await SearchMemoryAsync(kernel, "Can I build a chat with SK?");
await SearchMemoryAsync(memory, "Can I build a chat with SK?");

/*
Output:
Expand All @@ -104,26 +111,26 @@ public static async Task RunExampleAsync(IKernel kernel)
*/
}

private static async Task SearchMemoryAsync(IKernel kernel, string query)
private static async Task SearchMemoryAsync(ISemanticTextMemory memory, string query)
{
Console.WriteLine("\nQuery: " + query + "\n");

var memories = kernel.Memory.SearchAsync(MemoryCollectionName, query, limit: 2, minRelevanceScore: 0.5);
var memoryResults = memory.SearchAsync(MemoryCollectionName, query, limit: 2, minRelevanceScore: 0.5);

int i = 0;
await foreach (MemoryQueryResult memory in memories)
await foreach (MemoryQueryResult memoryResult in memoryResults)
{
Console.WriteLine($"Result {++i}:");
Console.WriteLine(" URL: : " + memory.Metadata.Id);
Console.WriteLine(" Title : " + memory.Metadata.Description);
Console.WriteLine(" Relevance: " + memory.Relevance);
Console.WriteLine(" URL: : " + memoryResult.Metadata.Id);
Console.WriteLine(" Title : " + memoryResult.Metadata.Description);
Console.WriteLine(" Relevance: " + memoryResult.Relevance);
Console.WriteLine();
}

Console.WriteLine("----------------------");
}

private static async Task StoreMemoryAsync(IKernel kernel)
private static async Task StoreMemoryAsync(ISemanticTextMemory memory)
{
/* Store some data in the semantic memory.
*
Expand All @@ -138,7 +145,7 @@ private static async Task StoreMemoryAsync(IKernel kernel)
var i = 0;
foreach (var entry in githubFiles)
{
await kernel.Memory.SaveReferenceAsync(
await memory.SaveReferenceAsync(
collection: MemoryCollectionName,
externalSourceName: "GitHub",
externalId: entry.Key,
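The SaveReferenceAsync call is truncated at this point in the diff view. A hedged sketch of a complete call; the description and text parameter names are assumptions about the ISemanticTextMemory API, and the values are illustrative stand-ins for the githubFiles entries:

// Hedged sketch of a full call; `description` and `text` parameter names are assumptions,
// and the values are illustrative.
await memory.SaveReferenceAsync(
    collection: MemoryCollectionName,
    externalSourceName: "GitHub",
    externalId: "https://github.com/microsoft/semantic-kernel/blob/main/README.md",
    description: "README: Setup and getting started with Semantic Kernel",
    text: "README: Setup and getting started with Semantic Kernel");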
@@ -152,7 +152,7 @@ private static async Task RunWithStoreAsync(IMemoryStore memoryStore, Cancellati

// The combination of the text embedding generator and the memory store makes up the 'SemanticTextMemory' object used to
// store and retrieve memories.
using SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator);
SemanticTextMemory textMemory = new(memoryStore, embeddingGenerator);

/////////////////////////////////////////////////////////////////////////////////////////////////////
// PART 1: Store and retrieve memories using the ISemanticTextMemory (textMemory) object.
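Dropping the using statement means the same textMemory instance can outlive this scope and be shared with other components. A minimal sketch, assuming a kernel is available in this sample; TextMemoryPlugin and ImportFunctions appear elsewhere in this diff, and the plugin name is illustrative:

// Sketch: hand the same memory instance to the TextMemoryPlugin
// (assumes a kernel is in scope; the plugin name "memory" is illustrative).
kernel.ImportFunctions(new TextMemoryPlugin(textMemory), "memory");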
26 changes: 20 additions & 6 deletions dotnet/samples/KernelSyntaxExamples/Example31_CustomPlanner.cs
@@ -7,6 +7,8 @@
using System.Xml;
using System.Xml.XPath;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AI.OpenAI;
using Microsoft.SemanticKernel.Memory;
using Microsoft.SemanticKernel.Orchestration;
using Microsoft.SemanticKernel.Planning;
using Microsoft.SemanticKernel.Plugins.Core;
@@ -24,16 +26,17 @@ public static async Task RunAsync()
{
Console.WriteLine("======== Custom Planner - Create and Execute Markup Plan ========");
IKernel kernel = InitializeKernel();
ISemanticTextMemory memory = InitializeMemory();

// ContextQuery is part of the QAPlugin
IDictionary<string, ISKFunction> qaPlugin = LoadQAPlugin(kernel);
SKContext context = CreateContextQueryContext(kernel);

// Create a memory store using the VolatileMemoryStore and the embedding generator registered in the kernel
kernel.ImportFunctions(new TextMemoryPlugin(kernel.Memory));
kernel.ImportFunctions(new TextMemoryPlugin(memory));

// Setup defined memories for recall
await RememberFactsAsync(kernel);
await RememberFactsAsync(kernel, memory);

// MarkupPlugin named "markup"
var markup = kernel.ImportFunctions(new MarkupPlugin(), "markup");
@@ -85,9 +88,9 @@ private static SKContext CreateContextQueryContext(IKernel kernel)
return context;
}

private static async Task RememberFactsAsync(IKernel kernel)
private static async Task RememberFactsAsync(IKernel kernel, ISemanticTextMemory memory)
{
kernel.ImportFunctions(new TextMemoryPlugin(kernel.Memory));
kernel.ImportFunctions(new TextMemoryPlugin(memory));

List<string> memoriesToSave = new()
{
@@ -105,7 +108,7 @@ private static async Task RememberFactsAsync(IKernel kernel)

foreach (var memoryToSave in memoriesToSave)
{
await kernel.Memory.SaveInformationAsync("contextQueryMemories", memoryToSave, Guid.NewGuid().ToString());
await memory.SaveInformationAsync("contextQueryMemories", memoryToSave, Guid.NewGuid().ToString());
}
}
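RememberFactsAsync now writes to the standalone memory rather than kernel.Memory, so the stored facts can be checked against the same object. A minimal sketch; the query text and relevance threshold are illustrative:

// Sketch: confirm the saved facts are recallable from the standalone memory
// (query text and threshold are illustrative).
var recalled = memory.SearchAsync("contextQueryMemories", "facts for the context query", limit: 2, minRelevanceScore: 0.5);
await foreach (var fact in recalled)
{
    Console.WriteLine("Recalled: " + fact.Metadata.Text);
}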

@@ -136,7 +139,18 @@ private static IKernel InitializeKernel()
TestConfiguration.AzureOpenAIEmbeddings.DeploymentName,
TestConfiguration.AzureOpenAI.Endpoint,
TestConfiguration.AzureOpenAI.ApiKey)
.WithMemoryStorage(new VolatileMemoryStore())
.Build();
}

private static ISemanticTextMemory InitializeMemory()
{
return new MemoryBuilder()
.WithLoggerFactory(ConsoleLogger.LoggerFactory)
.WithAzureTextEmbeddingGenerationService(
TestConfiguration.AzureOpenAIEmbeddings.DeploymentName,
TestConfiguration.AzureOpenAI.Endpoint,
TestConfiguration.AzureOpenAI.ApiKey)
.WithMemoryStore(new VolatileMemoryStore())
.Build();
}
}
27 changes: 2 additions & 25 deletions dotnet/samples/KernelSyntaxExamples/Example42_KernelBuilder.cs
@@ -78,7 +78,8 @@ public static Task RunAsync()
endpoint: azureOpenAIEndpoint,
apiKey: azureOpenAIKey,
loggerFactory: loggerFactory);
using var memory = new SemanticTextMemory(memoryStorage, textEmbeddingGenerator);

var memory = new SemanticTextMemory(memoryStorage, textEmbeddingGenerator);
var plugins = new FunctionCollection();
var templateEngine = new BasicPromptTemplateEngine(loggerFactory);

@@ -104,30 +105,6 @@ public static Task RunAsync()
// The kernel builder purpose is to simplify this process, automating how dependencies
// are connected, still allowing to customize parts of the composition.

// Example: how to use a custom memory and configure Azure OpenAI
var kernel4 = Kernel.Builder
.WithLoggerFactory(NullLoggerFactory.Instance)
.WithMemory(memory)
.WithAzureChatCompletionService(
deploymentName: azureOpenAIChatCompletionDeployment,
endpoint: azureOpenAIEndpoint,
apiKey: azureOpenAIKey)
.Build();

// Example: how to use a custom memory storage
var kernel6 = Kernel.Builder
.WithLoggerFactory(NullLoggerFactory.Instance)
.WithMemoryStorage(memoryStorage) // Custom memory storage
.WithAzureChatCompletionService(
deploymentName: azureOpenAIChatCompletionDeployment,
endpoint: azureOpenAIEndpoint,
apiKey: azureOpenAIKey) // This will be used when using AI completions
.WithAzureTextEmbeddingGenerationService(
deploymentName: azureOpenAIEmbeddingDeployment,
endpoint: azureOpenAIEndpoint,
apiKey: azureOpenAIKey) // This will be used when indexing memory records
.Build();
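The removed examples attached memory and memory storage directly to the kernel builder; after this change that wiring moves to MemoryBuilder. A minimal sketch of the replacement composition, reusing the memoryStorage and textEmbeddingGenerator constructed earlier in this sample (the pattern mirrors the other samples in this diff):

// Sketch: memory is now composed separately from the kernel.
var standaloneMemory = new MemoryBuilder()
    .WithLoggerFactory(NullLoggerFactory.Instance)
    .WithTextEmbeddingGeneration(textEmbeddingGenerator)
    .WithMemoryStore(memoryStorage)
    .Build();

var kernelWithoutMemory = Kernel.Builder
    .WithLoggerFactory(NullLoggerFactory.Instance)
    .WithAzureChatCompletionService(
        deploymentName: azureOpenAIChatCompletionDeployment,
        endpoint: azureOpenAIEndpoint,
        apiKey: azureOpenAIKey)
    .Build();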

// ==========================================================================================================
// The AI services are defined with the builder

@@ -20,6 +20,7 @@
</PropertyGroup>

<ItemGroup>
<ProjectReference Include="..\..\Plugins\Plugins.Memory\Plugins.Memory.csproj" />
<ProjectReference Include="..\..\SemanticKernel.Core\SemanticKernel.Core.csproj" />
</ItemGroup>
