Skip to content

Commit

Permalink
Merge branch 'main' into dependabot/nuget/dotnet/DuckDB.NET.Data.Full…
Browse files Browse the repository at this point in the history
…-0.9.0
  • Loading branch information
markwallace-microsoft authored Oct 4, 2023
2 parents 38b95fb + a7566b6 commit aeafd80
Show file tree
Hide file tree
Showing 62 changed files with 838 additions and 558 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -84,10 +84,13 @@ public static ISKFunction CreateSemanticFunction(
var config = new PromptTemplateConfig
{
Description = description ?? "Generic function, unknown purpose",
Type = "completion",
Completion = requestSettings
};

if (requestSettings is not null)
{
config.ModelSettings.Add(requestSettings);
}

return kernel.CreateSemanticFunction(
promptTemplate: promptTemplate,
config: config,
Expand Down Expand Up @@ -275,11 +278,6 @@ private static ISKFunction CreateSemanticFunction(
string functionName,
SemanticFunctionConfig functionConfig)
{
if (!functionConfig.PromptTemplateConfig.Type.Equals("completion", StringComparison.OrdinalIgnoreCase))
{
throw new SKException($"Function type not supported: {functionConfig.PromptTemplateConfig}");
}

ISKFunction func = SemanticFunction.FromSemanticConfig(
pluginName,
functionName,
Expand All @@ -291,10 +289,10 @@ private static ISKFunction CreateSemanticFunction(
// is invoked manually without a context and without a way to find other functions.
func.SetDefaultFunctionCollection(kernel.Functions);

func.SetAIConfiguration(functionConfig.PromptTemplateConfig.Completion);
func.SetAIConfiguration(functionConfig.PromptTemplateConfig.GetDefaultRequestSettings());

// Note: the service is instantiated using the kernel configuration state when the function is invoked
func.SetAIService(() => kernel.GetService<ITextCompletion>(functionConfig.PromptTemplateConfig.Completion?.ServiceId ?? null));
func.SetAIService(() => kernel.GetService<ITextCompletion>(functionConfig.PromptTemplateConfig.GetDefaultRequestSettings()?.ServiceId ?? null));

return func;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ public static ISKFunction FromSemanticConfig(
functionName: functionName,
loggerFactory: loggerFactory
);
func.SetAIConfiguration(functionConfig.PromptTemplateConfig.Completion);
func.SetAIConfiguration(functionConfig.PromptTemplateConfig.GetDefaultRequestSettings());

return func;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.AI;
using Microsoft.SemanticKernel.Text;
Expand Down Expand Up @@ -63,43 +64,75 @@ public class InputConfig
public int Schema { get; set; } = 1;

/// <summary>
/// Type, such as "completion", "embeddings", etc.
/// Description
/// </summary>
/// <remarks>TODO: use enum</remarks>
[JsonPropertyName("type")]
[JsonPropertyName("description")]
[JsonPropertyOrder(2)]
public string Type { get; set; } = "completion";
public string Description { get; set; } = string.Empty;

/// <summary>
/// Description
/// Input configuration (that is, list of all input parameters).
/// </summary>
[JsonPropertyName("description")]
[JsonPropertyName("input")]
[JsonPropertyOrder(3)]
public string Description { get; set; } = string.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public InputConfig Input { get; set; } = new();

/// <summary>
/// Completion configuration parameters.
/// Model request settings.
/// Initially only a single model request settings is supported.
/// </summary>
[JsonPropertyName("completion")]
[JsonPropertyName("models")]
[JsonPropertyOrder(4)]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public AIRequestSettings? Completion { get; set; }
public List<AIRequestSettings> ModelSettings { get; set; } = new();

/// <summary>
/// Default AI services to use.
/// Return the default <see cref="AIRequestSettings"/>
/// </summary>
[JsonPropertyName("default_services")]
/// <summary>
/// Returns the default <see cref="AIRequestSettings"/>: the first entry of
/// <see cref="ModelSettings"/>, or null when no model settings have been configured.
/// </summary>
public AIRequestSettings? GetDefaultRequestSettings()
{
    // FirstOrDefault() yields null for an empty list, so the return type is
    // annotated as nullable; callers already guard with "?." before dereferencing.
    // (The explicit <AIRequestSettings> type argument was redundant and removed.)
    return this.ModelSettings.FirstOrDefault();
}

#region Obsolete
/// <summary>
/// Type, such as "completion", "embeddings", etc.
/// </summary>
/// <remarks>TODO: use enum</remarks>
[JsonPropertyName("type")]
[JsonPropertyOrder(5)]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public List<string> DefaultServices { get; set; } = new();
[Obsolete("Type property is no longer used. This will be removed in a future release.")]
public string Type { get; set; } = "completion";

/// <summary>
/// Completion configuration parameters (legacy single-model format).
/// Getting returns the default entry of <see cref="ModelSettings"/>;
/// setting a non-null value appends it to <see cref="ModelSettings"/>.
/// </summary>
[JsonPropertyName("completion")]
[JsonPropertyOrder(6)]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
[Obsolete("Completion is no longer supported. Use PromptTemplateConfig.ModelSettings collection instead. This will be removed in a future release.")]
public AIRequestSettings? Completion
{
    get { return this.GetDefaultRequestSettings(); }
    set
    {
        // NOTE(review): repeated assignments accumulate entries in ModelSettings
        // rather than replacing the previous value; this matches the one-shot
        // deserialization back-compat path this property exists for.
        if (value is not null)
        {
            this.ModelSettings.Add(value);
        }
    }
}

/// <summary>
/// Default AI services to use.
/// </summary>
[JsonPropertyName("default_services")]
[JsonPropertyOrder(7)]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
[Obsolete("DefaultServices property is no longer used. This will be removed in a future release.")]
public List<string> DefaultServices { get; set; } = new();
#endregion

/// <summary>
/// Creates a prompt template configuration from JSON.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.AI.OpenAI;
using Microsoft.SemanticKernel.SemanticFunctions;
using Xunit;

namespace SemanticKernel.Functions.UnitTests.SemanticFunctions;

public class PromptTemplateConfigTests
{
    [Fact]
    public void DeserializingDoNotExpectChatSystemPromptToExist()
    {
        // Arrange: a payload that carries no "chat_system_prompt" entry.
        var payload = @"{
""max_tokens"": 60,
""temperature"": 0.5,
""top_p"": 0.0,
""presence_penalty"": 0.0,
""frequency_penalty"": 0.0
}";

        // Act
        var settings = JsonSerializer.Deserialize<OpenAIRequestSettings>(payload);

        // Assert: the property falls back to its built-in default value.
        Assert.NotNull(settings);
        Assert.NotNull(settings.ChatSystemPrompt);
        Assert.Equal("Assistant is a large language model.", settings.ChatSystemPrompt);
    }

    [Fact]
    public void DeserializingExpectChatSystemPromptToExists()
    {
        // Arrange: a payload that explicitly sets "chat_system_prompt".
        var payload = @"{
""max_tokens"": 60,
""temperature"": 0.5,
""top_p"": 0.0,
""presence_penalty"": 0.0,
""frequency_penalty"": 0.0,
""chat_system_prompt"": ""I am a prompt""
}";

        // Act
        var settings = JsonSerializer.Deserialize<OpenAIRequestSettings>(payload);

        // Assert: the supplied value wins over the default.
        Assert.NotNull(settings);
        Assert.NotNull(settings.ChatSystemPrompt);
        Assert.Equal("I am a prompt", settings.ChatSystemPrompt);
    }

    [Fact]
    public void DeserializingExpectMultipleModels()
    {
        // Arrange: the new "models" array format with two entries.
        var payload = @"
{
""schema"": 1,
""description"": """",
""models"":
[
{
""model_id"": ""gpt-4"",
""max_tokens"": 200,
""temperature"": 0.2,
""top_p"": 0.0,
""presence_penalty"": 0.0,
""frequency_penalty"": 0.0,
""stop_sequences"":
[
""Human"",
""AI""
]
},
{
""model_id"": ""gpt-3.5_turbo"",
""max_tokens"": 256,
""temperature"": 0.3,
""top_p"": 0.0,
""presence_penalty"": 0.0,
""frequency_penalty"": 0.0,
""stop_sequences"":
[
""Human"",
""AI""
]
}
]
}
";

        // Act
        var config = JsonSerializer.Deserialize<PromptTemplateConfig>(payload);

        // Assert: both model entries land in ModelSettings.
        Assert.NotNull(config);
        Assert.NotNull(config.ModelSettings);
        Assert.Equal(2, config.ModelSettings.Count);
    }

    [Fact]
    public void DeserializingExpectCompletion()
    {
        // Arrange: a single-entry "models" array, read back through the legacy property.
        var payload = @"
{
""schema"": 1,
""description"": """",
""models"":
[
{
""model_id"": ""gpt-4"",
""max_tokens"": 200,
""temperature"": 0.2,
""top_p"": 0.0,
""presence_penalty"": 0.0,
""frequency_penalty"": 0.0,
""stop_sequences"":
[
""Human"",
""AI""
]
}
]
}
";

        // Act
        var config = JsonSerializer.Deserialize<PromptTemplateConfig>(payload);

        // Assert: the obsolete Completion property mirrors the first model entry.
        Assert.NotNull(config);
#pragma warning disable CS0618 // Ensure backward compatibility
        Assert.NotNull(config.Completion);
        Assert.Equal("gpt-4", config.Completion.ModelId);
#pragma warning restore CS0618 // Ensure backward compatibility
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,10 @@ public async Task ItUsesChatSystemPromptWhenProvidedAsync(string providedSystemC
.Build();

var templateConfig = new PromptTemplateConfig();
templateConfig.Completion = new OpenAIRequestSettings()
templateConfig.ModelSettings.Add(new OpenAIRequestSettings()
{
ChatSystemPrompt = providedSystemChatPrompt
};
});

var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName");

Expand Down Expand Up @@ -130,7 +130,7 @@ public async Task ItUsesServiceIdWhenProvidedAsync()
.Build();

var templateConfig = new PromptTemplateConfig();
templateConfig.Completion = new AIRequestSettings() { ServiceId = "service1" };
templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = "service1" });
var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName");

// Act
Expand All @@ -154,7 +154,7 @@ public async Task ItFailsIfInvalidServiceIdIsProvidedAsync()
.Build();

var templateConfig = new PromptTemplateConfig();
templateConfig.Completion = new AIRequestSettings() { ServiceId = "service3" };
templateConfig.ModelSettings.Add(new AIRequestSettings() { ServiceId = "service3" });
var func = kernel.CreateSemanticFunction("template", templateConfig, "functionName", "pluginName");

// Act
Expand Down
56 changes: 56 additions & 0 deletions dotnet/src/IntegrationTests/Plugins/SamplePluginsTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Linq;
using Microsoft.SemanticKernel;
using Xunit;

namespace SemanticKernel.IntegrationTests.Plugins;
/// <summary>
/// Verifies that the shipped sample plugins/skills can be imported and that every
/// imported function exposes request settings parsed from its config file.
/// </summary>
public class SamplePluginsTests
{
    [Fact]
    public void CanLoadSamplePluginsRequestSettings()
    {
        // Arrange
        var kernel = new KernelBuilder().Build();

        // Act
        TestHelpers.ImportAllSamplePlugins(kernel);

        // Assert
        AssertAllFunctionsHaveMaxTokens(kernel);
    }

    [Fact]
    // Including this to ensure backward compatibility as tools like Prompt Factory still use the old format
    public void CanLoadSampleSkillsCompletions()
    {
        // Arrange
        var kernel = new KernelBuilder().Build();

        // Act
        TestHelpers.ImportAllSampleSkills(kernel);

        // Assert
        AssertAllFunctionsHaveMaxTokens(kernel);
    }

    // Shared assertion: every imported function carries request settings with a
    // "max_tokens" entry. Extracted because both tests asserted the same thing
    // verbatim; the fixed count below tracks the sample set shipped in the repo.
    private static void AssertAllFunctionsHaveMaxTokens(IKernel kernel)
    {
        Assert.NotNull(kernel.Functions);
        var functionViews = kernel.Functions.GetFunctionViews();
        Assert.NotNull(functionViews);
        Assert.Equal(48, functionViews.Count); // currently we have 48 sample functions
        foreach (var view in functionViews)
        {
            var function = kernel.Functions.GetFunction(view.PluginName, view.Name);
            Assert.NotNull(function);
            Assert.NotNull(function.RequestSettings);
            Assert.True(function.RequestSettings.ExtensionData.ContainsKey("max_tokens"));
        }
    }
}
Loading

0 comments on commit aeafd80

Please sign in to comment.