Conflict between Response Format property & function calling
Andres Perez
5
Reputation points
An HTTP 500 server_error is thrown when a ChatCompletion agent uses function calling to retrieve data. The error only happens if the Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIPromptExecutionSettings.ResponseFormat property is enabled in the agent's settings; otherwise, the error does not occur.
Considerations
- I am using a model based on GPT-4o, version 2024-08-06
- Check the following post to get a better context: https://github.com/microsoft/semantic-kernel/issues/9768
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace EntitiesAssistant.Recipes {
    /// <summary>
    /// Console chat agent that answers cooking questions, using a Semantic Kernel
    /// Azure OpenAI chat-completion service plus the <c>recipe</c> plugin for lookups.
    /// </summary>
    public class RecipeCopilot {
        private const string _systemPrompt = "You are an expert recipe assistant." +
            "Your key objective is guide to the user to find the ingredients & steps necessary to prepare the dishes suggested by the user. " +
            "Friendly reminder: You have a repository with various recipes prepared earlier.";

        // Conversation state shared across turns; readonly because the instance is never replaced.
        private readonly ChatHistory _history = new();

        /// <summary>
        /// Builds a kernel wired to the Azure OpenAI deployment described by
        /// <c>ProjectSettings</c>, with the recipe plugin registered under the name "recipe".
        /// </summary>
        private Kernel BuildKernel() {
            var builder = Kernel.CreateBuilder().AddAzureOpenAIChatCompletion(
                deploymentName: ProjectSettings.DeploymentName,
                apiKey: ProjectSettings.ApiKey,
                serviceId: ProjectSettings.ServiceId,
                endpoint: ProjectSettings.Endpoint);
            builder.Plugins.AddFromObject(new RecipePlugin(), pluginName: "recipe");
            return builder.Build();
        }

        /// <summary>
        /// Runs one interactive chat turn: reads the user's request from the console,
        /// invokes the model (with auto tool calling and structured output), and prints
        /// the response. Rethrows any service error after logging diagnostics.
        /// </summary>
        public async Task ChatAsync() {
            Console.WriteLine("What would you like to cook today ?");
            string userInput = Console.ReadLine() ?? "I am not hungry";
            var kernel = BuildKernel();

            OpenAIPromptExecutionSettings settings = new() {
                // FIX: ToolCallBehavior is the legacy function-calling API and conflicts
                // with structured output on newer models, producing an HTTP 500
                // server_error (see semantic-kernel issue #9768). FunctionChoiceBehavior
                // is the current, documented way to combine auto tool invocation with
                // ResponseFormat-based structured output.
                FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
                // Structured output: the model is constrained to the RecipeOutput JSON schema
                // (requires a model supporting json_schema, e.g. gpt-4o 2024-08-06).
                ResponseFormat = typeof(RecipeOutput),
            };

            // Only seed the system prompt once; the original re-added it on every turn,
            // duplicating it in the history.
            if (_history.Count == 0) {
                _history.AddSystemMessage(_systemPrompt);
            }
            _history.AddUserMessage(userInput);

            try {
                var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>(ProjectSettings.ServiceId);
                var response = await chatCompletionService.GetChatMessageContentAsync(_history, settings, kernel);
                Console.WriteLine(response);
            } catch (Exception e) {
                // Surface the last tool result (if any) to aid diagnosing failed turns.
                var toolResponse = _history.LastOrDefault(x => x.Role == AuthorRole.Tool)?.Content;
                if (toolResponse is not null) Console.WriteLine(toolResponse);
                Console.WriteLine($"{e.Message} ::: {e.StackTrace}");
                throw; // preserve the original stack trace
            }
        }
    }
}
using Microsoft.SemanticKernel;
using System.ComponentModel;
namespace EntitiesAssistant.Recipes {
    /// <summary>
    /// Semantic Kernel plugin exposing the internal recipe repository.
    /// Currently backed by a single hard-coded recipe.
    /// </summary>
    public class RecipePlugin {
        // Canned repository content; static readonly since it is immutable shared data.
        private static readonly RecipeOutput _defaultRecipe = new() {
            Name = "Chicken Curry by APG",
            Ingredients = [
                new Ingredient { Name = "Money", Quantity = "5", MeasureUnit = "Dollars" },
            ],
            Instructions = "Step 1: Check your pocket. " +
                "Step 2: Goes to the restaurant. " +
                "Step 3: Place your order. " +
                "Step 4: Pay & enjoy it. "
        };

        /// <summary>
        /// Searches the internal recipe repository and returns the stored recipe.
        /// </summary>
        /// <returns>The default <see cref="RecipeOutput"/> after a simulated lookup delay.</returns>
        [KernelFunction("FindRecipe")]
        [Description("Search for all internal recipes. This is a customized repository for those recipes prepared earlier.")]
        public async Task<RecipeOutput> FindRecipeAsync() {
            // Simulated I/O latency of a repository lookup.
            await Task.Delay(1000);
            // NOTE(review): if the service returns HTTP 500 when structured output
            // (ResponseFormat) is combined with auto function calling, a known workaround
            // is to return the result pre-serialized as a JSON string instead of a
            // complex object:
            //   return System.Text.Json.JsonSerializer.Serialize(_defaultRecipe);
            return _defaultRecipe;
        }
    }
}
**Stack trace**
at Azure.AI.OpenAI.ClientPipelineExtensions.
Sign in to answer