I have this code that works:
/// <summary>
/// Initializes the chat client: normalizes the model name, builds an
/// <see cref="HttpClient"/> for the Ollama server, registers a Semantic Kernel
/// Ollama chat-completion service, and seeds the chat history with the system prompt.
/// </summary>
/// <param name="model">Ollama model name; falls back to <c>DefaultModel</c> when null or empty.</param>
/// <param name="systemPrompt">System prompt added to <c>History</c>; falls back to the current <c>SystemPrompt</c>.</param>
/// <param name="endpoint">Base address of the Ollama server; falls back to <c>DefaultUri</c>.</param>
/// <param name="timeout">HTTP request timeout; falls back to <c>DefaultHttpTimeout</c>.</param>
public ChatOllama(string? model = null, string? systemPrompt = null, Uri? endpoint = null, TimeSpan? timeout = null)
{
    SystemPrompt = systemPrompt ?? SystemPrompt;

    // Model names are machine identifiers, not UI text: use the invariant
    // culture (CA1304) instead of the culture-sensitive ToLower(), and trim
    // before lower-casing.
    string ollamaModel = string.IsNullOrEmpty(model)
        ? DefaultModel
        : model.Trim().ToLowerInvariant();

    // NOTE(review): the field `ollamaEndpoint` holds an HttpClient, not a Uri —
    // consider renaming it (e.g. `ollamaClient`) across the class. The original
    // also declared a local `Uri ollamaEndpoint` that collided with it; the
    // fallbacks are inlined here to remove that confusion.
    this.ollamaEndpoint = new HttpClient
    {
        Timeout = timeout ?? DefaultHttpTimeout,
        BaseAddress = endpoint ?? DefaultUri,
    };

    var builder = Kernel.CreateBuilder();
    builder.Services.AddOllamaChatCompletion(
        ollamaModel,
        httpClient: this.ollamaEndpoint
    );
    var kernel = builder.Build();
    m_chatService = kernel.GetRequiredService<IChatCompletionService>();

    History.AddSystemMessage(SystemPrompt);
}
How can I set the Ollama temperature when connecting to Ollama via Microsoft's Semantic Kernel library?