@rkttu
Created January 12, 2025 09:26
LINQPad + macOS + Ollama + LG EXAONE + C#
<Query Kind="Statements">
  <NuGetReference Prerelease="true">Microsoft.SemanticKernel.Connectors.Ollama</NuGetReference>
  <Namespace>Microsoft.SemanticKernel</Namespace>
  <Namespace>Microsoft.Extensions.DependencyInjection</Namespace>
  <Namespace>Microsoft.Extensions.Logging</Namespace>
  <Namespace>Microsoft.SemanticKernel.ChatCompletion</Namespace>
  <Namespace>Microsoft.SemanticKernel.Connectors.Ollama</Namespace>
  <Namespace>System.Text.Json.Serialization</Namespace>
  <Namespace>System.ComponentModel</Namespace>
  <Namespace>System.Text</Namespace>
  <Namespace>System.Threading.Tasks</Namespace>
  <Namespace>OllamaSharp</Namespace>
  <IncludeAspNet>true</IncludeAspNet>
</Query>
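// NOTE: this query assumes a local Ollama server is already running and listening on
// http://127.0.0.1:11434 (Ollama's default port), for example started with "ollama serve".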
#pragma warning disable SKEXP0001
#pragma warning disable SKEXP0070
var builder = Kernel.CreateBuilder();
// The Ollama model tag to pull and chat with
var modelName = "exaone3.5";
var apiClient = new OllamaApiClient(new Uri("http://127.0.0.1:11434", UriKind.Absolute));
// Pull the model if it is not present locally, streaming progress to the console
await foreach (var eachResult in apiClient.PullModelAsync(modelName).ConfigureAwait(false))
{
    if (eachResult == null)
        continue;

    await Console.Out.WriteLineAsync($"{eachResult.Status}: {eachResult.Percent:#,#0.00}%".AsMemory()).ConfigureAwait(false);
}
apiClient.SelectedModel = modelName;

// Register the Ollama chat completion connector and console logging with the kernel
builder.AddOllamaChatCompletion(apiClient);
builder.Services.AddLogging(services => services.AddConsole());
// Build the kernel
var kernel = builder.Build();
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
// Create a history to store the conversation
var history = new ChatHistory();
// Initiate a back-and-forth chat; an empty line (or EOF) ends the session
while (true)
{
    // Collect user input
    Console.Write("User > ");
    var userInput = Console.ReadLine();

    if (string.IsNullOrWhiteSpace(userInput))
        break;

    // Add the user message to the conversation history
    history.AddUserMessage(userInput);

    // Stream the response from the AI, echoing each fragment as it arrives
    var assistantReply = new StringBuilder();
    await foreach (var eachFragment in chatCompletionService.GetStreamingChatMessageContentsAsync(
        history,
        kernel: kernel).ConfigureAwait(false))
    {
        Console.Write(eachFragment.Content);
        assistantReply.Append(eachFragment.Content);
    }

    // Record the completed assistant reply as a single history entry
    history.AddAssistantMessage(assistantReply.ToString());
    Console.WriteLine();
}
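For a one-shot, non-streaming exchange, the same IChatCompletionService also exposes GetChatMessageContentAsync. A minimal sketch, reusing the history, chatCompletionService, and kernel built above (the prompt text is only a placeholder):

// Non-streaming variant: send one message and print the complete reply
history.AddUserMessage("Introduce yourself in one sentence."); // placeholder prompt
var reply = await chatCompletionService.GetChatMessageContentAsync(history, kernel: kernel).ConfigureAwait(false);
Console.WriteLine(reply.Content);
history.AddAssistantMessage(reply.Content ?? string.Empty);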