%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart LR
user(["User"]) -- input --> llm["LLM"]
llm -- output --> user
llm@{ shape: hex}
// Build a kernel backed by a local Ollama model.
var kernel = Kernel.CreateBuilder()
    .AddOllamaChatCompletion("gemma3:4b", new Uri("http://localhost:11434"))
    .Build();

// BUG FIX: GetRequiredService needs the service type argument
// (same pattern as the ChatHistory example later in this file).
var chat = kernel.GetRequiredService<IChatCompletionService>();

// Minimal REPL: read a question, send it to the model, print the reply.
while (true)
{
    Console.Write("You: ");
    var question = Console.ReadLine();
    var result = await chat.GetChatMessageContentsAsync(question ?? "");

    // BUG FIX: '+' binds tighter than '??', so the original coalescing applied
    // to the already-concatenated (never-null) string; parenthesize Content.
    Console.WriteLine("LLM: " + (result.First().Content ?? ""));
}
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart LR
user(["User"]) -- input --> history
history["ChatHistory"] -- context --> llm["LLM"]
llm -- add output --> history
history -- output --> user
history@{ shape: cyl}
llm@{ shape: hex}
// Build a kernel backed by a local Ollama model.
var kernel = Kernel.CreateBuilder()
.AddOllamaChatCompletion("gemma3:4b", new Uri("http://localhost:11434"))
.Build();
// Resolve the chat service explicitly by its interface.
var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
// ChatHistory accumulates the conversation; the system message sets the persona.
var chatHistory = new ChatHistory();
chatHistory.AddSystemMessage(
"""
You are a helpful metal music expert.
Your task is to provide the best music advice and facts about metal music for the user.
Only metal music of course. Hell yeah! 🤘
"""
);
// User turn supplied as a content-item collection (here a single TextContent).
chatHistory.AddUserMessage([
new TextContent("Which band invented metal? Just give the band name, no explanation.")
]);
// Sends the whole history as context and gets one assistant message back.
var chatResult = await chatCompletionService.GetChatMessageContentAsync(chatHistory);
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart LR
user(["User"]) -- input --> template
template["Template"] --> llm["LLM"]
llm -- output --> user
template@{ shape: doc}
llm@{ shape: hex}
---
name: Metal_music_assistant
description: A prompt that leads to proper music advice 🎸
model:
api: chat
configuration:
name: gemma3:4b
sample:
question: "Which band invented metal?"
---
system:
You are a helpful metal music expert.
Your task is to provide the best music advice and facts about metal music for the user.
Only metal music of course. Hell yeah! 🤘
Never give an explanation unless explicitly asked.
user:
{{question}}
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart LR
user(["User"]) -- input --> llm["LLM"]
llm -- output --> user
llm -- call --> tool
llm@{ shape: hex}
tool@{ shape: lin-rect}
// Plugin exposing a single tool the LLM can call to "play" a song.
public class MusicPlayerPlugin
{
    // Advertised to the model under the function name "play_song".
    [KernelFunction("play_song")]
    public void PlaySong(string artist, string song) =>
        Console.WriteLine($"MusicPlayerPlugin: Playing {song} by {artist}");
}
// Kernel wired to a larger local model (tool calling needs a capable model).
var kernelBuilder = Kernel.CreateBuilder()
.AddOllamaChatCompletion("llama3.1:8b", new Uri("http://localhost:11434"));
// NOTE(review): the plugin is registered under the name "PlaySong" although it
// is the whole MusicPlayerPlugin — presumably intentional for the demo; confirm,
// since the model sees tools as "<pluginName>_<functionName>".
kernelBuilder.Plugins.AddFromType<MusicPlayerPlugin>("PlaySong");
var kernel = kernelBuilder.Build();
// Register the content filter so it runs on every prompt render.
_kernel.PromptRenderFilters.Add(new FightClubFilter());
try
{
var result = await _kernel.InvokeAsync(_function, arguments);
return result.ToString();
}
catch (ContentException ex)
{
// The filter signals a blocked prompt by throwing; surface its message
// to the caller instead of the model output.
return ex.Message;
}
// Prompt-render filter that blocks any prompt mentioning Fight Club.
public class FightClubFilter : IPromptRenderFilter
{
    // BUG FIX: bare 'Func' is not a valid delegate type; the interface signature
    // takes Func<PromptRenderContext, Task> for the 'next' continuation.
    public async Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
    {
        // Let the rest of the pipeline render the prompt first, then inspect it.
        await next(context);

        var prompt = context.RenderedPrompt;
        if (prompt is not null &&
            prompt.Contains("Fight Club", StringComparison.InvariantCultureIgnoreCase))
        {
            // Caught by the ContentException handler around InvokeAsync.
            throw new ContentException("We don't talk about Fight Club.");
        }
    }
}
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart LR
user(["User"]) -- input --> llm["LLM"]
llm -- output --> user
llm -- call --> mcpclient["MCP (tool) Client"]
mcpclient -- http/stdio --> mcpserver["MCP Server"]
llm@{ shape: hex}
// Configure a kernel that talks to a local Ollama model.
var kernelBuilder = Kernel.CreateBuilder()
    .AddOllamaChatCompletion("llama3.1:8b", new Uri("http://localhost:11434"));

// Connect to the MCP server over streamable HTTP (SSE transport).
var transport = new SseClientTransport(new SseClientTransportOptions
{
    Endpoint = new Uri("http://localhost:3001"),
    UseStreamableHttp = true,
});
var mcpClient = await McpClientFactory.CreateAsync(transport);

// Discover the server's tools so they can be exposed to the LLM.
var tools = await mcpClient.ListToolsAsync();

// BUG FIX: the original used 'kernel' one line before declaring it;
// build the kernel first, then register the MCP tools as kernel functions.
var kernel = kernelBuilder.Build();
kernel.Plugins.AddFromFunctions("McpTools", tools.Select(f => f.AsKernelFunction()));
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart LR
user(["User"]) -- input --> p1[/embed/]
p1 -- input --> embedllm
embedllm -- vector --> p1
p1 -- vector --> db[(Store)]
db -- best matches --> p2
user -- input --> p2[/create prompt/]
p2 -- prompt -->llm
llm -- output --> user
llm["Query LLM"]
embedllm["Embed LLM"]
embedllm@{ shape: hex}
llm@{ shape: hex}
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart TD
i(["input"]) --> a1["Agent 1"]
i --> a2["Agent 2"]
i --> a3["Agent 3"]
a1 --> c["Collector (aggregates)"]
a2 --> c
a3 --> c
c --> o(["output"])
a1@{ shape: hex}
a2@{ shape: hex}
a3@{ shape: hex}
Kernel kernel = ...;

// Two domain experts that answer the same question independently (fan-out).
ChatCompletionAgent physicist = new ChatCompletionAgent
{
    Name = "PhysicsExpert",
    // BUG FIX: object-initializer entries are comma-separated; the original
    // was missing the ',' after Instructions (here and for the chemist).
    Instructions = "You are an expert in physics. You answer from physics perspective.",
    Kernel = kernel,
};
ChatCompletionAgent chemist = new ChatCompletionAgent
{
    Name = "ChemistryExpert",
    Instructions = "You are an expert in chemistry. You answer from chemistry perspective.",
    Kernel = kernel,
};

// Run both agents concurrently and collect their answers.
ConcurrentOrchestration orchestration = new(physicist, chemist);
InProcessRuntime runtime = new InProcessRuntime();
await runtime.StartAsync();
var result = await orchestration.InvokeAsync("What is temperature?", runtime);
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart TD
i(["input"]) --> a1["Agent 1"]
a1 --> a2["Agent 2"]
a2 --> a3["Agent 3"]
a3 --> o(["output"])
a1@{ shape: hex}
a2@{ shape: hex}
a3@{ shape: hex}
%%{init: {'theme': 'dark', 'themeVariables': { 'darkMode': true }}}%%
flowchart TD
i(["input"]) --> a1["Agent 1"]
h1["Human 1"] <--> a1
a1 -- handoff --> a2["Agent 2"]
a1 -- handoff --> a3["Agent 3"]
a1 -- done --> o(["output"])
a2 -- done --> o
a3 -- done --> o
a1@{ shape: hex}
a2@{ shape: hex}
a3@{ shape: hex}
// Triage agent routes to the specialists; each specialist hands the
// conversation back when the issue is outside its scope.
var handoffs = OrchestrationHandoffs
    .StartWith(triageAgent)
    .Add(triageAgent, statusAgent, returnAgent)
    .Add(statusAgent, triageAgent, "Transfer to this agent if the issue is not status related")
    .Add(returnAgent, triageAgent, "Transfer to this agent if the issue is not return related");

HandoffOrchestration orchestration = new HandoffOrchestration(
    handoffs,
    triageAgent,
    statusAgent,
    returnAgent)
{
    InteractiveCallback = interactiveCallback,
    ResponseCallback = responseCallback,
};

// BUG FIX: InteractiveCallback must produce a ChatMessageContent, so this
// local function has to return ValueTask<ChatMessageContent>, not ValueTask.
ValueTask<ChatMessageContent> interactiveCallback()
{
    // Guard against a null line (e.g. closed/redirected stdin).
    var input = Console.ReadLine() ?? string.Empty;
    return ValueTask.FromResult(new ChatMessageContent(AuthorRole.User, input));
}

InProcessRuntime runtime = new InProcessRuntime();
await runtime.StartAsync();
var result = await orchestration.InvokeAsync("I need help with my orders", runtime);
Do you really want that kind of dependency?
Semantic Kernel makes it easy to do: