From b5d0d55f7b00c82b132aa177716e7b2fadf64f19 Mon Sep 17 00:00:00 2001
From: John Oliver <1615532+johnoliver@users.noreply.github.com>
Date: Thu, 25 Jul 2024 13:49:37 +0100
Subject: [PATCH 1/2] Add learn docs

---
 learnDocs/LightsApp/pom.xml                   |  83 +++++++++++++
 .../LightsApp/src/main/java/LightModel.java   |  40 ++++++
 .../LightsApp/src/main/java/LightsApp.java    | 107 ++++++++++++++++
 .../main/java/LightsAppNonInteractive.java    |  84 +++++++++++++
 .../LightsApp/src/main/java/LightsPlugin.java |  42 +++++++
 .../main/java/withbrightness/LightModel.java  |  68 ++++++++++
 .../LightsAppNonInteractive.java              | 116 ++++++++++++++++++
 .../java/withbrightness/LightsPlugin.java     |  47 +++++++
 .../LightsApp/src/main/resources/log4j2.xml   |  26 ++++
 9 files changed, 613 insertions(+)
 create mode 100644 learnDocs/LightsApp/pom.xml
 create mode 100644 learnDocs/LightsApp/src/main/java/LightModel.java
 create mode 100644 learnDocs/LightsApp/src/main/java/LightsApp.java
 create mode 100644 learnDocs/LightsApp/src/main/java/LightsAppNonInteractive.java
 create mode 100644 learnDocs/LightsApp/src/main/java/LightsPlugin.java
 create mode 100644 learnDocs/LightsApp/src/main/java/withbrightness/LightModel.java
 create mode 100644 learnDocs/LightsApp/src/main/java/withbrightness/LightsAppNonInteractive.java
 create mode 100644 learnDocs/LightsApp/src/main/java/withbrightness/LightsPlugin.java
 create mode 100644 learnDocs/LightsApp/src/main/resources/log4j2.xml

diff --git a/learnDocs/LightsApp/pom.xml b/learnDocs/LightsApp/pom.xml
new file mode 100644
index 00000000..e8d8d5fb
--- /dev/null
+++ b/learnDocs/LightsApp/pom.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.microsoft.semantic-kernel</groupId>
+    <artifactId>LightsApp</artifactId>
+    <version>1.2.0-SNAPSHOT</version>
+    <packaging>jar</packaging>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>com.microsoft.semantic-kernel</groupId>
+                <artifactId>semantickernel-bom</artifactId>
+                <version>1.2.0-SNAPSHOT</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.microsoft.semantic-kernel</groupId>
+            <artifactId>semantickernel-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-api</artifactId>
+            <scope>runtime</scope>
+            <version>2.22.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+            <scope>runtime</scope>
+            <version>2.22.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j2-impl</artifactId>
+            <scope>runtime</scope>
+            <version>2.22.1</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.azure</groupId>
+            <artifactId>azure-identity</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.microsoft.semantic-kernel</groupId>
+            <artifactId>semantickernel-aiservices-openai</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.10.1</version>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>15</source>
+                    <target>15</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/learnDocs/LightsApp/src/main/java/LightModel.java b/learnDocs/LightsApp/src/main/java/LightModel.java
new file mode 100644
index 00000000..1ec7f60a
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/LightModel.java
@@ -0,0 +1,40 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+//
+public class LightModel {
+
+    private int id;
+    private String name;
+    private Boolean isOn;
+
+    public LightModel(int id, String name, Boolean isOn) {
+        this.id = id;
+        this.name = name;
+        this.isOn = isOn;
+    }
+
+    public int getId() {
+        return id;
+    }
+
+    public void setId(int id) {
+        this.id = id;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public Boolean getIsOn() {
+        return isOn;
+    }
+
+    public void setIsOn(Boolean isOn) {
+        this.isOn = isOn;
+    }
+}
+//
\ No newline at end of file
diff --git a/learnDocs/LightsApp/src/main/java/LightsApp.java b/learnDocs/LightsApp/src/main/java/LightsApp.java
new file mode 100644
index 00000000..df4f3ffa
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/LightsApp.java
@@ -0,0 +1,107 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+import com.azure.ai.openai.OpenAIAsyncClient;
+import com.azure.ai.openai.OpenAIClientBuilder;
+import com.azure.core.credential.AzureKeyCredential;
+import com.google.gson.Gson;
+import com.microsoft.semantickernel.Kernel;
+import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
+import com.microsoft.semantickernel.contextvariables.ContextVariableTypeConverter;
+import com.microsoft.semantickernel.contextvariables.ContextVariableTypes;
+import com.microsoft.semantickernel.orchestration.InvocationContext;
+import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
+import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
+import com.microsoft.semantickernel.plugin.KernelPlugin;
+import com.microsoft.semantickernel.plugin.KernelPluginFactory;
+import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
+import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
+import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
+import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
+import java.util.List;
+import java.util.Scanner;
+
+public class LightsApp {
+
+    private static final String AZURE_CLIENT_KEY = System.getenv("AZURE_CLIENT_KEY");
+    private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
+    private static final String MODEL_ID = System.getenv().getOrDefault("MODEL_ID", "gpt-4o");
+
+    public static void main(String[] args) throws Exception {
+
+        //
+        OpenAIAsyncClient client = new OpenAIClientBuilder()
+            .credential(new AzureKeyCredential(AZURE_CLIENT_KEY))
+            .endpoint(CLIENT_ENDPOINT)
+            .buildAsyncClient();
+
+        //
+        // Import the LightsPlugin
+        KernelPlugin lightPlugin = KernelPluginFactory.createFromObject(new LightsPlugin(),
+            "LightsPlugin");
+        //
+
+        //
+        // Create your AI service client
+        ChatCompletionService chatCompletionService = OpenAIChatCompletion.builder()
+            .withModelId(MODEL_ID)
+            .withOpenAIAsyncClient(client)
+            .build();
+
+        //
+        // Create a kernel with Azure OpenAI chat completion and plugin
+        Kernel kernel = Kernel.builder()
+            .withAIService(ChatCompletionService.class, chatCompletionService)
+            .withPlugin(lightPlugin)
+            .build();
+        //
+        //
+
+        // Add a converter to the kernel to show it how to serialise LightModel objects into a prompt
+        ContextVariableTypes
+            .addGlobalConverter(
+                ContextVariableTypeConverter.builder(LightModel.class)
+                    .toPromptString(new Gson()::toJson)
+                    .build());
+
+        //
+        // Enable planning
+        InvocationContext invocationContext = new InvocationContext.Builder()
+            .withReturnMode(InvocationReturnMode.LAST_MESSAGE_ONLY)
+            .withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
+            .build();
+        //
+
+        // Create a history to store the conversation
+        ChatHistory history = new ChatHistory();
+
+        // Initiate a back-and-forth chat
+        Scanner scanner = new Scanner(System.in);
+        String userInput;
+        do {
+            // Collect user input
+            System.out.print("User > ");
+
+            //
+            userInput = scanner.nextLine();
+            // Add user input
+            history.addUserMessage(userInput);
+
+            // Prompt AI for a response to the user's input
+            List<ChatMessageContent<?>> results = chatCompletionService
+                .getChatMessageContentsAsync(history, kernel, invocationContext)
+                .block();
+            //
+
+            for (ChatMessageContent<?> result : results) {
+                // Print the results
+                if (result.getAuthorRole() == AuthorRole.ASSISTANT && result.getContent() != null) {
+                    System.out.println("Assistant > " + result);
+                }
+                // Add the message from the agent to the chat history
+                history.addMessage(result);
+            }
+        } while (userInput != null && !userInput.isEmpty());
+
+        //
+    }
+}
diff --git a/learnDocs/LightsApp/src/main/java/LightsAppNonInteractive.java b/learnDocs/LightsApp/src/main/java/LightsAppNonInteractive.java
new file mode 100644
index 00000000..c5b6753f
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/LightsAppNonInteractive.java
@@ -0,0 +1,84 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+import com.azure.ai.openai.OpenAIAsyncClient;
+import com.azure.ai.openai.OpenAIClientBuilder;
+import com.azure.core.credential.AzureKeyCredential;
+import com.google.gson.Gson;
+import com.microsoft.semantickernel.Kernel;
+import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
+import com.microsoft.semantickernel.contextvariables.ContextVariableTypeConverter;
+import com.microsoft.semantickernel.contextvariables.ContextVariableTypes;
+import com.microsoft.semantickernel.orchestration.InvocationContext;
+import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
+import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
+import com.microsoft.semantickernel.plugin.KernelPlugin;
+import com.microsoft.semantickernel.plugin.KernelPluginFactory;
+import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
+import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
+import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
+import java.util.List;
+
+public class LightsAppNonInteractive {
+
+    private static final String AZURE_CLIENT_KEY = System.getenv("AZURE_CLIENT_KEY");
+    private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
+    private static final String MODEL_ID = System.getenv().getOrDefault("MODEL_ID", "gpt-4o");
+
+    public static void main(String[] args) {
+
+        //
+        OpenAIAsyncClient client = new OpenAIClientBuilder()
+            .credential(new AzureKeyCredential(AZURE_CLIENT_KEY))
+            .endpoint(CLIENT_ENDPOINT)
+            .buildAsyncClient();
+
+        //
+        // Import the LightsPlugin
+        KernelPlugin lightPlugin = KernelPluginFactory.createFromObject(new LightsPlugin(),
+            "LightsPlugin");
+        //
+
+        //
+        // Create your AI service client
+        ChatCompletionService chatCompletionService = OpenAIChatCompletion.builder()
+            .withModelId(MODEL_ID)
+            .withOpenAIAsyncClient(client)
+            .build();
+
+        //
+        // Create a kernel with Azure OpenAI chat completion and plugin
+        Kernel kernel = Kernel.builder()
+            .withAIService(ChatCompletionService.class, chatCompletionService)
+            .withPlugin(lightPlugin)
+            .build();
+        //
+        //
+
+        // Add a converter to the kernel to show it how to serialise LightModel objects into a prompt
+        ContextVariableTypes
+            .addGlobalConverter(
+                ContextVariableTypeConverter.builder(LightModel.class)
+                    .toPromptString(new Gson()::toJson)
+                    .build());
+
+        //
+        //
+        // Enable planning
+        InvocationContext invocationContext = new InvocationContext.Builder()
+            .withReturnMode(InvocationReturnMode.LAST_MESSAGE_ONLY)
+            .withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
+            .build();
+        //
+
+        // Create a history to store the conversation
+        ChatHistory history = new ChatHistory();
+        history.addUserMessage("Turn on light 2");
+
+        List<ChatMessageContent<?>> results = chatCompletionService
+            .getChatMessageContentsAsync(history, kernel, invocationContext)
+            .block();
+
+        System.out.println("Assistant > " + results.get(0));
+        //
+    }
+}
diff --git a/learnDocs/LightsApp/src/main/java/LightsPlugin.java b/learnDocs/LightsApp/src/main/java/LightsPlugin.java
new file mode 100644
index 00000000..15eedf72
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/LightsPlugin.java
@@ -0,0 +1,42 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
+import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+//
+public class LightsPlugin {
+
+    // Mock data for the lights
+    private final Map<Integer, LightModel> lights = new HashMap<>();
+
+    public LightsPlugin() {
+        lights.put(1, new LightModel(1, "Table Lamp", false));
+        lights.put(2, new LightModel(2, "Porch light", false));
+        lights.put(3, new LightModel(3, "Chandelier", true));
+    }
+
+    @DefineKernelFunction(name = "get_lights", description = "Gets a list of lights and their current state")
+    public List<LightModel> getLights() {
+        System.out.println("Getting lights");
+        return new ArrayList<>(lights.values());
+    }
+
+    @DefineKernelFunction(name = "change_state", description = "Changes the state of the light")
+    public LightModel changeState(
+        @KernelFunctionParameter(name = "id", description = "The ID of the light to change") int id,
+        @KernelFunctionParameter(name = "isOn", description = "The new state of the light") boolean isOn) {
+        System.out.println("Changing light " + id + " " + isOn);
+        if (!lights.containsKey(id)) {
+            throw new IllegalArgumentException("Light not found");
+        }
+
+        lights.get(id).setIsOn(isOn);
+
+        return lights.get(id);
+    }
+}
+//
\ No newline at end of file
diff --git a/learnDocs/LightsApp/src/main/java/withbrightness/LightModel.java b/learnDocs/LightsApp/src/main/java/withbrightness/LightModel.java
new file mode 100644
index 00000000..174de991
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/withbrightness/LightModel.java
@@ -0,0 +1,68 @@
+package withbrightness;
+// Copyright (c) Microsoft. All rights reserved.
+
+//
+public class LightModel {
+
+    private int id;
+    private String name;
+    private Boolean isOn;
+    private Brightness brightness;
+    private String color;
+
+
+    public enum Brightness {
+        LOW,
+        MEDIUM,
+        HIGH
+    }
+
+    public LightModel(int id, String name, Boolean isOn, Brightness brightness, String color) {
+        this.id = id;
+        this.name = name;
+        this.isOn = isOn;
+        this.brightness = brightness;
+        this.color = color;
+    }
+
+    public int getId() {
+        return id;
+    }
+
+    public void setId(int id) {
+        this.id = id;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public Boolean getIsOn() {
+        return isOn;
+    }
+
+    public void setIsOn(Boolean isOn) {
+        this.isOn = isOn;
+    }
+
+    public Brightness getBrightness() {
+        return brightness;
+    }
+
+    public void setBrightness(Brightness brightness) {
+        this.brightness = brightness;
+    }
+
+    public String getColor() {
+        return color;
+    }
+
+    public void setColor(String color) {
+        this.color = color;
+    }
+}
+//
\ No newline at end of file
diff --git a/learnDocs/LightsApp/src/main/java/withbrightness/LightsAppNonInteractive.java b/learnDocs/LightsApp/src/main/java/withbrightness/LightsAppNonInteractive.java
new file mode 100644
index 00000000..c5b387f1
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/withbrightness/LightsAppNonInteractive.java
@@ -0,0 +1,116 @@
+package withbrightness;// Copyright (c) Microsoft. All rights reserved.
+
+import com.azure.ai.openai.OpenAIAsyncClient;
+import com.azure.ai.openai.OpenAIClientBuilder;
+import com.azure.core.credential.AzureKeyCredential;
+import com.google.gson.Gson;
+import com.microsoft.semantickernel.Kernel;
+import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
+import com.microsoft.semantickernel.contextvariables.ContextVariableTypeConverter;
+import com.microsoft.semantickernel.contextvariables.ContextVariableTypes;
+import com.microsoft.semantickernel.orchestration.InvocationContext;
+import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
+import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
+import com.microsoft.semantickernel.plugin.KernelPlugin;
+import com.microsoft.semantickernel.plugin.KernelPluginFactory;
+import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
+import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
+import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
+
+import java.util.List;
+
+public class LightsAppNonInteractive {
+
+    private static final String AZURE_CLIENT_KEY = System.getenv("AZURE_CLIENT_KEY");
+    private static final String CLIENT_ENDPOINT = System.getenv("CLIENT_ENDPOINT");
+    private static final String MODEL_ID = System.getenv().getOrDefault("MODEL_ID", "gpt-4o");
+
+    public static void main(String[] args) {
+
+        //
+        OpenAIAsyncClient client = new OpenAIClientBuilder()
+            .credential(new AzureKeyCredential(AZURE_CLIENT_KEY))
+            .endpoint(CLIENT_ENDPOINT)
+            .buildAsyncClient();
+
+        //
+        // Import the LightsPlugin
+        KernelPlugin lightPlugin = KernelPluginFactory.createFromObject(new LightsPlugin(),
+            "LightsPlugin");
+        //
+
+        //
+        // Create your AI service client
+        ChatCompletionService chatCompletionService = OpenAIChatCompletion.builder()
+            .withModelId(MODEL_ID)
+            .withOpenAIAsyncClient(client)
+            .build();
+
+        //
+        // Create a kernel with Azure OpenAI chat completion and plugin
+        Kernel kernel = Kernel.builder()
+            .withAIService(ChatCompletionService.class, chatCompletionService)
+            .withPlugin(lightPlugin)
+            .build();
+        //
+        //
+
+
+        // Add a converter to the kernel to show it how to serialise LightModel objects into a prompt
+        ContextVariableTypes
+            .addGlobalConverter(
+                ContextVariableTypeConverter.builder(LightModel.class)
+                    .toPromptString(new Gson()::toJson)
+                    .fromObject(it -> {
+                        if (it instanceof String) {
+                            try {
+                                return new Gson().fromJson((String) it, LightModel.class);
+                            } catch (Exception e) {
+                                throw new RuntimeException("Cannot convert to LightModel");
+                            }
+                        }
+                        return null;
+                    })
+                    .fromPromptString(string -> {
+                        return new Gson().fromJson(string, LightModel.class);
+                    })
+                    .build());
+
+        //
+        //
+        // Enable planning
+        InvocationContext invocationContext = new InvocationContext.Builder()
+            .withReturnMode(InvocationReturnMode.FULL_HISTORY)
+            .withToolCallBehavior(ToolCallBehavior.allowAllKernelFunctions(true))
+            .build();
+        //
+
+        // Create a history to store the conversation
+        ChatHistory history = new ChatHistory();
+        history.addUserMessage("Turn on light 2");
+
+        List<ChatMessageContent<?>> results = chatCompletionService
+            .getChatMessageContentsAsync(history, kernel, invocationContext)
+            .block();
+
+        System.out.println("Assistant > " + results.get(results.size() - 1));
+
+        history.addUserMessage("Decrease brightness of light 1");
+
+        results = chatCompletionService
+            .getChatMessageContentsAsync(history, kernel, invocationContext)
+            .block();
+
+        System.out.println("Assistant > " + results.get(results.size() - 1));
+
+
+        history.addUserMessage("What is the state of all the lights?");
+
+        results = chatCompletionService
+            .getChatMessageContentsAsync(history, kernel, invocationContext)
+            .block();
+
+        System.out.println("Assistant > " + results.get(results.size() - 1));
+        //
+    }
+}
diff --git a/learnDocs/LightsApp/src/main/java/withbrightness/LightsPlugin.java b/learnDocs/LightsApp/src/main/java/withbrightness/LightsPlugin.java
new file mode 100644
index 00000000..d02b40e5
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/java/withbrightness/LightsPlugin.java
@@ -0,0 +1,47 @@
+package withbrightness;// Copyright (c) Microsoft. All rights reserved.
+
+import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
+import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+//
+public class LightsPlugin {
+
+    // Mock data for the lights
+    private final Map<Integer, LightModel> lights = new HashMap<>();
+
+    public LightsPlugin() {
+        lights.put(1, new LightModel(1, "Table Lamp", false, LightModel.Brightness.MEDIUM, "#FFFFFF"));
+        lights.put(2, new LightModel(2, "Porch light", false, LightModel.Brightness.HIGH, "#FF0000"));
+        lights.put(3, new LightModel(3, "Chandelier", true, LightModel.Brightness.LOW, "#FFFF00"));
+    }
+
+    @DefineKernelFunction(name = "get_lights", description = "Gets a list of lights and their current state")
+    public List<LightModel> getLights() {
+        System.out.println("Getting lights");
+        return new ArrayList<>(lights.values());
+    }
+
+    @DefineKernelFunction(name = "change_state", description = "Changes the state of the light")
+    public LightModel changeState(
+        @KernelFunctionParameter(
+            name = "model",
+            description = "The new state of the model to set. Example model: "
+                + "{\"id\":99,\"name\":\"Head Lamp\",\"isOn\":false,\"brightness\":\"MEDIUM\",\"color\":\"#FFFFFF\"}",
+            type = LightModel.class) LightModel model
+    ) {
+        System.out.println("Changing light " + model.getId() + " " + model.getIsOn());
+        if (!lights.containsKey(model.getId())) {
+            throw new IllegalArgumentException("Light not found");
+        }
+
+        lights.put(model.getId(), model);
+
+        return lights.get(model.getId());
+    }
+}
+//
\ No newline at end of file
diff --git a/learnDocs/LightsApp/src/main/resources/log4j2.xml b/learnDocs/LightsApp/src/main/resources/log4j2.xml
new file mode 100644
index 00000000..2774f660
--- /dev/null
+++ b/learnDocs/LightsApp/src/main/resources/log4j2.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file

From 2b582e317ccef8512cbe7dc0bceb705884c553c3 Mon Sep 17 00:00:00 2001
From: John Oliver <1615532+johnoliver@users.noreply.github.com>
Date: Thu, 25 Jul 2024 14:08:03 +0100
Subject: [PATCH 2/2] Use release version

---
 learnDocs/LightsApp/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/learnDocs/LightsApp/pom.xml b/learnDocs/LightsApp/pom.xml
index e8d8d5fb..84b9fd49 100644
--- a/learnDocs/LightsApp/pom.xml
+++ b/learnDocs/LightsApp/pom.xml
@@ -14,7 +14,7 @@
             <dependency>
                 <groupId>com.microsoft.semantic-kernel</groupId>
                 <artifactId>semantickernel-bom</artifactId>
-                <version>1.2.0-SNAPSHOT</version>
+                <version>1.2.0</version>
                 <type>pom</type>
                 <scope>import</scope>
             </dependency>