diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index cbe625271..000000000
--- a/.editorconfig
+++ /dev/null
@@ -1,305 +0,0 @@
-# To learn more about .editorconfig see https://aka.ms/editorconfigdocs
-###############################
-# Core EditorConfig Options #
-###############################
-root = true
-# All files
-[*]
-indent_style = space
-end_of_line = lf
-
-# XML project files
-[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}]
-indent_size = 2
-
-# XML config files
-[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}]
-indent_size = 2
-
-# YAML config files
-[*.{yml,yaml}]
-tab_width = 2
-indent_size = 2
-insert_final_newline = true
-trim_trailing_whitespace = true
-
-# JSON config files
-[*.json]
-tab_width = 2
-indent_size = 2
-insert_final_newline = false
-trim_trailing_whitespace = true
-
-# Stylesheet files
-[*.{css,scss,sass,less}]
-insert_final_newline = true
-trim_trailing_whitespace = true
-tab_width = 4
-indent_size = 4
-
-# Code files
-[*.{cs,csx,vb,vbx}]
-tab_width = 4
-indent_size = 4
-insert_final_newline = true
-trim_trailing_whitespace = true
-charset = utf-8-bom
-file_header_template = Copyright (c) Microsoft. All rights reserved.
-
-###############################
-# Java Coding Conventions #
-###############################
-[*.java]
-charset = utf-8
-end_of_line = lf
-indent_size = 4
-indent_style = space
-insert_final_newline = false
-tab_width = 4
-ij_formatter_off_tag = @formatter:off
-ij_formatter_on_tag = @formatter:on
-ij_smart_tabs = false
-ij_visual_guides = none
-
-max_line_length = 100
-ij_continuation_indent_size = 4
-ij_formatter_tags_enabled = false
-ij_wrap_on_typing = false
-
-ij_java_align_consecutive_assignments = false
-ij_java_align_consecutive_variable_declarations = false
-ij_java_align_group_field_declarations = false
-ij_java_align_multiline_annotation_parameters = false
-ij_java_align_multiline_array_initializer_expression = false
-ij_java_align_multiline_assignment = false
-ij_java_align_multiline_binary_operation = false
-ij_java_align_multiline_chained_methods = false
-ij_java_align_multiline_extends_list = false
-ij_java_align_multiline_for = false
-ij_java_align_multiline_method_parentheses = false
-ij_java_align_multiline_parameters = false
-ij_java_align_multiline_parameters_in_calls = false
-ij_java_align_multiline_parenthesized_expression = false
-ij_java_align_multiline_resources = false
-ij_java_align_multiline_ternary_operation = false
-ij_java_align_multiline_throws_list = false
-ij_java_align_subsequent_simple_methods = false
-ij_java_align_throws_keyword = false
-ij_java_annotation_parameter_wrap = off
-ij_java_array_initializer_new_line_after_left_brace = false
-ij_java_array_initializer_right_brace_on_new_line = false
-ij_java_array_initializer_wrap = normal
-ij_java_assert_statement_colon_on_next_line = false
-ij_java_assert_statement_wrap = off
-ij_java_assignment_wrap = off
-ij_java_binary_operation_sign_on_next_line = true
-ij_java_binary_operation_wrap = normal
-ij_java_blank_lines_after_anonymous_class_header = 0
-ij_java_blank_lines_after_class_header = 1
-ij_java_blank_lines_after_imports = 1
-ij_java_blank_lines_after_package = 1
-ij_java_blank_lines_around_class = 1
-ij_java_blank_lines_around_field = 0
-ij_java_blank_lines_around_field_in_interface = 0
-ij_java_blank_lines_around_initializer = 1
-ij_java_blank_lines_around_method = 1
-ij_java_blank_lines_around_method_in_interface = 1
-ij_java_blank_lines_before_class_end = 0
-ij_java_blank_lines_before_imports = 1
-ij_java_blank_lines_before_method_body = 0
-ij_java_blank_lines_before_package = 0
-ij_java_block_brace_style = end_of_line
-ij_java_block_comment_at_first_column = true
-ij_java_call_parameters_new_line_after_left_paren = false
-ij_java_call_parameters_right_paren_on_new_line = false
-ij_java_call_parameters_wrap = normal
-ij_java_case_statement_on_separate_line = true
-ij_java_catch_on_new_line = false
-ij_java_class_annotation_wrap = split_into_lines
-ij_java_class_brace_style = end_of_line
-ij_java_class_count_to_use_import_on_demand = 999
-ij_java_class_names_in_javadoc = 1
-ij_java_do_not_indent_top_level_class_members = false
-ij_java_do_not_wrap_after_single_annotation = false
-ij_java_do_while_brace_force = always
-ij_java_doc_add_blank_line_after_description = true
-ij_java_doc_add_blank_line_after_param_comments = false
-ij_java_doc_add_blank_line_after_return = false
-ij_java_doc_add_p_tag_on_empty_lines = true
-ij_java_doc_align_exception_comments = true
-ij_java_doc_align_param_comments = true
-ij_java_doc_do_not_wrap_if_one_line = false
-ij_java_doc_enable_formatting = true
-ij_java_doc_enable_leading_asterisks = true
-ij_java_doc_indent_on_continuation = false
-ij_java_doc_keep_empty_lines = true
-ij_java_doc_keep_empty_parameter_tag = true
-ij_java_doc_keep_empty_return_tag = true
-ij_java_doc_keep_empty_throws_tag = true
-ij_java_doc_keep_invalid_tags = true
-ij_java_doc_param_description_on_new_line = false
-ij_java_doc_preserve_line_breaks = false
-ij_java_doc_use_throws_not_exception_tag = true
-ij_java_else_on_new_line = false
-ij_java_entity_dd_suffix = EJB
-ij_java_entity_eb_suffix = Bean
-ij_java_entity_hi_suffix = Home
-ij_java_entity_lhi_prefix = Local
-ij_java_entity_lhi_suffix = Home
-ij_java_entity_li_prefix = Local
-ij_java_entity_pk_class = java.lang.String
-ij_java_entity_vo_suffix = VO
-ij_java_enum_constants_wrap = off
-ij_java_extends_keyword_wrap = off
-ij_java_extends_list_wrap = normal
-ij_java_field_annotation_wrap = split_into_lines
-ij_java_finally_on_new_line = false
-ij_java_for_brace_force = always
-ij_java_for_statement_new_line_after_left_paren = false
-ij_java_for_statement_right_paren_on_new_line = false
-ij_java_for_statement_wrap = normal
-ij_java_generate_final_locals = false
-ij_java_generate_final_parameters = false
-ij_java_if_brace_force = always
-ij_java_imports_layout = $*, |, *
-ij_java_indent_case_from_switch = true
-ij_java_insert_inner_class_imports = true
-ij_java_insert_override_annotation = true
-ij_java_keep_blank_lines_before_right_brace = 2
-ij_java_keep_blank_lines_between_package_declaration_and_header = 2
-ij_java_keep_blank_lines_in_code = 1
-ij_java_keep_blank_lines_in_declarations = 2
-ij_java_keep_control_statement_in_one_line = false
-ij_java_keep_first_column_comment = true
-ij_java_keep_indents_on_empty_lines = false
-ij_java_keep_line_breaks = true
-ij_java_keep_multiple_expressions_in_one_line = false
-ij_java_keep_simple_blocks_in_one_line = false
-ij_java_keep_simple_classes_in_one_line = false
-ij_java_keep_simple_lambdas_in_one_line = false
-ij_java_keep_simple_methods_in_one_line = false
-ij_java_lambda_brace_style = end_of_line
-ij_java_layout_static_imports_separately = true
-ij_java_line_comment_add_space = false
-ij_java_line_comment_at_first_column = true
-ij_java_message_dd_suffix = EJB
-ij_java_message_eb_suffix = Bean
-ij_java_method_annotation_wrap = split_into_lines
-ij_java_method_brace_style = end_of_line
-ij_java_method_call_chain_wrap = normal
-ij_java_method_parameters_new_line_after_left_paren = false
-ij_java_method_parameters_right_paren_on_new_line = false
-ij_java_method_parameters_wrap = normal
-ij_java_modifier_list_wrap = false
-ij_java_names_count_to_use_import_on_demand = 999
-ij_java_parameter_annotation_wrap = off
-ij_java_parentheses_expression_new_line_after_left_paren = false
-ij_java_parentheses_expression_right_paren_on_new_line = false
-ij_java_place_assignment_sign_on_next_line = false
-ij_java_prefer_longer_names = true
-ij_java_prefer_parameters_wrap = false
-ij_java_repeat_synchronized = true
-ij_java_replace_instanceof_and_cast = false
-ij_java_replace_null_check = true
-ij_java_replace_sum_lambda_with_method_ref = true
-ij_java_resource_list_new_line_after_left_paren = false
-ij_java_resource_list_right_paren_on_new_line = false
-ij_java_resource_list_wrap = off
-ij_java_session_dd_suffix = EJB
-ij_java_session_eb_suffix = Bean
-ij_java_session_hi_suffix = Home
-ij_java_session_lhi_prefix = Local
-ij_java_session_lhi_suffix = Home
-ij_java_session_li_prefix = Local
-ij_java_session_si_suffix = Service
-ij_java_space_after_closing_angle_bracket_in_type_argument = false
-ij_java_space_after_colon = true
-ij_java_space_after_comma = true
-ij_java_space_after_comma_in_type_arguments = true
-ij_java_space_after_for_semicolon = true
-ij_java_space_after_quest = true
-ij_java_space_after_type_cast = true
-ij_java_space_before_annotation_array_initializer_left_brace = false
-ij_java_space_before_annotation_parameter_list = false
-ij_java_space_before_array_initializer_left_brace = false
-ij_java_space_before_catch_keyword = true
-ij_java_space_before_catch_left_brace = true
-ij_java_space_before_catch_parentheses = true
-ij_java_space_before_class_left_brace = true
-ij_java_space_before_colon = true
-ij_java_space_before_colon_in_foreach = true
-ij_java_space_before_comma = false
-ij_java_space_before_do_left_brace = true
-ij_java_space_before_else_keyword = true
-ij_java_space_before_else_left_brace = true
-ij_java_space_before_finally_keyword = true
-ij_java_space_before_finally_left_brace = true
-ij_java_space_before_for_left_brace = true
-ij_java_space_before_for_parentheses = true
-ij_java_space_before_for_semicolon = false
-ij_java_space_before_if_left_brace = true
-ij_java_space_before_if_parentheses = true
-ij_java_space_before_method_call_parentheses = false
-ij_java_space_before_method_left_brace = true
-ij_java_space_before_method_parentheses = false
-ij_java_space_before_opening_angle_bracket_in_type_parameter = false
-ij_java_space_before_quest = true
-ij_java_space_before_switch_left_brace = true
-ij_java_space_before_switch_parentheses = true
-ij_java_space_before_synchronized_left_brace = true
-ij_java_space_before_synchronized_parentheses = true
-ij_java_space_before_try_left_brace = true
-ij_java_space_before_try_parentheses = true
-ij_java_space_before_type_parameter_list = false
-ij_java_space_before_while_keyword = true
-ij_java_space_before_while_left_brace = true
-ij_java_space_before_while_parentheses = true
-ij_java_space_inside_one_line_enum_braces = false
-ij_java_space_within_empty_array_initializer_braces = false
-ij_java_space_within_empty_method_call_parentheses = false
-ij_java_space_within_empty_method_parentheses = false
-ij_java_spaces_around_additive_operators = true
-ij_java_spaces_around_assignment_operators = true
-ij_java_spaces_around_bitwise_operators = true
-ij_java_spaces_around_equality_operators = true
-ij_java_spaces_around_lambda_arrow = true
-ij_java_spaces_around_logical_operators = true
-ij_java_spaces_around_method_ref_dbl_colon = false
-ij_java_spaces_around_multiplicative_operators = true
-ij_java_spaces_around_relational_operators = true
-ij_java_spaces_around_shift_operators = true
-ij_java_spaces_around_type_bounds_in_type_parameters = true
-ij_java_spaces_around_unary_operator = false
-ij_java_spaces_within_angle_brackets = false
-ij_java_spaces_within_annotation_parentheses = false
-ij_java_spaces_within_array_initializer_braces = false
-ij_java_spaces_within_braces = false
-ij_java_spaces_within_brackets = false
-ij_java_spaces_within_cast_parentheses = false
-ij_java_spaces_within_catch_parentheses = false
-ij_java_spaces_within_for_parentheses = false
-ij_java_spaces_within_if_parentheses = false
-ij_java_spaces_within_method_call_parentheses = false
-ij_java_spaces_within_method_parentheses = false
-ij_java_spaces_within_parentheses = false
-ij_java_spaces_within_switch_parentheses = false
-ij_java_spaces_within_synchronized_parentheses = false
-ij_java_spaces_within_try_parentheses = false
-ij_java_spaces_within_while_parentheses = false
-ij_java_special_else_if_treatment = true
-ij_java_subclass_name_suffix = Impl
-ij_java_ternary_operation_signs_on_next_line = true
-ij_java_ternary_operation_wrap = normal
-ij_java_test_name_suffix = Test
-ij_java_throws_keyword_wrap = normal
-ij_java_throws_list_wrap = off
-ij_java_use_external_annotations = false
-ij_java_use_fq_class_names = false
-ij_java_use_single_class_imports = true
-ij_java_variable_annotation_wrap = off
-ij_java_visibility = public
-ij_java_while_brace_force = always
-ij_java_while_on_new_line = false
-ij_java_wrap_comments = true
-ij_java_wrap_first_method_in_call_chain = false
-ij_java_wrap_long_lines = false
diff --git a/.gitattributes b/.gitattributes
deleted file mode 100644
index b5845d148..000000000
--- a/.gitattributes
+++ /dev/null
@@ -1,6 +0,0 @@
-# Auto-detect text files, ensure they use LF.
-* text=auto eol=lf working-tree-encoding=UTF-8
-
-# Bash scripts
-*.sh text eol=lf
-*.cmd text eol=crlf
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
deleted file mode 100644
index d6e7dac3f..000000000
--- a/.github/CODEOWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-# @microsoft/octo-semantickernel-pr-java owns any files in the java
-# directory at the root of the repository and any of its
-# subdirectories.
-/ @microsoft/octo-semantickernel-pr-java
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 6f7815004..000000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,36 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-title: 'Bug: '
-labels: ["bug", "triage"]
-projects: ["semantic-kernel"]
-assignees: ''
-
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Maven**
- - Version: [e.g. 1.1.5]
- - Dependencies: list of semantic-kernel related dependencies in your `pom.xml`
-
-**Platform**
- - IDE: [e.g. IntelliJ, Eclipse, VS Code]
- - JDK version: [e.g. JDK 11.0.17]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_graduation.md b/.github/ISSUE_TEMPLATE/feature_graduation.md
deleted file mode 100644
index 37d207ea1..000000000
--- a/.github/ISSUE_TEMPLATE/feature_graduation.md
+++ /dev/null
@@ -1,29 +0,0 @@
----
-name: Feature graduation
-about: Plan the graduation of an experimental feature
-title: 'Graduate XXX feature'
-labels: ["feature_graduation"]
-projects: ["semantic-kernel"]
-assignees: ''
-
----
-
----
-name: Feature graduation
-about: Plan the graduation of an experimental feature
-
----
-
-Checklist to be completed when graduating an experimental feature
-
-- [ ] Notify PMs and EMs that feature is ready for graduation
-- [ ] Contact PM for list of sample use cases
-- [ ] Verify there are sample implementations for each of the use cases
-- [ ] Verify telemetry and logging are complete
-- [ ] Verify API docs are complete and arrange to have them published
-- [ ] Make appropriate updates to Learn docs
-- [ ] Make appropriate updates to Concept samples
-- [ ] Make appropriate updates to Blog posts
-- [ ] Verify there are no serious open Issues
-- [ ] Update table in EXPERIMENTS.md
-- [ ] Remove SKEXP flag from the experimental code
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index ca7db8088..000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,21 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: 'New Feature: '
-labels: ["triage", "enhancement"]
-projects: ["semantic-kernel"]
-assignees: ''
-
----
-
----
-name: Feature request
-about: Suggest an idea for this project
-
----
-
-
-
-
-
-
diff --git a/.github/_typos.toml b/.github/_typos.toml
deleted file mode 100644
index c101713b6..000000000
--- a/.github/_typos.toml
+++ /dev/null
@@ -1,45 +0,0 @@
-# Typos configuration file
-#
-# Info: https://github.com/marketplace/actions/typos-action
-# Install: brew install typos-cli
-# Install: conda install typos
-# Run: typos -c .github/_typos.toml
-
-[files]
-extend-exclude = [
- "_typos.toml",
- "package-lock.json",
- "*.bicep",
- "encoder.json",
- "vocab.bpe",
- "CodeTokenizerTests.cs",
- "test_code_tokenizer.py",
- "*response.json",
- "samples/semantickernel-demos/sk-presidio-sample/README.md"
-]
-
-[default.extend-words]
-ACI = "ACI" # Azure Container Instance
-exercize = "exercize" # test typos
-gramatical = "gramatical" # test typos
-Guid = "Guid" # Globally Unique Identifier
-HD = "HD" # Test header value
-EOF = "EOF" # End of File
-ans = "ans" # Short for answers
-arange = "arange" # Method in Python numpy package
-prompty = "prompty" # prompty is a format name.
-ist = "ist" # German language
-Prelease = "Prelease" # Prelease is a format name.
-
-[default.extend-identifiers]
-ags = "ags" # Azure Graph Service
-
-[type.jupyter]
-extend-ignore-re = [
- '"[A-Fa-f0-9]{8}"', # cell id strings
-]
-
-[type.msbuild]
-extend-ignore-re = [
- 'Version=".*"', # ignore package version numbers
-]
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
deleted file mode 100644
index 97b4f6647..000000000
--- a/.github/dependabot.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-# To get started with Dependabot version updates, you'll need to specify which
-# package ecosystems to update and where the package manifests are located.
-# Please see the documentation for all configuration options:
-# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
-
-version: 2
-updates:
- # Maintain dependencies for nuget
- - package-ecosystem: "nuget"
- directory: "dotnet/"
- schedule:
- interval: "weekly"
- day: "monday"
- ignore:
- # For all System.* and Microsoft.Extensions/Bcl.* packages, ignore all major version updates
- - dependency-name: "System.*"
- update-types: ["version-update:semver-major"]
- - dependency-name: "Microsoft.Extensions.*"
- update-types: ["version-update:semver-major"]
- - dependency-name: "Microsoft.Bcl.*"
- update-types: ["version-update:semver-major"]
- - dependency-name: "Moq"
- labels:
- - ".NET"
- - "dependencies"
-
- # Maintain dependencies for nuget
- - package-ecosystem: "nuget"
- directory: "samples/dotnet"
- schedule:
- interval: "weekly"
- day: "monday"
-
- # Maintain dependencies for npm
- - package-ecosystem: "npm"
- directory: "samples/apps"
- schedule:
- interval: "weekly"
- day: "monday"
-
- # Maintain dependencies for pip
- - package-ecosystem: "pip"
- directory: "python/"
- schedule:
- interval: "weekly"
- day: "monday"
- labels:
- - "python"
- - "dependencies"
-
- # Maintain dependencies for github-actions
- - package-ecosystem: "github-actions"
- # Workflow files stored in the
- # default location of `.github/workflows`
- directory: "/"
- schedule:
- interval: "weekly"
- day: "monday"
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
deleted file mode 100644
index 38e62262d..000000000
--- a/.github/pull_request_template.md
+++ /dev/null
@@ -1,23 +0,0 @@
-### Motivation and Context
-
-
-
-### Description
-
-
-
-### Contribution Checklist
-
-
-
-- [ ] The code builds clean without any errors or warnings
-- [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
-- [ ] All unit tests pass, and I have added new tests where possible
-- [ ] I didn't break anyone :smile:
diff --git a/.github/workflows/close-inactive-issues.yml b/.github/workflows/close-inactive-issues.yml
deleted file mode 100644
index 4e6ebccef..000000000
--- a/.github/workflows/close-inactive-issues.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-name: Close inactive issues
-on:
- schedule:
- - cron: "30 1 * * *"
-
-jobs:
- close-issues:
- runs-on: ubuntu-latest
- permissions:
- issues: write
- pull-requests: write
- steps:
- - uses: actions/stale@v10
- with:
- days-before-issue-stale: 90
- days-before-issue-close: 14
- stale-issue-label: "stale"
- stale-issue-message: "This issue is stale because it has been open for 90 days with no activity."
- close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
- days-before-pr-stale: -1
- days-before-pr-close: -1
- repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
deleted file mode 100644
index dee4d1c46..000000000
--- a/.github/workflows/codeql-analysis.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-# CodeQL is the code analysis engine developed by GitHub to automate security checks.
-# The results are shown as code scanning alerts in GitHub. For more details, visit:
-# https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/about-code-scanning-with-codeql
-
-name: "CodeQL"
-
-on:
- push:
- branches: ["main"]
- schedule:
- - cron: "17 11 * * 2"
-
-jobs:
- analyze:
- name: Analyze
- runs-on: ubuntu-latest
- permissions:
- actions: read
- contents: read
- security-events: write
-
- strategy:
- fail-fast: false
- matrix:
- language: ["java"]
- # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
- # Use only 'java' to analyze code written in Java, Kotlin or both
- # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
- # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v6
-
- # Initializes the CodeQL tools for scanning.
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v4
- with:
- languages: ${{ matrix.language }}
- # If you wish to specify custom queries, you can do so here or in a config file.
- # By default, queries listed here will override any specified in a config file.
- # Prefix the list here with "+" to use these queries and those in the config file.
-
- # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
- # queries: security-extended,security-and-quality
-
- # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
- # If this step fails, then you should remove it and run the build manually (see below)
- - name: Autobuild
- if: ${{ matrix.language != 'java' }}
- uses: github/codeql-action/autobuild@v4
-
- - name: Setup JDK
- uses: actions/setup-java@v5
- if: ${{ matrix.language == 'java' }}
- with:
- java-version: 17
- distribution: microsoft
- cache: maven
-
- - name: Build Java
- if: ${{ matrix.language == 'java' }}
- run: ./mvnw -B -DskipTests -Pcompile-jdk17 clean install --file pom.xml
-
- # ℹ️ Command-line programs to run using the OS shell.
- # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
-
- # If the Autobuild fails above, remove it and uncomment the following three lines.
- # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
-
- # - run: |
- # echo "Run, Build Application using script"
- # ./location_of_script_within_repo/buildscript.sh
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v4
- with:
- category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/java-build.yml b/.github/workflows/java-build.yml
deleted file mode 100644
index 32b71f757..000000000
--- a/.github/workflows/java-build.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: Build Java Semantic Kernel
-
-# Triggers the workflow on manual dispatch, push, and pull request events
-# for the specified branches and paths
-on:
- workflow_dispatch:
- push:
- branches: [ "main" ]
- pull_request:
- branches: [ "main" ]
-
-permissions:
- contents: read
-
-jobs:
- # Builds and tests the Java project
- java-build:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- # Defines a matrix strategy for JDK versions 8 and 17
- java-versions: [8, 17]
-
- name: Java CI on JDK${{ matrix.java-versions }}
-
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- # Need to use JDK 11 to build for JDK 8
- - name: Set JDK
- id: set-jdk
- shell: bash
- run: |
- if [[ ${{ matrix.java-versions }} == 8 ]]; then
- echo "JDK_VERSION=11" >> $GITHUB_OUTPUT
- else
- version=${{ matrix.java-versions }}
- echo "JDK_VERSION=$version" >> $GITHUB_OUTPUT
- fi
-
- # Sets up the specified JDK version from the matrix
- - uses: actions/setup-java@v5
- with:
- java-version: ${{ steps.set-jdk.outputs.JDK_VERSION }}
- distribution: microsoft
- cache: maven
-
- # Builds the project with Maven using the matrix JDK version
- - name: Build with Maven
- run: ./mvnw -B -Pbug-check -DskipTests -Pcompile-jdk${{ matrix.java-versions }} clean install --file pom.xml
-
- # Runs tests with Maven using the matrix JDK version
- - name: Run tests
- run: ./mvnw -B -Pbug-check -Pcompile-jdk${{ matrix.java-versions }} test --file pom.xml
-
- # Uploads test artifacts for each JDK version
- - uses: actions/upload-artifact@v6
- if: always()
- with:
- name: test_output_sk_jdk${{ matrix.java-versions }}u
- path: ./**/target/surefire-reports/*Test.txt
diff --git a/.github/workflows/java-integration-tests.yml b/.github/workflows/java-integration-tests.yml
deleted file mode 100644
index 17edd75ed..000000000
--- a/.github/workflows/java-integration-tests.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-name: Run Java Integration Tests and Samples
-
-on:
- workflow_dispatch:
- push:
- branches: [ "main" ]
- pull_request:
- branches: [ "main" ]
-
-permissions:
- contents: read
-
-jobs:
- java-integration-tests:
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- java-versions: [8, 17]
-
- name: Java Tests on JDK${{ matrix.java-versions }}
-
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- # Need to use JDK 11 to build for JDK 8
- - name: Set JDK
- id: set-jdk
- shell: bash
- run: |
- if [[ ${{ matrix.java-versions }} == 8 ]]; then
- echo "JDK_VERSION=11" >> $GITHUB_OUTPUT
- else
- version=${{ matrix.java-versions }}
- echo "JDK_VERSION=$version" >> $GITHUB_OUTPUT
- fi
-
- - uses: actions/setup-java@v5
- with:
- java-version: ${{ steps.set-jdk.outputs.JDK_VERSION }}
- distribution: microsoft
- cache: maven
-
- - name: Build with Maven
- run: ./mvnw -B -Pwith-samples -Pbug-check -DskipTests -Pcompile-jdk${{ matrix.java-versions }} clean install --file pom.xml
- if: ${{ matrix.java-versions >= 17 }}
-
- # API tests run on JDK 17+
- - name: Run integration tests
- run: ../mvnw -B -Pbug-check clean install --file pom.xml
- working-directory: api-test
- env:
- OPENAI_API_KEY: ""
- AZURE_OPENAI_ENDPOINT: ""
- AZURE_OPENAI_API_KEY: ""
- AZURE_OPENAI_DEPLOYMENT_NAME: ""
- if: ${{ matrix.java-versions >= 17 }}
-
- # Samples build on JDK 17+
- - name: Build semantic-kernel samples
- run: ../mvnw -B clean install --file pom.xml
- working-directory: samples
- if: ${{ matrix.java-versions >= 17 }}
diff --git a/.github/workflows/java-publish-package.yml b/.github/workflows/java-publish-package.yml
deleted file mode 100644
index 8cc4e00c0..000000000
--- a/.github/workflows/java-publish-package.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-name: Deploy Java Semantic Kernel Package
-
-# Triggers the workflow on merging a PR
-on:
- pull_request:
- types:
- - closed
- branches: [ "main" ]
-
-permissions:
- contents: read
- packages: write
-
-jobs:
- if_merged:
- if: github.event.pull_request.merged == true
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v6
-
- # Sets up the specified JDK version from the matrix
- - uses: actions/setup-java@v5
- with:
- java-version: 11
- distribution: microsoft
- cache: maven
-
- - name: Build artifacts
- run: ./mvnw -B -DskipTests -Pcompile-jdk8 -P-compile-jdk17 clean deploy --file pom.xml -DaltDeploymentRepository=local::file:///tmp/target/staging-deploy
-
- - name: Upload Artifacts
- uses: actions/upload-artifact@v6
- with:
- name: Artifacts
- path: /tmp/target/staging-deploy
-
- - name: Deploy to github packages
- run: ./mvnw -B -DskipTests -Pcompile-jdk8 -P-compile-jdk17 -Pgithub-packages clean deploy --file pom.xml
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/label-issues.yml b/.github/workflows/label-issues.yml
deleted file mode 100644
index cf901ade0..000000000
--- a/.github/workflows/label-issues.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Label issues
-on:
- issues:
- types:
- - reopened
- - opened
-
-jobs:
- label_issues:
- name: "Issue: add labels"
- if: ${{ github.event.action == 'opened' || github.event.action == 'reopened' }}
- runs-on: ubuntu-latest
- permissions:
- issues: write
- steps:
- - uses: actions/github-script@v8
- with:
- github-token: ${{ secrets.GH_ACTIONS_PR_WRITE }}
- script: |
- // Get the issue body and title
- const body = context.payload.issue.body
- let title = context.payload.issue.title
-
- // Define the labels array
- let labels = ["triage"]
-
- // Check if the body or the title contains the word 'python' (case-insensitive)
- if ((body != null && body.match(/python/i)) || (title != null && title.match(/python/i))) {
- // Add the 'python' label to the array
- labels.push("python")
- }
-
- // Check if the body or the title contains the word 'java' (case-insensitive)
- if ((body != null && body.match(/java/i)) || (title != null && title.match(/java/i))) {
- // Add the 'java' label to the array
- labels.push("java")
- }
-
- // Check if the body or the title contains the words 'dotnet', '.net', 'c#' or 'csharp' (case-insensitive)
- if ((body != null && body.match(/.net/i)) || (title != null && title.match(/.net/i)) ||
- (body != null && body.match(/dotnet/i)) || (title != null && title.match(/dotnet/i)) ||
- (body != null && body.match(/C#/i)) || (title != null && title.match(/C#/i)) ||
- (body != null && body.match(/csharp/i)) || (title != null && title.match(/csharp/i))) {
- // Add the '.NET' label to the array
- labels.push(".NET")
- }
-
- // Add the labels to the issue
- github.rest.issues.addLabels({
- issue_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- labels: labels
- });
diff --git a/.github/workflows/label-title-prefix.yml b/.github/workflows/label-title-prefix.yml
deleted file mode 100644
index 81369deed..000000000
--- a/.github/workflows/label-title-prefix.yml
+++ /dev/null
@@ -1,73 +0,0 @@
-name: Label title prefix
-on:
- issues:
- types: [labeled]
- pull_request_target:
- types: [labeled]
-
-jobs:
- add_title_prefix:
- name: "Issue/PR: add title prefix"
- continue-on-error: true
- runs-on: ubuntu-latest
- permissions:
- issues: write
- pull-requests: write
-
- steps:
- - uses: actions/github-script@v8
- name: "Issue/PR: update title"
- with:
- github-token: ${{ secrets.GITHUB_TOKEN }}
- script: |
- let prefixLabels = {
- "python": "Python",
- "java": "Java",
- ".NET": ".Net"
- };
-
- function addTitlePrefix(title, prefix)
- {
- // Update the title based on the label and prefix
- // Check if the title starts with the prefix (case-sensitive)
- if (!title.startsWith(prefix + ": ")) {
- // If not, check if the first word is the label (case-insensitive)
- if (title.match(new RegExp(`^${prefix}`, 'i'))) {
- // If yes, replace it with the prefix (case-sensitive)
- title = title.replace(new RegExp(`^${prefix}`, 'i'), prefix);
- } else {
- // If not, prepend the prefix to the title
- title = prefix + ": " + title;
- }
- }
-
- return title;
- }
-
- labelAdded = context.payload.label.name
-
- // Check if the issue or PR has the label
- if (labelAdded in prefixLabels) {
- let prefix = prefixLabels[labelAdded];
- switch(context.eventName) {
- case 'issues':
- github.rest.issues.update({
- issue_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- title: addTitlePrefix(context.payload.issue.title, prefix)
- });
- break
-
- case 'pull_request_target':
- github.rest.pulls.update({
- pull_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- title: addTitlePrefix(context.payload.pull_request.title, prefix)
- });
- break
- default:
-              core.setFailed('Unrecognized eventName: ' + context.eventName);
- }
- }
diff --git a/.github/workflows/markdown-link-check-config.json b/.github/workflows/markdown-link-check-config.json
deleted file mode 100644
index 50ada4911..000000000
--- a/.github/workflows/markdown-link-check-config.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "ignorePatterns": [
- {
- "pattern": "/github/"
- },
- {
- "pattern": "./actions"
- },
- {
- "pattern": "./blob"
- },
- {
- "pattern": "./issues"
- },
- {
- "pattern": "./discussions"
- },
- {
- "pattern": "./pulls"
- },
- {
- "pattern": "^http://localhost"
- },
- {
- "pattern": "^https://localhost"
- },
- {
- "pattern": "^https://platform.openai.com"
- },
- {
- "pattern": "^https://outlook.office.com/bookings"
- }
- ],
- "timeout": "20s",
- "retryOn429": true,
- "retryCount": 3,
- "fallbackRetryDelay": "30s",
- "aliveStatusCodes": [200, 206, 429, 500, 503]
-}
diff --git a/.github/workflows/markdown-link-check.yml b/.github/workflows/markdown-link-check.yml
deleted file mode 100644
index bc1a1fa72..000000000
--- a/.github/workflows/markdown-link-check.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: Check .md links
-
-on:
- workflow_dispatch:
- pull_request:
- branches: [ "main" ]
-
-permissions:
- contents: read
-
-jobs:
- markdown-link-check:
- runs-on: ubuntu-latest
- # check out the latest version of the code
- steps:
- - uses: actions/checkout@v6
-
- # Checks the status of hyperlinks in .md files in verbose mode
- - name: Check links
- uses: gaurav-nelson/github-action-markdown-link-check@v1
- with:
- use-verbose-mode: "yes"
- config-file: ".github/workflows/markdown-link-check-config.json"
diff --git a/.github/workflows/merge-gatekeeper.yml b/.github/workflows/merge-gatekeeper.yml
deleted file mode 100644
index adb6811ab..000000000
--- a/.github/workflows/merge-gatekeeper.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-name: Merge Gatekeeper
-
-on:
- pull_request:
- branches: [ "main", "feature*" ]
- merge_group:
- branches: ["main"]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
- cancel-in-progress: true
-
-jobs:
- merge-gatekeeper:
- runs-on: ubuntu-latest
- # Restrict permissions of the GITHUB_TOKEN.
- # Docs: https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs
- permissions:
- checks: read
- statuses: read
- steps:
- - name: Run Merge Gatekeeper
- # NOTE: v1 is updated to reflect the latest v1.x.y. Please use any tag/branch that suits your needs:
- # https://github.com/upsidr/merge-gatekeeper/tags
- # https://github.com/upsidr/merge-gatekeeper/branches
- uses: upsidr/merge-gatekeeper@v1
- if: github.event_name == 'pull_request'
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- timeout: 3600
diff --git a/.github/workflows/typos.yaml b/.github/workflows/typos.yaml
deleted file mode 100644
index 532f4d5dc..000000000
--- a/.github/workflows/typos.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-# Check pull requests for typos.
-#
-# Configuration: .github/_typos.toml
-#
-# Info: https://github.com/marketplace/actions/typos-action
-# Local install: brew install typos-cli
-# Local install: conda install typos
-# Local run: typos -c .github/_typos.toml
-
-name: Spell Check
-
-on:
- workflow_dispatch:
- pull_request:
- branches: [ "main" ]
-
-jobs:
- run:
- name: Spell Check with Typos
- runs-on: ubuntu-latest
- steps:
- - name: Check out code
- uses: actions/checkout@v6
-
- - name: Use custom config file
- uses: crate-ci/typos@master
- with:
- config: .github/_typos.toml
- write_changes: false
diff --git a/.github/workflows/update-version.sh b/.github/workflows/update-version.sh
deleted file mode 100755
index 5db3622f6..000000000
--- a/.github/workflows/update-version.sh
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/bin/bash
-
-POSITIONAL_ARGS=()
-
-while [[ $# -gt 0 ]]; do
- case $1 in
- -f|--file)
- file="$2"
- shift # past argument
- shift # past value
- ;;
- -p|--propsFile)
- propsFile="$2"
- shift # past argument
- shift # past value
- ;;
- -b|--buildAndRevisionNumber)
- buildAndRevisionNumber="$2"
- shift # past argument
- shift # past value
- ;;
- -*|--*)
- echo "Unknown option $1"
- exit 1
- ;;
- *)
- POSITIONAL_ARGS+=("$1") # save positional arg
- shift # past argument
- ;;
- esac
-done
-
-set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
-
-if [ -z "$file" ]; then
- echo "ERROR: Parameter file (-f|--file) not provided"
- exit 1;
-elif [ ! -f "$file" ]; then
- echo "ERROR: file ${file} not found"
- exit 1;
-fi
-
-if [ -n "$(cat $file | grep -i "false")" ]; then
- echo "Project is marked as NOT packable - skipping."
- exit 0;
-fi
-
-if [ -z "$propsFile" ]; then
- echo "ERROR: Parameter propsFile (-f|--file) not provided"
- exit 1;
-elif [ ! -f "$propsFile" ]; then
- echo "ERROR: propsFile ${file} not found"
- exit 1;
-fi
-
-if [ -z "$buildAndRevisionNumber" ]; then
- echo "ERROR: Parameter buildAndRevisionNumber (-b|--buildAndRevisionNumber) not provided"
- exit 1;
-fi
-
-propsVersionString=$(cat $propsFile | grep -i "");
-regex="([0-9.]*)<\/Version>"
-if [[ $propsVersionString =~ $regex ]]; then
- propsVersion=${BASH_REMATCH[1]}
-else
- echo "ERROR: Version tag not found in propsFile"
- exit 1;
-fi
-
-if [ -z "$propsVersion" ]; then
- echo "ERROR: Version tag not found in propsFile"
- exit 1;
-elif [[ ! "$propsVersion" =~ ^0.* ]]; then
- echo "ERROR: Version expected to start with 0. Actual: ${propsVersion}"
- exit 1;
-fi
-
-fullVersionString="${propsVersion}.${buildAndRevisionNumber}-preview"
-
-if [[ ! "$fullVersionString" =~ ^0.* ]]; then
- echo "ERROR: Version expected to start with 0. Actual: ${fullVersionString}"
- exit 1;
-fi
-
-echo "==== Project: ${file} ====";
-echo "propsFile = ${propsFile}"
-echo "buildAndRevisionNumber = ${buildAndRevisionNumber}"
-echo "version prefix from propsFile = ${propsVersion}"
-echo "full version string: ${fullVersionString}"
-
-versionInProj=$(cat $file | grep -i "");
-if [ -n "$versionInProj" ]; then
- # Version tag already exists in the csproj. Let's replace it.
- echo "Updating version tag..."
- content=$(cat $file | sed --expression="s/\([0-9]*.[0-9]*\)<\/Version>/$fullVersionString<\/Version>/g");
-else
- # Version tag not found in the csproj. Let's add it.
- echo "Project is packable - adding version tag..."
- content=$(cat $file | sed --expression="s/<\/Project>/$fullVersionString<\/Version><\/PropertyGroup><\/Project>/g");
-fi
-
-if [ $? -ne 0 ]; then exit 1; fi
-echo "$content" && echo "$content" > $file;
-if [ $? -ne 0 ]; then exit 1; fi
-
-echo "DONE";
-echo "";
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 44c24fc06..000000000
--- a/.gitignore
+++ /dev/null
@@ -1,64 +0,0 @@
-# VS Code files for those working on multiple tools
-.vscode/*
-!.vscode/settings.json
-!.vscode/tasks.json
-!.vscode/launch.json
-!.vscode/extensions.json
-*.code-workspace
-
-# Local History for Visual Studio Code
-.history/
-
-# JetBrains IntelliJ
-.idea
-*.ipr
-*.iml
-*.iws
-
-# Maven settings
-conf.properties
-java/**/target
-target/
-pom.xml.tag
-pom.xml.releaseBackup
-pom.xml.versionsBackup
-pom.xml.next
-release.properties
-dependency-reduced-pom.xml
-buildNumber.properties
-.mvn/timing.properties
-.mvn/wrapper/maven-wrapper.jar
-
-# Eclipse m2e generated files
-# Eclipse Core
-.project
-# JDT-specific (Eclipse Java Development Tools)
-.classpath
-
-# Other
-.env
-certs/
-launchSettings.json
-config.development.yaml
-*.development.config
-*.development.json
-.DS_Store
-node_modules/
-obj/
-bin/
-_dev/
-.dev/
-*.devis.*
-.vs/
-*.user
-**/.vscode/chrome
-**/.vscode/.ropeproject/objectdb
-*.pyc
-.ipynb_checkpoints
-.jython_cache/
-__pycache__/
-.mypy_cache/
-__pypackages__/
-.pdm.toml
-global.json
-.java-version
diff --git a/.mvn/jvm.config b/.mvn/jvm.config
deleted file mode 100644
index 32599cefe..000000000
--- a/.mvn/jvm.config
+++ /dev/null
@@ -1,10 +0,0 @@
---add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED
---add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
---add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED
---add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
deleted file mode 100644
index d58dfb70b..000000000
--- a/.mvn/wrapper/maven-wrapper.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-wrapperVersion=3.3.2
-distributionType=only-script
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
deleted file mode 100644
index d24ed88a4..000000000
--- a/.vscode/extensions.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- // See https://go.microsoft.com/fwlink/?LinkId=827846
- // for the documentation about the extensions.json format
- "recommendations": [
- "ms-java.vscode-java-pack",
- ]
-}
diff --git a/.vscode/launch.json b/.vscode/launch.json
deleted file mode 100644
index bd5534a58..000000000
--- a/.vscode/launch.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "version": "0.2.0",
- "configurations": [
- ]
-}
diff --git a/.vscode/settings.json b/.vscode/settings.json
deleted file mode 100644
index 6f3bd17c1..000000000
--- a/.vscode/settings.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
- "editor.formatOnType": true,
- "editor.formatOnSave": true,
- "editor.formatOnPaste": true,
- "editor.bracketPairColorization.enabled": true,
- "editor.guides.bracketPairs": "active",
- "notebook.output.textLineLimit": 500,
- "files.exclude": {
- "**/.git": true,
- "**/.svn": true,
- "**/.hg": true,
- "**/CVS": true,
- "**/.DS_Store": true,
- "**/Thumbs.db": true
- },
- "[java]": {
- "editor.formatOnSave": false,
- "editor.tabSize": 4,
- "editor.codeActionsOnSave": {
- "source.fixAll": "never"
- },
- },
- "java.debug.settings.onBuildFailureProceed": true,
- "java.compile.nullAnalysis.mode": "disabled",
- "java.configuration.updateBuildConfiguration": "interactive"
-}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
deleted file mode 100644
index feea3bb00..000000000
--- a/.vscode/tasks.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "version": "2.0.0",
- "inputs": [
- {
- "id": "filter",
- "type": "promptString",
- "default": "",
- "description": "Enter a filter to pass as argument or filter"
- }
- ]
-}
diff --git a/BUILD.md b/BUILD.md
deleted file mode 100644
index bc0c61ce5..000000000
--- a/BUILD.md
+++ /dev/null
@@ -1,117 +0,0 @@
-# Semantic Kernel for Java
-
-Semantic Kernel (SK) is a lightweight foundation that lets you easily mix conventional programming languages with the
-latest in
-Large Language Model (LLM) AI "prompts" with templating, chaining, and planning capabilities out-of-the-box.
-
-To learn more about Microsoft Semantic Kernel, visit
-the [Microsoft Semantic Kernel documentation](https://learn.microsoft.com/en-us/semantic-kernel/whatissk).
-
-The Microsoft Semantic Kernel for Java is a library that implements the key concepts and foundations of Microsoft
-Semantic Kernel. It is designed
-to be used in Java applications in both client (desktop, mobile, CLIs) and server environments in an idiomatic way, and
-to be easily integrated with other Java libraries
-and frameworks.
-
-## Quickstart
-
-To get an idea of how to use the Semantic Kernel for Java, you can check
-the [syntax-examples](samples/semantickernel-concepts/semantickernel-syntax-examples/src/main/java/com/microsoft/semantickernel/samples/syntaxexamples) folder for
-examples of common AI-enabled scenarios.
-
-## Get started
-
-To run the LLM prompts and semantic functions in this kernel, make sure you have
-an [Open AI API Key](https://platform.openai.com/)
-or [Azure Open AI service key](https://learn.microsoft.com/azure/cognitive-services/openai/).
-
-### Requirements
-
-To build the Semantic Kernel for Java, you will need:
-
-- **Required**:
- - [OpenJDK 17](https://microsoft.com/openjdk/) or newer
-
-### Build the Semantic Kernel
-
-1. Clone this repository
-
- git clone https://github.com/microsoft/semantic-kernel-java
-
-2. Build the project with the Maven Wrapper
-
- cd semantic-kernel
- ./mvnw install
-
-3. (Optional) To run a FULL build including static analysis and end-to-end tests that might require a valid OpenAI key,
- run the following command:
-
- ./mvnw clean install -Prelease,bug-check,with-samples
-
-## Using the Semantic Kernel for Java
-
-The library is organized in a set of dependencies published to Maven Central. For a list of the Maven dependencies and
-how to use each of them, see [PACKAGES.md](PACKAGES.md).
-
-Alternatively, check the `samples` folder for examples of common AI-enabled scenarios implemented with Semantic Kernel
-for Java.
-
-## Discord community
-
-Join the [Microsoft Semantic Kernel Discord community](https://aka.ms/java-sk-discord) to discuss the Semantic Kernel
-and get help from the community. We have a `#java` channel for Java-specific questions.
-
-## Contributing
-
-### Testing locally
-
-The project may contain end-to-end tests that require an OpenAI key to run. To run these tests locally, you
-will need to set the following environment variable:
-
-- `CLIENT_KEY` - the OpenAI API key.
-
-If you are using Azure OpenAI, you will also need to set the following environment variables:
-
-- `CLIENT_ENDPOINT` - the Azure OpenAI endpoint found in **Keys * Endpoint** section of the Azure OpenAI service.
-- `AZURE_CLIENT_KEY` - the Azure OpenAI API key found in **Keys * Endpoint** section of the Azure OpenAI service.
-- `MODEL_ID` - the custom name you chose for your deployment when you deployed a model. It can be
- found under **Resource Management > Deployments** in the Azure Portal.
-
-For more information, see the Azure OpenAI documentation
-on [how to get your Azure OpenAI credentials](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart?pivots=rest-api&tabs=command-line#retrieve-key-and-endpoint).
-
-To run the unit tests only, run the following command:
-
- ./mvnw package
-
-To run all tests, including integration tests that require an OpenAI key, run the following command:
-
- ./mvnw verify -Prelease,bug-check,with-samples
-
-### Submitting a pull request
-
-Before submitting a pull request, please make sure that you have run the project with the command:
-
-```shell
-./mvnw clean package -Pbug-check
-```
-
-The bug-check profile will detect some static analysis issues that will prevent merging as well as apply formatting
-requirements to the code base.
-
-Also ensure that:
-
-- All new code is covered by unit tests
-- All new code is covered by integration tests
-
-Once your proposal is ready, submit a pull request. The pull request will be reviewed by the project maintainers.
-
-Make sure your pull request has an objective title and a clear description explaining the problem and solution.
-
-## License
-
-This project is licensed under the [MIT License](LICENSE).
-
-## Code of Conduct
-
-This project has adopted the [Microsoft Open Source Code of Conduct](CODE_OF_CONDUCT.md).
diff --git a/CHANGELOG.md b/CHANGELOG.md
deleted file mode 100644
index 0267d08f7..000000000
--- a/CHANGELOG.md
+++ /dev/null
@@ -1,135 +0,0 @@
-# 1.4.4-RC2
-
-- Upgrade many dependencies to recent versions
-- Migrate from "Tool calls" to "Function calls" terminology, deprecated OpenAIFunctionToolCall
-- Refactored Data storage area to be more extensible and added Oracle Database support (Thank you to the contributors
- from Oracle for the contribution)
-
-# 1.4.4-RC1
-
-- Add Agent framework abstractions.
-- Add ChatCompletionAgent implementation.
-- Add FunctionChoiceBehavior for OpenAI, replacing the older ToolCallBehavior.
-
-# 1.4.3
-
-- Bug fix for execution on Android (https://github.com/microsoft/semantic-kernel-java/pull/284)
-- Upgrade to azure-ai-openai 1.0.0-beta.14
-
-# 1.4.2
-
-- Fix bug effecting using native Java methods with OpenAI tool calling
-
-# 1.4.1
-
-- Add Otel Telemetry on function invocations
-- Fix bug to add type information to OpenAI function parameters
-- Improve efficiency of cosine similarity calculation
-- Fix concurrency bugs on database creation
-- Add sample demonstrating a text splitter for chunking text for embedding
-- Add hybridSearchAsync support to Azure AI Search
-
-# 1.4.0
-
-- Upgrade to azure-ai-openai 1.0.0-beta.12
-- Add vector stores with vector search support for Azure AI Search, Redis, JDBC with Postgres, MySQL, SQLite and HSQLDB.
- Moving these features out of the experimental stage.
-
-# 1.3.0
-
-- Added support for Json Schema to Open AI Chat Completions
-- Upgraded to openai sdk 1.0.0-beta.11
-- Added convenience method `FunctionInvocation.withResultTypeAutoConversion` which sets the return type and registers a
- type converter based on Jackson for the return type.
-- Added localization support for error/debug messages
-- Add vector search to experimental vector stores.
- - Approximate vector search for Azure AI Search, Redis and JDBC with Postgres.
- - Exhaustive vector search for VolatileVectorStore and default JDBC query provider, MySQL, SQLite and HSQLDB.
-
-### Bug Fixes
-
-- Fixed type converters not being passed on to be used in tool invocations
-
-### Breaking Changes
-
-- To support the new Json Schema feature, ResponseFormat has changed from an enum to a class.
-
-# 1.2.2
-
-- Fix bug in `FunctionInvocation` not using per-invocation type conversion when calling `withResultType`.
-- Fix bug in Global Hooks not being invoked under certain circumstances.
-- Add fluent returns to `ChatHistory` `addXMessage` methods.
-- Add user agent opt-out for OpenAI requests by setting the property `semantic-kernel.useragent-disable` to `true`.
-- Add several convenience `invokePromptAsync` methods to `Kernel`.
-- Allow Handlebars templates to call Javabean getters to extract data from invocation arguments.
-- Improve thread safety of `ChatHistory`.
-
-#### Experimental Changes
-
-- Add JDBC vector store
-
-#### Non-API Changes
-
-- Add custom type Conversion example, `CustomTypes_Example`
-- Dependency updates and pom cleanup
-- Documentation updates
-
-# 1.2.0
-
-- Add ability to use image_url as content for a OpenAi chat completion
- - As part of this `ChatMessageTextContent` and `ChatMessageImageContent` was added that extends the
- existing `ChatMessageContent` class. `ChatMessageContent` for now defaults to a text content type for backwards
- compatibility. However, users are encouraged to migrate to using the builders on `ChatMessageTextContent` to
- create text based chat messages.
- - Constructors of `ChatMessageContent` were also modified to support this change.
-- Added preliminary hugging face implementation that is still in development/beta.
-- Added Gemini support
-- Added OpenTelemetry spans for OpenAI requests
-- Update the user agent for OpenAI requests
-- Move XML parsing classes to implementation package as they are not expected to be used by users.
-
-#### Non-API Changes
-
-- Reorganized the repository when moving to the new Github location
-- Removed non-Java files
-- Update readmes
-- Update build scripts
-- Bring back Spring example project
-
-# 1.1.5
-
-- Fix bug with removing new lines on function parameters on Windows
-- Fix bug forming serializing arguments to tool calls
-
-# 1.1.3
-
-- Fix bug appending plugin name to tool calls
-- Improve exception handling in OpenAIChatCompletion
-
-# 1.1.2
-
-- Upgrade azure-identity to 1.12.1
-- Remove fixed netty version in bom
-
-# 1.1.1
-
-- Upgrade azure-ai-openai to 1.0.0-beta.8
-
-# 1.1.0
-
-### Breaking Changes
-
-- `ChatHistory` no longer has a default message, see below for more details.
-
-### Api Changes
-
-- Allow setting deployment name in addition to modelId on AI services.
-- Remove default message of "Assistant is a large language model" from ChatHistory
- - **This is a breaking change if you were relying on the default message in your code**
-- Add InvocationReturnMode and rework OpenAi chat completion to allow configuring what data is returned from Chat
- requests
-
-### Other
-
-- Reorganize example projects and documentation structure.
-- Number of sample updates and bug fixes.
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index f9ba8cf65..000000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Microsoft Open Source Code of Conduct
-
-This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
-
-Resources:
-
-- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
-- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
-- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
diff --git a/COMMUNITY.md b/COMMUNITY.md
deleted file mode 100644
index 7afdb02cb..000000000
--- a/COMMUNITY.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# Welcome to the Semantic Kernel Community!
-
-Below are some ways that you can get involved in the SK Community.
-
-## Engage on Github
-
-File issues, submit PRs, and provide feedback and ideas to what you'd like to see from the Semantic Kernel.
-We do our best to respond to each submission.
-
-## Join the conversation on Discord
-
-We have a growing and active channel on Discord where you can get help, engage in lively discussion,
-and share what you've built with Semantic Kernel!
-
-Join our Discord:
-[https://aka.ms/SKDiscord](https://aka.ms/SKDiscord)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index 14cbb9be6..000000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,147 +0,0 @@
-# Contributing to Semantic Kernel
-
-You can contribute to Semantic Kernel with issues and pull requests (PRs). Simply
-filing issues for problems you encounter is a great way to contribute. Contributing
-code is greatly appreciated.
-
-## Reporting Issues
-
-We always welcome bug reports, API proposals and overall feedback. Here are a few
-tips on how you can make reporting your issue as effective as possible.
-
-### Where to Report
-
-New issues can be reported in our [list of issues](https://github.com/microsoft/semantic-kernel/issues).
-
-Before filing a new issue, please search the list of issues to make sure it does
-not already exist.
-
-If you do find an existing issue for what you wanted to report, please include
-your own feedback in the discussion. Do consider upvoting (👍 reaction) the original
-post, as this helps us prioritize popular issues in our backlog.
-
-### Writing a Good Bug Report
-
-Good bug reports make it easier for maintainers to verify and root cause the
-underlying problem.
-The better a bug report, the faster the problem will be resolved. Ideally, a bug
-report should contain the following information:
-
-- A high-level description of the problem.
-- A _minimal reproduction_, i.e. the smallest size of code/configuration required
- to reproduce the wrong behavior.
-- A description of the _expected behavior_, contrasted with the _actual behavior_ observed.
-- Information on the environment: OS/distribution, CPU architecture, SDK version, etc.
-- Additional information, e.g. Is it a regression from previous versions? Are there
- any known workarounds?
-
-## Contributing Changes
-
-Project maintainers will merge accepted code changes from contributors.
-
-### DOs and DON'Ts
-
-DO's:
-
-- **DO** follow the standard coding conventions
-
- - [.NET](https://learn.microsoft.com/dotnet/csharp/fundamentals/coding-style/coding-conventions)
- - [Python](https://pypi.org/project/black/)
- - [Typescript](https://typescript-eslint.io/rules/)/[React](https://github.com/jsx-eslint/eslint-plugin-react/tree/master/docs/rules)
-
-- **DO** give priority to the current style of the project or file you're changing
- if it diverges from the general guidelines.
-- **DO** include tests when adding new features. When fixing bugs, start with
- adding a test that highlights how the current behavior is broken.
-- **DO** keep the discussions focused. When a new or related topic comes up
- it's often better to create new issue than to side track the discussion.
-- **DO** clearly state on an issue that you are going to take on implementing it.
-- **DO** blog and tweet (or whatever) about your contributions, frequently!
-
-DON'Ts:
-
-- **DON'T** surprise us with big pull requests. Instead, file an issue and start
- a discussion so we can agree on a direction before you invest a large amount of time.
-- **DON'T** commit code that you didn't write. If you find code that you think is a good
- fit to add to Semantic Kernel, file an issue and start a discussion before proceeding.
-- **DON'T** submit PRs that alter licensing related files or headers. If you believe
- there's a problem with them, file an issue and we'll be happy to discuss it.
-- **DON'T** make new APIs without filing an issue and discussing with us first.
-
-### Breaking Changes
-
-Contributions must maintain API signature and behavioral compatibility. Contributions
-that include breaking changes will be rejected. Please file an issue to discuss
-your idea or change if you believe that a breaking change is warranted.
-
-### Suggested Workflow
-
-We use and recommend the following workflow:
-
-1. Create an issue for your work.
- - You can skip this step for trivial changes.
- - Reuse an existing issue on the topic, if there is one.
- - Get agreement from the team and the community that your proposed change is
- a good one.
- - Clearly state that you are going to take on implementing it, if that's the case.
- You can request that the issue be assigned to you. Note: The issue filer and
- the implementer don't have to be the same person.
-2. Create a personal fork of the repository on GitHub (if you don't already have one).
-3. In your fork, create a branch off of main (`git checkout -b mybranch`).
- - Name the branch so that it clearly communicates your intentions, such as
- "issue-123" or "githubhandle-issue".
-4. Make and commit your changes to your branch.
-5. Add new tests corresponding to your change, if applicable.
-6. Run the relevant scripts in [the section below](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#dev-scripts) to ensure that your build is clean and all tests are passing.
-7. Create a PR against the repository's **main** branch.
- - State in the description what issue or improvement your change is addressing.
- - Verify that all the Continuous Integration checks are passing.
-8. Wait for feedback or approval of your changes from the code maintainers.
-9. When area owners have signed off, and all checks are green, your PR will be merged.
-
-### Development scripts
-
-The scripts below are used to build, test, and lint within the project.
-
-- Python: see [python/DEV_SETUP.md](https://github.com/microsoft/semantic-kernel/blob/main/python/DEV_SETUP.md#pipeline-checks).
-- .NET:
- - Build/Test: `run build.cmd` or `bash build.sh`
- - Linting (auto-fix): `dotnet format`
-- Typescript:
- - Build/Test: `yarn build`
- - Linting (auto-fix): `yarn lint:fix`
-
-### Adding Plugins and Memory Connectors
-
-When considering contributions to plugins and memory connectors for Semantic
-Kernel, please note the following guidelines:
-
-#### Plugins
-
-We appreciate your interest in extending Semantic Kernel's functionality through
-plugins. However, we want to clarify our approach to hosting plugins within our
-GitHub repository. To maintain a clean and manageable codebase, we will not be
-hosting plugins directly in the Semantic Kernel GitHub repository.
-Instead, we encourage contributors to host their plugin code in separate
-repositories under their own GitHub accounts or organization. You can then
-provide a link to your plugin repository in the relevant discussions, issues,
-or documentation within the Semantic Kernel repository. This approach ensures
-that each plugin can be maintained independently and allows for easier tracking
-of updates and issues specific to each plugin.
-
-#### Memory Connectors
-
-For memory connectors, while we won't be directly adding hosting for them within
-the Semantic Kernel repository, we highly recommend building memory connectors
-as separate plugins. Memory connectors play a crucial role in interfacing with
-external memory systems, and treating them as plugins enhances modularity and
-maintainability.
-
-### PR - CI Process
-
-The continuous integration (CI) system will automatically perform the required
-builds and run tests (including the ones you are expected to run) for PRs. Builds
-and test runs must be clean.
-
-If the CI build fails for any reason, the PR issue will be updated with a link
-that can be used to determine the cause of the failure.
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 9e841e7a2..000000000
--- a/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
- MIT License
-
- Copyright (c) Microsoft Corporation.
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE
diff --git a/PACKAGES.md b/PACKAGES.md
deleted file mode 100644
index a6e2dc7a4..000000000
--- a/PACKAGES.md
+++ /dev/null
@@ -1,76 +0,0 @@
-# Semantic Kernel for Java Packages
-
-The Semantic Kernel has the packages below, all are under the groupId `com.microsoft.semantic-kernel`, and can be imported
-to maven.
-
-```xml
-
- com.microsoft.semantic-kernel
- semantickernel-api
-
-```
-
-A BOM is provided that can be used to define the versions of all Semantic Kernel packages.
-
-```xml
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-bom
- ${semantickernel.version}
- import
- pom
-
-
-
-```
-
-## Common Packages
-
-`semantickernel-bom`
-: A Maven project BOM that can be used to define the versions of all Semantic Kernel packages.
-
-`semantickernel-api`
-: Package that defines the core public API for the Semantic Kernel for a Maven project.
-
-## Services
-
-`semantickernel-aiservices-openai`
-: Provides a connector that can be used to interact with the OpenAI API.
-
-## Example Configurations
-
-### Example: OpenAI + SQLite
-
-POM XML for a simple project that uses OpenAI.
-
-```xml
-
-
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-bom
- ${semantickernel.version}
- import
- pom
-
-
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-api
-
-
- com.microsoft.semantic-kernel
- semantickernel-connectors-ai-openai
-
-
-
-```
-
-
-
diff --git a/README.md b/README.md
deleted file mode 100644
index 27585762d..000000000
--- a/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
-[](https://github.com/microsoft/semantic-kernel-java/actions/workflows/java-build.yml)
-[](https://github.com/microsoft/semantic-kernel-java/blob/main/LICENSE)
-[](https://aka.ms/SKDiscord)
-
-# Semantic Kernel for Java
-
-Welcome to the Semantic Kernel for Java. For detailed documentation, visit [Microsoft Learn](https://learn.microsoft.com/en-us/semantic-kernel/overview/?tabs=Java&pivots=programming-language-java).
-
-[Semantic Kernel](https://learn.microsoft.com/en-us/semantic-kernel/overview/) is an SDK that integrates Large Language Models (LLMs) like [OpenAI](https://platform.openai.com/docs/introduction), [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service), and [Hugging Face](https://huggingface.co/)
-with conventional programming languages like C#, Python, and Java. Semantic Kernel achieves this by allowing you to define [plugins](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/plugins??tabs=Java&pivots=programming-language-java) that can be chained together in just a [few lines of code](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/chaining-functions?tabs=Java&pivots=programming-language-java#using-the-runasync-method-to-simplify-your-code).
-
-What makes Semantic Kernel _special_, however, is its ability to _automatically_ orchestrate plugins with AI. With Semantic Kernel [planners](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/planner?tabs=Java&pivots=programming-language-java), you can ask an LLM to generate a plan that achieves a user's unique goal. Afterwards, Semantic Kernel will execute the plan for the user.
-
-For C#, Python and other language support, see [microsoft/semantic-kernel](https://github.com/microsoft/semantic-kernel).
-
-#### Please star the repo to show your support for this project!
-
-
-
-## Getting started with Semantic Kernel for Java
-
-The quickest way to get started with the basics is to get an API key from either OpenAI or Azure OpenAI and to run one of the Java console applications/scripts below.
-
-1. Clone the repository: `git clone https://github.com/microsoft/semantic-kernel-java.git`
-2. Follow the instructions [Start learning how to use Semantic Kernel](https://learn.microsoft.com/en-us/semantic-kernel/get-started/quick-start-guide?tabs=Java&pivots=programming-language-java).
-
-## Documentation: Learning how to use Semantic Kernel
-
-The fastest way to learn how to use Semantic Kernel is with our walkthroughs
-on our Learn site.
-
-1. 📖 [Overview of the kernel](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/?tabs=Java&pivots=programming-language-java)
-1. 🔌 [Understanding AI plugins](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/plugins?tabs=Java&pivots=programming-language-java)
-1. 👄 [Creating semantic functions](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/semantic-functions?tabs=Java&pivots=programming-language-java)
-1. 💽 [Creating native functions](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/native-functions?tabs=Java&pivots=programming-language-java)
-1. ⛓️ [Chaining functions together](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/chaining-functions?tabs=Java&pivots=programming-language-java)
-1. 🤖 [Auto create plans with planner](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/planner?tabs=Java&pivots=programming-language-java)
-1. 💡 [Create and run a ChatGPT plugin](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/chatgpt-plugins?tabs=Java&pivots=programming-language-java)
-
-## Join the community
-
-We welcome your contributions and suggestions to SK community! One of the easiest
-ways to participate is to engage in discussions in the GitHub repository.
-Bug reports and fixes are welcome!
-
-For new features, components, or extensions, please open an issue and discuss with
-us before sending a PR. This is to avoid rejection as we might be taking the core
-in a different direction, but also to consider the impact on the larger ecosystem.
-
-To learn more and get started:
-
-- Read the [documentation](https://learn.microsoft.com/en-us/semantic-kernel/overview/?tabs=Java&pivots=programming-language-java)
-- Learn how to [contribute](https://learn.microsoft.com/en-us/semantic-kernel/support/contributing?tabs=Java&pivots=programming-language-java) to the project
-- Join the [Discord community](https://aka.ms/SKDiscord)
-- Attend [regular office hours and SK community events](COMMUNITY.md)
-- Follow the team on our [blog](https://aka.ms/sk/blog)
-
-## Contributor Wall of Fame
-
-[](https://github.com/microsoft/semantic-kernel-java/graphs/contributors)
-
-## Code of Conduct
-
-This project has adopted the
-[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
-For more information see the
-[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
-or contact [opencode@microsoft.com](mailto:opencode@microsoft.com)
-with any additional questions or comments.
-
-## License
-
-Copyright (c) Microsoft Corporation. All rights reserved.
-
-Licensed under the [MIT](LICENSE) license.
diff --git a/SECURITY.md b/SECURITY.md
deleted file mode 100644
index eed215e18..000000000
--- a/SECURITY.md
+++ /dev/null
@@ -1,41 +0,0 @@
-
-
-## Security
-
-Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
-
-If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://www.microsoft.com/en-us/msrc/definition-of-a-security-vulnerability?rtc=1), please report it to us as described below.
-
-## Reporting Security Issues
-
-**Please do not report security vulnerabilities through public GitHub issues.**
-
-Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
-
-If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc?rtc=2).
-
-You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/en-us/msrc?rtc=2).
-
-Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
-
- * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
- * Full paths of source file(s) related to the manifestation of the issue
- * The location of the affected source code (tag/branch/commit or direct URL)
- * Any special configuration required to reproduce the issue
- * Step-by-step instructions to reproduce the issue
- * Proof-of-concept or exploit code (if possible)
- * Impact of the issue, including how an attacker might exploit the issue
-
-This information will help us triage your report more quickly.
-
-If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://www.microsoft.com/en-us/msrc/bounty?rtc=2) page for more details about our active programs.
-
-## Preferred Languages
-
-We prefer all communications to be in English.
-
-## Policy
-
-Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd?rtc=2).
-
-
diff --git a/agents/semantickernel-agents-core/pom.xml b/agents/semantickernel-agents-core/pom.xml
deleted file mode 100644
index 1270d00d0..000000000
--- a/agents/semantickernel-agents-core/pom.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
- 4.0.0
-
- com.microsoft.semantic-kernel
- semantickernel-parent
- 1.4.4-RC3-SNAPSHOT
- ../../pom.xml
-
-
- semantickernel-agents-core
-
- Semantic Kernel Chat Completion Agent
- Chat Completion Agent for Semantic Kernel
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-api
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-builders
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-ai-services
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-exceptions
-
-
-
-
\ No newline at end of file
diff --git a/agents/semantickernel-agents-core/src/main/java/com/microsoft/semantickernel/agents/chatcompletion/ChatCompletionAgent.java b/agents/semantickernel-agents-core/src/main/java/com/microsoft/semantickernel/agents/chatcompletion/ChatCompletionAgent.java
deleted file mode 100644
index b5294fe6b..000000000
--- a/agents/semantickernel-agents-core/src/main/java/com/microsoft/semantickernel/agents/chatcompletion/ChatCompletionAgent.java
+++ /dev/null
@@ -1,329 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.agents.chatcompletion;
-
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.agents.AgentInvokeOptions;
-import com.microsoft.semantickernel.agents.AgentResponseItem;
-import com.microsoft.semantickernel.agents.AgentThread;
-import com.microsoft.semantickernel.agents.KernelAgent;
-import com.microsoft.semantickernel.builders.SemanticKernelBuilder;
-import com.microsoft.semantickernel.functionchoice.AutoFunctionChoiceBehavior;
-import com.microsoft.semantickernel.orchestration.InvocationContext;
-import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.semanticfunctions.PromptTemplate;
-import com.microsoft.semantickernel.semanticfunctions.PromptTemplateConfig;
-import com.microsoft.semantickernel.semanticfunctions.PromptTemplateFactory;
-import com.microsoft.semantickernel.services.ServiceNotFoundException;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-import javax.annotation.Nullable;
-import java.util.List;
-import java.util.stream.Collectors;
-
-public class ChatCompletionAgent extends KernelAgent {
-
- private ChatCompletionAgent(
- String id,
- String name,
- String description,
- Kernel kernel,
- KernelArguments kernelArguments,
- InvocationContext context,
- String instructions,
- PromptTemplate template) {
- super(
- id,
- name,
- description,
- kernel,
- kernelArguments,
- context,
- instructions,
- template);
- }
-
- /**
- * Invoke the agent with the given chat history.
- *
- * @param messages The chat history to process
- * @param thread The agent thread to use
- * @param options The options for invoking the agent
- * @return A Mono containing the agent response
- */
- @Override
- public Mono>>> invokeAsync(
- List> messages,
- @Nullable AgentThread thread,
- @Nullable AgentInvokeOptions options) {
- return ensureThreadExistsWithMessagesAsync(messages, thread, ChatHistoryAgentThread::new)
- .cast(ChatHistoryAgentThread.class)
- .flatMap(agentThread -> {
- // Extract the chat history from the thread
- ChatHistory history = new ChatHistory(
- agentThread.getChatHistory().getMessages());
-
- // Invoke the agent with the chat history
- return internalInvokeAsync(
- history,
- agentThread,
- options)
- .map(chatMessageContents -> chatMessageContents.stream()
- .map(message -> new AgentResponseItem>(message,
- agentThread))
- .collect(Collectors.toList()));
- });
- }
-
- private Mono>> internalInvokeAsync(
- ChatHistory history,
- AgentThread thread,
- @Nullable AgentInvokeOptions options) {
- if (options == null) {
- options = new AgentInvokeOptions();
- }
-
- final Kernel kernel = options.getKernel() != null ? options.getKernel() : this.kernel;
- final KernelArguments arguments = mergeArguments(options.getKernelArguments());
- final String additionalInstructions = options.getAdditionalInstructions();
- final InvocationContext invocationContext = options.getInvocationContext() != null
- ? options.getInvocationContext()
- : this.invocationContext;
-
- try {
- ChatCompletionService chatCompletionService = kernel
- .getService(ChatCompletionService.class, arguments);
-
- PromptExecutionSettings executionSettings = invocationContext != null
- && invocationContext.getPromptExecutionSettings() != null
- ? invocationContext.getPromptExecutionSettings()
- : arguments.getExecutionSettings()
- .get(chatCompletionService.getServiceId());
-
- // Build base invocation context
- InvocationContext.Builder builder = InvocationContext.builder()
- .withPromptExecutionSettings(executionSettings)
- .withReturnMode(InvocationReturnMode.NEW_MESSAGES_ONLY);
-
- if (invocationContext != null) {
- builder = builder
- .withTelemetry(invocationContext.getTelemetry())
- .withFunctionChoiceBehavior(invocationContext.getFunctionChoiceBehavior())
- .withToolCallBehavior(invocationContext.getToolCallBehavior())
- .withContextVariableConverter(invocationContext.getContextVariableTypes())
- .withKernelHooks(invocationContext.getKernelHooks());
- }
-
- InvocationContext agentInvocationContext = builder.build();
-
- return renderInstructionsAsync(kernel, arguments, agentInvocationContext).flatMap(
- instructions -> {
- // Create a new chat history with the instructions
- ChatHistory chat = new ChatHistory(
- instructions);
-
- // Add agent additional instructions
- if (additionalInstructions != null) {
- chat.addMessage(new ChatMessageContent<>(
- AuthorRole.SYSTEM,
- additionalInstructions));
- }
-
- // Add the chat history to the new chat
- chat.addAll(history);
-
- // Retrieve the chat message contents asynchronously and notify the thread
- if (shouldNotifyFunctionCalls(agentInvocationContext)) {
- // Notify all messages including function calls
- return chatCompletionService
- .getChatMessageContentsAsync(chat, kernel, agentInvocationContext)
- .flatMapMany(Flux::fromIterable)
- .concatMap(message -> notifyThreadOfNewMessageAsync(thread, message)
- .thenReturn(message))
- // Filter out function calls and their results
- .filter(message -> message.getContent() != null
- && message.getAuthorRole() != AuthorRole.TOOL)
- .collect(Collectors.toList());
- }
-
- // Return chat completion messages without notifying the thread
- // We shouldn't add the function call content to the thread, since
- // we don't know if the user will execute the call. They should add it themselves.
- return chatCompletionService.getChatMessageContentsAsync(chat, kernel,
- agentInvocationContext);
- });
-
- } catch (ServiceNotFoundException e) {
- return Mono.error(e);
- }
- }
-
- boolean shouldNotifyFunctionCalls(InvocationContext invocationContext) {
- if (invocationContext == null) {
- return false;
- }
-
- if (invocationContext.getFunctionChoiceBehavior() != null && invocationContext
- .getFunctionChoiceBehavior() instanceof AutoFunctionChoiceBehavior) {
- return ((AutoFunctionChoiceBehavior) invocationContext.getFunctionChoiceBehavior())
- .isAutoInvoke();
- }
-
- if (invocationContext.getToolCallBehavior() != null) {
- return invocationContext.getToolCallBehavior().isAutoInvokeAllowed();
- }
-
- return false;
- }
-
- @Override
- public Mono notifyThreadOfNewMessageAsync(AgentThread thread,
- ChatMessageContent> message) {
- return Mono.defer(() -> {
- return thread.onNewMessageAsync(message);
- });
- }
-
- /**
- * Builder for creating instances of ChatCompletionAgent.
- */
- public static Builder builder() {
- return new Builder();
- }
-
- public static class Builder implements SemanticKernelBuilder {
- private String id;
- private String name;
- private String description;
- private Kernel kernel;
- private KernelArguments kernelArguments;
- private InvocationContext invocationContext;
- private String instructions;
- private PromptTemplate template;
-
- /**
- * Set the ID of the agent.
- *
- * @param id The ID of the agent.
- */
- public Builder withId(String id) {
- this.id = id;
- return this;
- }
-
- /**
- * Set the name of the agent.
- *
- * @param name The name of the agent.
- */
- public Builder withName(String name) {
- this.name = name;
- return this;
- }
-
- /**
- * Set the description of the agent.
- *
- * @param description The description of the agent.
- */
- public Builder withDescription(String description) {
- this.description = description;
- return this;
- }
-
- /**
- * Set the kernel to use for the agent.
- *
- * @param kernel The kernel to use.
- */
- public Builder withKernel(Kernel kernel) {
- this.kernel = kernel;
- return this;
- }
-
- /**
- * Set the kernel arguments to use for the agent.
- *
- * @param KernelArguments The kernel arguments to use.
- */
- @SuppressFBWarnings("EI_EXPOSE_REP2")
- public Builder withKernelArguments(KernelArguments KernelArguments) {
- this.kernelArguments = KernelArguments;
- return this;
- }
-
- /**
- * Set the instructions for the agent.
- *
- * @param instructions The instructions for the agent.
- */
- public Builder withInstructions(String instructions) {
- this.instructions = instructions;
- return this;
- }
-
- /**
- * Set the invocation context for the agent.
- *
- * @param invocationContext The invocation context to use.
- */
- public Builder withInvocationContext(InvocationContext invocationContext) {
- this.invocationContext = invocationContext;
- return this;
- }
-
- /**
- * Set the template for the agent.
- *
- * @param template The template to use.
- */
- public Builder withTemplate(PromptTemplate template) {
- this.template = template;
- return this;
- }
-
- /**
- * Build the ChatCompletionAgent instance.
- *
- * @return The ChatCompletionAgent instance.
- */
- public ChatCompletionAgent build() {
- return new ChatCompletionAgent(
- id,
- name,
- description,
- kernel,
- kernelArguments,
- invocationContext,
- instructions,
- template);
- }
-
- /**
- * Build the ChatCompletionAgent instance with the given prompt template config and factory.
- *
- * @param promptTemplateConfig The prompt template config to use.
- * @param promptTemplateFactory The prompt template factory to use.
- * @return The ChatCompletionAgent instance.
- */
- public ChatCompletionAgent build(PromptTemplateConfig promptTemplateConfig,
- PromptTemplateFactory promptTemplateFactory) {
- return new ChatCompletionAgent(
- id,
- name,
- description,
- kernel,
- kernelArguments,
- invocationContext,
- promptTemplateConfig.getTemplate(),
- promptTemplateFactory.tryCreate(promptTemplateConfig));
- }
- }
-}
diff --git a/agents/semantickernel-agents-core/src/main/java/com/microsoft/semantickernel/agents/chatcompletion/ChatHistoryAgentThread.java b/agents/semantickernel-agents-core/src/main/java/com/microsoft/semantickernel/agents/chatcompletion/ChatHistoryAgentThread.java
deleted file mode 100644
index 6b3f62a9b..000000000
--- a/agents/semantickernel-agents-core/src/main/java/com/microsoft/semantickernel/agents/chatcompletion/ChatHistoryAgentThread.java
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.agents.chatcompletion;
-
-import com.microsoft.semantickernel.agents.AgentThread;
-import com.microsoft.semantickernel.agents.BaseAgentThread;
-import com.microsoft.semantickernel.builders.SemanticKernelBuilder;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-import reactor.core.publisher.Mono;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-import java.util.List;
-import java.util.UUID;
-
-public class ChatHistoryAgentThread extends BaseAgentThread {
- private ChatHistory chatHistory;
-
- /**
- * Constructor for ChatHistoryAgentThread.
- *
- */
- public ChatHistoryAgentThread() {
- this(UUID.randomUUID().toString(), new ChatHistory());
- }
-
- /**
- * Constructor for ChatHistoryAgentThread.
- *
- * @param chatHistory The chat history.
- */
- public ChatHistoryAgentThread(@Nullable ChatHistory chatHistory) {
- this(UUID.randomUUID().toString(), chatHistory);
- }
-
- /**
- * Constructor for ChatHistoryAgentThread.
- *
- * @param id The ID of the thread.
- * @param chatHistory The chat history.
- */
- public ChatHistoryAgentThread(String id, @Nullable ChatHistory chatHistory) {
- super(id);
- this.chatHistory = chatHistory != null ? chatHistory : new ChatHistory();
- }
-
- /**
- * Get the chat history.
- *
- * @return The chat history.
- */
- @SuppressFBWarnings("EI_EXPOSE_REP")
- public ChatHistory getChatHistory() {
- return chatHistory;
- }
-
- @Override
- public Mono createAsync() {
- if (this.id == null) {
- this.id = UUID.randomUUID().toString();
- chatHistory = new ChatHistory();
- }
- return Mono.just(id);
- }
-
- @Override
- public Mono deleteAsync() {
- return Mono.fromRunnable(chatHistory::clear);
- }
-
- /**
- * Create a copy of the thread.
- *
- * @return A new instance of the thread.
- */
- @Override
- public ChatHistoryAgentThread copy() {
- return new ChatHistoryAgentThread(this.id, new ChatHistory(chatHistory.getMessages()));
- }
-
- @Override
- public Mono onNewMessageAsync(ChatMessageContent> newMessage) {
- return Mono.fromRunnable(() -> {
- chatHistory.addMessage(newMessage);
- });
- }
-
- public List> getMessages() {
- return chatHistory.getMessages();
- }
-
- public static Builder builder() {
- return new Builder();
- }
-
- public static class Builder implements SemanticKernelBuilder {
- private String id;
- private ChatHistory chatHistory;
-
- /**
- * Set the ID of the thread.
- *
- * @param id The ID of the thread.
- * @return The builder instance.
- */
- public Builder withId(String id) {
- this.id = id;
- return this;
- }
-
- /**
- * Set the chat history.
- *
- * @param chatHistory The chat history.
- * @return The builder instance.
- */
- @SuppressFBWarnings("EI_EXPOSE_REP2")
- public Builder withChatHistory(ChatHistory chatHistory) {
- this.chatHistory = chatHistory;
- return this;
- }
-
- @Override
- public ChatHistoryAgentThread build() {
- return new ChatHistoryAgentThread(id, chatHistory);
- }
- }
-}
diff --git a/aiservices/google/pom.xml b/aiservices/google/pom.xml
deleted file mode 100644
index 145ee5494..000000000
--- a/aiservices/google/pom.xml
+++ /dev/null
@@ -1,73 +0,0 @@
-
-
- 4.0.0
-
- com.microsoft.semantic-kernel
- semantickernel-parent
- 1.4.4-RC3-SNAPSHOT
- ../../pom.xml
-
-
- semantickernel-aiservices-google
- Semantic Kernel Google Services
- Google services for Semantic Kernel
-
-
-
-
- com.google.cloud
- libraries-bom
- 26.49.0
- pom
- import
-
-
-
-
-
-
- com.google.cloud
- google-cloud-vertexai
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-api
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-builders
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-ai-services
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-exceptions
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-localization
-
-
-
- com.fasterxml.jackson.core
- jackson-databind
- compile
-
-
- com.fasterxml.jackson.core
- jackson-core
- compile
-
-
-
-
- javax.xml.stream
- stax-api
- provided
-
-
-
-
\ No newline at end of file
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/GeminiService.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/GeminiService.java
deleted file mode 100644
index a65e96afb..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/GeminiService.java
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google;
-
-import com.google.cloud.vertexai.VertexAI;
-import com.microsoft.semantickernel.services.AIService;
-
-import javax.annotation.Nullable;
-
-/**
- * Makes a Gemini service available to the Semantic Kernel.
- */
-public class GeminiService implements AIService {
- private final VertexAI client;
- private final String modelId;
-
- /**
- * Creates a new Gemini service.
- * @param client The VertexAI client
- * @param modelId The Gemini model ID
- */
- protected GeminiService(VertexAI client, String modelId) {
- this.client = client;
- this.modelId = modelId;
- }
-
- @Nullable
- @Override
- public String getModelId() {
- return modelId;
- }
-
- @Nullable
- @Override
- public String getServiceId() {
- return null;
- }
-
- /**
- * Gets the VertexAI client.
- * @return The VertexAI client
- */
- protected VertexAI getClient() {
- return client;
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/GeminiServiceBuilder.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/GeminiServiceBuilder.java
deleted file mode 100644
index 3312299cc..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/GeminiServiceBuilder.java
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google;
-
-import com.google.cloud.vertexai.VertexAI;
-import com.microsoft.semantickernel.builders.SemanticKernelBuilder;
-import javax.annotation.Nullable;
-
-/**
- * Builder for a Gemini service.
- * @param The type of the service
- * @param The type of the builder
- */
-public abstract class GeminiServiceBuilder> implements
- SemanticKernelBuilder {
-
- @Nullable
- protected String modelId;
- @Nullable
- protected VertexAI client;
-
- /**
- * Sets the model ID for the service
- *
- * @param modelId The model ID
- * @return The builder
- */
- public U withModelId(String modelId) {
- this.modelId = modelId;
- return (U) this;
- }
-
- /**
- * Sets the VertexAI client for the service
- *
- * @param client The VertexAI client
- * @return The builder
- */
- public U withVertexAIClient(VertexAI client) {
- this.client = client;
- return (U) this;
- }
-
- @Override
- public abstract T build();
-}
\ No newline at end of file
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiChatCompletion.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiChatCompletion.java
deleted file mode 100644
index 2bd45c022..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiChatCompletion.java
+++ /dev/null
@@ -1,471 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.chatcompletion;
-
-import com.google.cloud.vertexai.VertexAI;
-import com.google.cloud.vertexai.api.Content;
-import com.google.cloud.vertexai.api.FunctionDeclaration;
-import com.google.cloud.vertexai.api.FunctionResponse;
-import com.google.cloud.vertexai.api.GenerateContentResponse;
-import com.google.cloud.vertexai.api.GenerationConfig;
-import com.google.cloud.vertexai.api.Part;
-import com.google.cloud.vertexai.api.Schema;
-import com.google.cloud.vertexai.api.Tool;
-import com.google.cloud.vertexai.api.Type;
-import com.google.cloud.vertexai.generativeai.GenerativeModel;
-import com.google.protobuf.Struct;
-import com.google.protobuf.Value;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.google.GeminiService;
-import com.microsoft.semantickernel.aiservices.google.GeminiServiceBuilder;
-import com.microsoft.semantickernel.aiservices.google.implementation.MonoConverter;
-import com.microsoft.semantickernel.contextvariables.ContextVariableTypes;
-import com.microsoft.semantickernel.exceptions.AIException;
-import com.microsoft.semantickernel.exceptions.SKCheckedException;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.localization.SemanticKernelResources;
-import com.microsoft.semantickernel.orchestration.FunctionResult;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.orchestration.InvocationContext;
-import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
-import com.microsoft.semantickernel.plugin.KernelPlugin;
-import com.microsoft.semantickernel.semanticfunctions.InputVariable;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import com.microsoft.semantickernel.services.chatcompletion.StreamingChatContent;
-import java.io.IOException;
-import java.time.OffsetDateTime;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.UUID;
-import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-/**
- * A chat completion service that uses the Gemini model to generate chat completions.
- */
-public class GeminiChatCompletion extends GeminiService implements ChatCompletionService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(GeminiChatCompletion.class);
-
- /**
- * Constructor for {@link GeminiChatCompletion}.
- * @param client The VertexAI client
- * @param modelId The model ID
- */
- public GeminiChatCompletion(VertexAI client, String modelId) {
- super(client, modelId);
- }
-
- /**
- * Create a new instance of {@link GeminiChatCompletion.Builder}.
- *
- * @return a new instance of {@link GeminiChatCompletion.Builder}
- */
- public static Builder builder() {
- return new Builder();
- }
-
- @Override
- public Mono>> getChatMessageContentsAsync(String prompt,
- @Nullable Kernel kernel, @Nullable InvocationContext invocationContext) {
- GeminiXMLPromptParser.GeminiParsedPrompt parsedPrompt = GeminiXMLPromptParser.parse(prompt);
-
- return this.getChatMessageContentsAsync(parsedPrompt.getChatHistory(), kernel,
- invocationContext);
- }
-
- @Override
- public Flux> getStreamingChatMessageContentsAsync(
- ChatHistory chatHistory,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
-
- LOGGER.warn("Streaming has been called on GeminiChatCompletion service. "
- + "This is currently not supported in Gemini. "
- + "The results will be returned in a non streaming fashion.");
-
- return getChatMessageContentsAsync(chatHistory, kernel, invocationContext)
- .flatMapIterable(chatMessageContents -> chatMessageContents)
- .map(content -> {
- return new GeminiStreamingChatMessageContent(
- content.getAuthorRole(),
- content.getContent(),
- getModelId(),
- content.getInnerContent(),
- content.getEncoding(),
- content.getMetadata(),
- null,
- UUID.randomUUID().toString());
- });
- }
-
- @Override
- public Flux> getStreamingChatMessageContentsAsync(String prompt,
- @Nullable Kernel kernel, @Nullable InvocationContext invocationContext) {
- LOGGER.warn("Streaming has been called on GeminiChatCompletion service. "
- + "This is currently not supported in Gemini. "
- + "The results will be returned in a non streaming fashion.");
-
- return getChatMessageContentsAsync(prompt, kernel, invocationContext)
- .flatMapIterable(chatMessageContents -> chatMessageContents)
- .map(content -> {
- return new GeminiStreamingChatMessageContent(
- content.getAuthorRole(),
- content.getContent(),
- getModelId(),
- content.getInnerContent(),
- content.getEncoding(),
- content.getMetadata(),
- null,
- UUID.randomUUID().toString());
- });
- }
-
- @Override
- public Mono>> getChatMessageContentsAsync(ChatHistory chatHistory,
- @Nullable Kernel kernel, @Nullable InvocationContext invocationContext) {
- return internalChatMessageContentsAsync(
- new ChatHistory(chatHistory.getMessages()),
- new ChatHistory(),
- kernel,
- invocationContext,
- Math.min(MAXIMUM_INFLIGHT_AUTO_INVOKES,
- invocationContext != null && invocationContext.getToolCallBehavior() != null
- ? invocationContext.getToolCallBehavior().getMaximumAutoInvokeAttempts()
- : 0));
- }
-
- private Mono>> internalChatMessageContentsAsync(
- ChatHistory fullHistory, ChatHistory newHistory, @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext, int invocationAttempts) {
-
- List contents = getContents(fullHistory);
-
- try {
- GenerativeModel model = getGenerativeModel(kernel, invocationContext);
- return MonoConverter.fromApiFuture(model.generateContentAsync(contents))
- .doOnError(e -> LOGGER.error(
- SemanticKernelResources.getString("error.generating.chat.completion"), e))
- .flatMap(result -> {
- // Get ChatMessageContent from the response
- GeminiChatMessageContent> response = getGeminiChatMessageContentFromResponse(
- result);
-
- // Add assistant response to the chat history
- fullHistory.addMessage(response);
- newHistory.addMessage(response);
-
- // Just return the result:
- // If we don't want to attempt to invoke any functions or if we have no function calls
- if (invocationAttempts <= 0 || response.getGeminiFunctionCalls().isEmpty()) {
- if (invocationContext != null && invocationContext
- .returnMode() == InvocationReturnMode.FULL_HISTORY) {
- return Mono.just(fullHistory.getMessages());
- }
- if (invocationContext != null && invocationContext
- .returnMode() == InvocationReturnMode.LAST_MESSAGE_ONLY) {
- ChatHistory lastMessage = new ChatHistory();
- lastMessage.addMessage(response);
-
- return Mono.just(lastMessage.getMessages());
- }
-
- return Mono.just(newHistory.getMessages());
- }
-
- // Perform the function calls
- List> functionResults = response
- .getGeminiFunctionCalls().stream()
- .map(geminiFunctionCall -> performFunctionCall(kernel, invocationContext,
- geminiFunctionCall))
- .collect(Collectors.toList());
-
- Mono> combinedResults = Flux
- .fromIterable(functionResults)
- .flatMap(mono -> mono)
- .collectList();
-
- // Add the function responses to the chat history
- return combinedResults.flatMap(results -> {
- ChatMessageContent> functionResponsesMessage = new GeminiChatMessageContent<>(
- AuthorRole.USER,
- "", null, null, null, null, results);
-
- fullHistory.addMessage(functionResponsesMessage);
- newHistory.addMessage(functionResponsesMessage);
-
- return internalChatMessageContentsAsync(fullHistory, newHistory, kernel,
- invocationContext, invocationAttempts - 1);
- });
- });
- } catch (SKCheckedException | IOException e) {
- return Mono.error(new SKException("Error generating chat completion", e));
- }
- }
-
- // Convert from ChatHistory to List
- private List getContents(ChatHistory chatHistory) {
- List contents = new ArrayList<>();
- chatHistory.forEach(chatMessageContent -> {
- Content.Builder contentBuilder = Content.newBuilder();
-
- if (chatMessageContent.getAuthorRole() == AuthorRole.USER) {
- contentBuilder.setRole(GeminiRole.USER.toString());
-
- if (chatMessageContent instanceof GeminiChatMessageContent) {
- GeminiChatMessageContent> message = (GeminiChatMessageContent>) chatMessageContent;
-
- message.getGeminiFunctionCalls().forEach(geminiFunction -> {
- FunctionResult> functionResult = geminiFunction.getFunctionResult();
- if (functionResult == null || functionResult.getResult() == null) {
- throw new SKException("Gemini failed to return a result");
- }
-
- FunctionResponse functionResponse = FunctionResponse.newBuilder()
- .setName(geminiFunction.getFunctionCall().getName())
- .setResponse(Struct.newBuilder().putFields("result",
- Value.newBuilder()
- .setStringValue(
- (String) functionResult.getResult())
- .build()))
- .build();
-
- contentBuilder
- .addParts(Part.newBuilder().setFunctionResponse(functionResponse));
- });
- }
- } else if (chatMessageContent.getAuthorRole() == AuthorRole.ASSISTANT) {
- contentBuilder.setRole(GeminiRole.MODEL.toString());
-
- if (chatMessageContent instanceof GeminiChatMessageContent) {
- GeminiChatMessageContent> message = (GeminiChatMessageContent>) chatMessageContent;
-
- message.getGeminiFunctionCalls().forEach(geminiFunctionCall -> {
- contentBuilder.addParts(Part.newBuilder()
- .setFunctionCall(geminiFunctionCall.getFunctionCall()));
- });
- }
- }
-
- if (chatMessageContent.getContent() != null
- && !chatMessageContent.getContent().isEmpty()) {
- contentBuilder.addParts(Part.newBuilder().setText(chatMessageContent.getContent()));
- }
-
- contents.add(contentBuilder.build());
- });
-
- return contents;
- }
-
- private GeminiChatMessageContent> getGeminiChatMessageContentFromResponse(
- GenerateContentResponse response) {
- StringBuilder message = new StringBuilder();
- List functionCalls = new ArrayList<>();
-
- response.getCandidatesList().forEach(
- candidate -> {
- Content content = candidate.getContent();
- if (content.getPartsCount() == 0) {
- return;
- }
-
- content.getPartsList().forEach(part -> {
- if (!part.getFunctionCall().getName().isEmpty()) {
- // We only care about the function call here
- // Execution of the function call will be done later
- functionCalls.add(new GeminiFunctionCall(part.getFunctionCall(), null));
- }
- if (!part.getText().isEmpty()) {
- message.append(part.getText());
- }
- });
- });
-
- FunctionResultMetadata metadata = FunctionResultMetadata
- .build(UUID.randomUUID().toString(), response.getUsageMetadata(), OffsetDateTime.now());
-
- return new GeminiChatMessageContent<>(AuthorRole.ASSISTANT,
- message.toString(), null, null, null, metadata, functionCalls);
- }
-
- private GenerativeModel getGenerativeModel(@Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) throws SKCheckedException {
- GenerativeModel.Builder modelBuilder = new GenerativeModel.Builder()
- .setModelName(getModelId())
- .setVertexAi(getClient());
-
- if (invocationContext != null) {
- if (invocationContext.getPromptExecutionSettings() != null) {
- PromptExecutionSettings settings = invocationContext.getPromptExecutionSettings();
-
- if (settings.getResultsPerPrompt() < 1
- || settings.getResultsPerPrompt() > MAX_RESULTS_PER_PROMPT) {
- throw SKCheckedException.build(
- SemanticKernelResources.getString("error.building.generative.model"),
- new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- String.format(
- "Results per prompt must be in range between 1 and %d, inclusive.",
- MAX_RESULTS_PER_PROMPT)));
- }
-
- GenerationConfig config = GenerationConfig.newBuilder()
- .setMaxOutputTokens(settings.getMaxTokens())
- .setTemperature((float) settings.getTemperature())
- .setTopP((float) settings.getTopP())
- .setCandidateCount(settings.getResultsPerPrompt())
- .build();
-
- modelBuilder.setGenerationConfig(config);
- }
-
- if (invocationContext.getToolCallBehavior() != null && kernel != null) {
- List tools = new ArrayList<>();
- Tool tool = getTool(kernel, invocationContext.getToolCallBehavior());
- if (tool != null) {
- tools.add(tool);
- }
- modelBuilder.setTools(tools);
- }
- }
-
- return modelBuilder.build();
- }
-
- private FunctionDeclaration buildFunctionDeclaration(KernelFunction> function) {
- FunctionDeclaration.Builder functionBuilder = FunctionDeclaration.newBuilder();
- functionBuilder.setName(
- ToolCallBehavior.formFullFunctionName(function.getPluginName(), function.getName()));
- functionBuilder.setDescription(function.getDescription());
-
- List parameters = function.getMetadata().getParameters();
- if (parameters != null && !parameters.isEmpty()) {
- Schema.Builder parametersBuilder = Schema.newBuilder();
-
- function.getMetadata().getParameters().forEach(parameter -> {
- parametersBuilder.setType(Type.OBJECT);
- parametersBuilder.putProperties(
- parameter.getName(),
- Schema.newBuilder().setType(Type.STRING)
- .setDescription(parameter.getDescription()).build());
- });
-
- functionBuilder.setParameters(parametersBuilder.build());
- }
-
- return functionBuilder.build();
- }
-
- @Nullable
- private Tool getTool(@Nullable Kernel kernel, @Nullable ToolCallBehavior toolCallBehavior) {
- if (kernel == null || toolCallBehavior == null) {
- return null;
- }
-
- Tool.Builder toolBuilder = Tool.newBuilder();
-
- // If a specific function is required to be called
- if (toolCallBehavior instanceof ToolCallBehavior.RequiredKernelFunction) {
- KernelFunction> kernelFunction = ((ToolCallBehavior.RequiredKernelFunction) toolCallBehavior)
- .getRequiredFunction();
-
- toolBuilder.addFunctionDeclarations(buildFunctionDeclaration(kernelFunction));
- }
- // If a set of functions are enabled to be called
- if (toolCallBehavior instanceof ToolCallBehavior.AllowedKernelFunctions) {
- ToolCallBehavior.AllowedKernelFunctions enabledKernelFunctions = (ToolCallBehavior.AllowedKernelFunctions) toolCallBehavior;
-
- kernel.getPlugins()
- .forEach(plugin -> plugin.getFunctions().forEach((name, function) -> {
- // check if all kernel functions are enabled or if the specific function is enabled
- if (enabledKernelFunctions.isAllKernelFunctionsAllowed() ||
- enabledKernelFunctions.isFunctionAllowed(function.getPluginName(),
- function.getName())) {
- toolBuilder.addFunctionDeclarations(buildFunctionDeclaration(function));
- }
- }));
- }
-
- return toolBuilder.build();
- }
-
- /**
- * Invoke the Gemini function call.
- * @param kernel The semantic kernel
- * @param invocationContext Additional context for the invocation
- * @param geminiFunction The Gemini function call
- * @return The result of the function call
- */
- public Mono performFunctionCall(@Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext, GeminiFunctionCall geminiFunction) {
- if (kernel == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "Kernel must be provided to perform function call");
- }
-
- String[] name = geminiFunction.getFunctionCall().getName()
- .split(ToolCallBehavior.FUNCTION_NAME_SEPARATOR);
-
- String pluginName = name[0];
- String functionName = name[1];
-
- KernelPlugin plugin = kernel.getPlugin(pluginName);
- if (plugin == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- String.format("Plugin %s not found in kernel", pluginName));
- }
- KernelFunction> function = plugin.get(functionName);
-
- if (function == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- String.format("Kernel function %s not found in plugin %s", functionName,
- pluginName));
- }
-
- ContextVariableTypes contextVariableTypes = invocationContext == null
- ? new ContextVariableTypes()
- : invocationContext.getContextVariableTypes();
-
- KernelArguments.Builder arguments = KernelArguments.builder();
- geminiFunction.getFunctionCall().getArgs().getFieldsMap().forEach((key, value) -> {
- arguments.withVariable(key, value.getStringValue());
- });
-
- return function
- .invokeAsync(kernel)
- .withArguments(arguments.build())
- .withResultType(contextVariableTypes.getVariableTypeForClass(String.class))
- .map(result -> new GeminiFunctionCall(geminiFunction.getFunctionCall(), result));
- }
-
- /**
- * Builder for {@link GeminiChatCompletion}.
- */
- public static class Builder extends GeminiServiceBuilder {
-
- @Override
- public GeminiChatCompletion build() {
- if (this.client == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "VertexAI client must be provided");
- }
-
- if (this.modelId == null || modelId.isEmpty()) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "Gemini model id must be provided");
- }
-
- return new GeminiChatCompletion(client, modelId);
- }
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiChatMessageContent.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiChatMessageContent.java
deleted file mode 100644
index 28cae4fb5..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiChatMessageContent.java
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.chatcompletion;
-
-import com.google.cloud.vertexai.api.FunctionCall;
-import com.google.cloud.vertexai.api.FunctionResponse;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-import java.nio.charset.Charset;
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * Represents the content of a chat message.
- *
- * @param The type of the inner content.
- */
-public class GeminiChatMessageContent extends ChatMessageContent {
- @Nonnull
- private final List geminiFunctionCalls;
-
- /**
- * Creates a new instance of the {@link GeminiChatMessageContent} class.
- *
- * @param authorRole The author role that generated the content.
- * @param content The content.
- * @param modelId The model id.
- * @param innerContent The inner content.
- * @param encoding The encoding.
- * @param metadata The metadata.
- * @param geminiFunctionCalls The function calls.
- */
- public GeminiChatMessageContent(
- AuthorRole authorRole,
- String content,
- @Nullable String modelId,
- @Nullable T innerContent,
- @Nullable Charset encoding,
- @Nullable FunctionResultMetadata metadata,
- @Nullable List geminiFunctionCalls) {
- super(authorRole, content, modelId, innerContent, encoding, metadata);
- if (geminiFunctionCalls == null) {
- this.geminiFunctionCalls = Collections.emptyList();
- } else {
- this.geminiFunctionCalls = Collections.unmodifiableList(geminiFunctionCalls);
- }
- }
-
- /**
- * Gets the function calls.
- *
- * @return The function calls.
- */
- @Nonnull
- public List getGeminiFunctionCalls() {
- return Collections.unmodifiableList(geminiFunctionCalls);
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiFunctionCall.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiFunctionCall.java
deleted file mode 100644
index 0a6e87d4e..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiFunctionCall.java
+++ /dev/null
@@ -1,73 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.chatcompletion;
-
-import com.google.cloud.vertexai.api.FunctionCall;
-import com.microsoft.semantickernel.orchestration.FunctionResult;
-import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-/**
- * Represents a function call in Gemini.
- */
-public class GeminiFunctionCall {
- @Nonnull
- private final FunctionCall functionCall;
- @Nullable
- private final FunctionResult> functionResult;
- private final String pluginName;
- private final String functionName;
-
- /**
- * Creates a new Gemini function call.
- * @param functionCall The function call
- * @param functionResult The result of the function invocation
- */
- @SuppressFBWarnings("EI_EXPOSE_REP2")
- public GeminiFunctionCall(
- @Nonnull FunctionCall functionCall,
- @Nullable FunctionResult> functionResult) {
- this.functionCall = functionCall;
- this.functionResult = functionResult;
-
- String[] name = functionCall.getName().split(ToolCallBehavior.FUNCTION_NAME_SEPARATOR);
- this.pluginName = name[0];
- this.functionName = name[1];
- }
-
- /**
- * Gets the plugin name.
- * @return The plugin name
- */
- public String getPluginName() {
- return pluginName;
- }
-
- /**
- * Gets the function name.
- * @return The function name
- */
- public String getFunctionName() {
- return functionName;
- }
-
- /**
- * Gets the function call.
- * @return The function call
- */
- @SuppressFBWarnings("EI_EXPOSE_REP")
- public FunctionCall getFunctionCall() {
- return functionCall;
- }
-
- /**
- * Gets the function result.
- * @return The function result
- */
- @Nullable
- public FunctionResult> getFunctionResult() {
- return functionResult;
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiRole.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiRole.java
deleted file mode 100644
index 03983553b..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiRole.java
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.chatcompletion;
-
-/**
- * Represents the role of a message in a Gemini conversation.
- */
-public enum GeminiRole {
- /**
- * A user message is a message generated by the user.
- */
- USER("user"),
- /**
- * A model message is a message generated by the model.
- */
- MODEL("model");
-
- private final String role;
-
- private GeminiRole(String role) {
- this.role = role;
- }
-
- @Override
- public String toString() {
- return role;
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiStreamingChatMessageContent.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiStreamingChatMessageContent.java
deleted file mode 100644
index e6f0d5511..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiStreamingChatMessageContent.java
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.chatcompletion;
-
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.StreamingChatContent;
-import java.nio.charset.Charset;
-import java.util.List;
-import javax.annotation.Nullable;
-
-/**
- * Represents the content of a chat message.
- *
- * @param The type of the inner content.
- */
-public class GeminiStreamingChatMessageContent extends GeminiChatMessageContent implements
- StreamingChatContent {
-
- private final String id;
-
- /**
- * Creates a new instance of the {@link GeminiChatMessageContent} class.
- *
- * @param authorRole The author role that generated the content.
- * @param content The content.
- * @param modelId The model id.
- * @param innerContent The inner content.
- * @param encoding The encoding.
- * @param metadata The metadata.
- * @param id The id of the message.
- * @param geminiFunctionCalls The function calls.
- */
- public GeminiStreamingChatMessageContent(AuthorRole authorRole, String content,
- @Nullable String modelId, @Nullable T innerContent, @Nullable Charset encoding,
- @Nullable FunctionResultMetadata metadata,
- @Nullable List geminiFunctionCalls,
- String id) {
- super(authorRole, content, modelId, innerContent, encoding, metadata, geminiFunctionCalls);
- this.id = id;
- }
-
- @Override
- public String getId() {
- return id;
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiXMLPromptParser.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiXMLPromptParser.java
deleted file mode 100644
index e43032dc1..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/chatcompletion/GeminiXMLPromptParser.java
+++ /dev/null
@@ -1,165 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.chatcompletion;
-
-import com.azure.core.util.BinaryData;
-import com.google.cloud.vertexai.api.FunctionDeclaration;
-import com.google.cloud.vertexai.api.Schema;
-import com.microsoft.semantickernel.implementation.chatcompletion.ChatPromptParseVisitor;
-import com.microsoft.semantickernel.implementation.chatcompletion.ChatXMLPromptParser;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import com.microsoft.semantickernel.services.chatcompletion.message.ChatMessageTextContent;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import javax.annotation.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Parses an XML prompt for a Gemini chat.
- */
-public class GeminiXMLPromptParser {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(GeminiXMLPromptParser.class);
-
- /**
- * Represents a parsed prompt for Gemini chat.
- */
- public static class GeminiParsedPrompt {
-
- private final ChatHistory chatHistory;
- private final List functions;
-
- /**
- * Creates a new parsed prompt.
- * @param parsedChatHistory The chat history
- * @param parsedFunctions The functions declarations.
- */
- protected GeminiParsedPrompt(
- ChatHistory parsedChatHistory,
- @Nullable List parsedFunctions) {
- this.chatHistory = parsedChatHistory;
- if (parsedFunctions == null) {
- parsedFunctions = new ArrayList<>();
- }
- this.functions = parsedFunctions;
- }
-
- /**
- * Gets the chat history.
- * @return A copy of the chat history.
- */
- public ChatHistory getChatHistory() {
- return new ChatHistory(chatHistory.getMessages());
- }
-
- /**
- * Gets the functions declarations.
- * @return A copy of the functions declarations.
- */
- public List getFunctions() {
- return Collections.unmodifiableList(functions);
- }
- }
-
- private static AuthorRole getAuthorRole(String role) {
- switch (role) {
- case "user":
- return AuthorRole.USER;
- case "assistant":
- return AuthorRole.ASSISTANT;
- case "system":
- return AuthorRole.SYSTEM;
- case "tool":
- return AuthorRole.TOOL;
- default:
- LOGGER.error("Unknown role: " + role);
- return AuthorRole.USER;
- }
- }
-
- private static class GeminiChatPromptParseVisitor
- implements ChatPromptParseVisitor {
-
- @Nullable
- private GeminiParsedPrompt parsedRaw = null;
- private final List functionDefinitions = new ArrayList<>();
- private final ChatHistory chatHistory = new ChatHistory();
-
- @Override
- public ChatPromptParseVisitor addMessage(
- String role,
- String content) {
- chatHistory.addMessage(
- ChatMessageTextContent.builder()
- .withContent(content)
- .withAuthorRole(getAuthorRole(role))
- .build());
- return this;
- }
-
- @Override
- public ChatPromptParseVisitor addFunction(
- String name,
- @Nullable String description,
- @Nullable BinaryData parameters) {
-
- // TODO: Build the parameters schema
- Schema.Builder parametersBuilder = Schema.newBuilder();
-
- FunctionDeclaration.Builder function = FunctionDeclaration.newBuilder()
- .setName(name)
- .setDescription(description)
- .setParameters(parametersBuilder.build());
-
- functionDefinitions.add(function.build());
- return this;
- }
-
- @Override
- public boolean areMessagesEmpty() {
- return chatHistory.getMessages().isEmpty();
- }
-
- @Override
- public ChatPromptParseVisitor fromRawPrompt(
- String rawPrompt) {
-
- ChatMessageContent> message = ChatMessageTextContent.userMessage(rawPrompt);
-
- this.parsedRaw = new GeminiParsedPrompt(
- new ChatHistory(Collections.singletonList(message)), null);
-
- return this;
- }
-
- @Override
- public GeminiParsedPrompt get() {
- if (parsedRaw != null) {
- return parsedRaw;
- }
-
- return new GeminiParsedPrompt(chatHistory, functionDefinitions);
- }
-
- @Override
- public ChatPromptParseVisitor reset() {
- return new GeminiChatPromptParseVisitor();
- }
- }
-
- /**
- * Create a GeminiParsedPrompt by parsing a raw prompt.
- * @param rawPrompt the raw prompt to parse.
- * @return The parsed prompt.
- */
- public static GeminiParsedPrompt parse(String rawPrompt) {
- ChatPromptParseVisitor visitor = ChatXMLPromptParser.parse(
- rawPrompt,
- new GeminiChatPromptParseVisitor());
-
- return visitor.get();
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/implementation/MonoConverter.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/implementation/MonoConverter.java
deleted file mode 100644
index 3209a03c8..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/implementation/MonoConverter.java
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.implementation;
-
-import com.google.api.core.ApiFuture;
-import reactor.core.publisher.Mono;
-
-public class MonoConverter {
- public static Mono fromApiFuture(ApiFuture apiFuture) {
- return Mono.create(sink -> {
- apiFuture.addListener(() -> {
- try {
- T result = apiFuture.get();
- sink.success(result);
- } catch (Exception e) {
- sink.error(e);
- }
- }, runnable -> new Thread(runnable).start());
- });
- }
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/textcompletion/GeminiStreamingTextContent.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/textcompletion/GeminiStreamingTextContent.java
deleted file mode 100644
index 9bf9a6fd3..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/textcompletion/GeminiStreamingTextContent.java
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.textcompletion;
-
-import com.microsoft.semantickernel.services.StreamingTextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextContent;
-import javax.annotation.Nullable;
-
-/**
- * StreamingTextContent is a wrapper for TextContent that allows for streaming.
- */
-public class GeminiStreamingTextContent extends StreamingTextContent {
-
- /**
- * Initializes a new instance of the {@code StreamingTextContent} class with a provided text
- * content.
- *
- * @param content The text content.
- */
- public GeminiStreamingTextContent(TextContent content) {
- super(content, 0, null, null);
- }
-
- @Override
- @Nullable
- public String getContent() {
- TextContent content = getInnerContent();
- if (content == null) {
- return null;
- }
- return content.getContent();
- }
-
-}
diff --git a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/textcompletion/GeminiTextGenerationService.java b/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/textcompletion/GeminiTextGenerationService.java
deleted file mode 100644
index 5e5551161..000000000
--- a/aiservices/google/src/main/java/com/microsoft/semantickernel/aiservices/google/textcompletion/GeminiTextGenerationService.java
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.google.textcompletion;
-
-import com.google.cloud.vertexai.VertexAI;
-import com.google.cloud.vertexai.api.GenerateContentResponse;
-import com.google.cloud.vertexai.api.GenerationConfig;
-import com.google.cloud.vertexai.generativeai.GenerativeModel;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.google.GeminiService;
-import com.microsoft.semantickernel.aiservices.google.GeminiServiceBuilder;
-import com.microsoft.semantickernel.aiservices.google.implementation.MonoConverter;
-import com.microsoft.semantickernel.exceptions.AIException;
-import com.microsoft.semantickernel.exceptions.SKCheckedException;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.services.StreamingTextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
-import java.io.IOException;
-import java.time.OffsetDateTime;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.UUID;
-import javax.annotation.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-/**
- * A Gemini service for text generation.
- * @see TextGenerationService
- */
-public class GeminiTextGenerationService extends GeminiService implements TextGenerationService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(GeminiTextGenerationService.class);
-
- /**
- * Creates a new Gemini text generation service.
- * @param client The VertexAI client
- * @param modelId The Gemini model ID
- */
- public GeminiTextGenerationService(VertexAI client, String modelId) {
- super(client, modelId);
- }
-
- /**
- * Creates a new builder for a Gemini text generation service.
- * @return The builder
- */
- public static Builder builder() {
- return new Builder();
- }
-
- @Override
- public Mono> getTextContentsAsync(
- String prompt,
- @Nullable PromptExecutionSettings executionSettings,
- @Nullable Kernel kernel) {
- return this.internalGetTextAsync(prompt, executionSettings);
- }
-
- @Override
- public Flux getStreamingTextContentsAsync(
- String prompt,
- @Nullable PromptExecutionSettings executionSettings,
- @Nullable Kernel kernel) {
- return this
- .internalGetTextAsync(prompt, executionSettings)
- .flatMapMany(it -> Flux.fromStream(it.stream())
- .map(GeminiStreamingTextContent::new));
- }
-
- private Mono> internalGetTextAsync(String prompt,
- @Nullable PromptExecutionSettings executionSettings) {
-
- try {
- GenerativeModel model = getGenerativeModel(executionSettings);
- return MonoConverter.fromApiFuture(model.generateContentAsync(prompt))
- .doOnError(e -> LOGGER.error("Error generating text", e))
- .flatMap(result -> {
- List textContents = new ArrayList<>();
-
- FunctionResultMetadata metadata = FunctionResultMetadata
- .build(
- UUID.randomUUID().toString(),
- result.getUsageMetadata(),
- OffsetDateTime.now());
-
- result.getCandidatesList().forEach(
- candidate -> {
- candidate.getContent().getPartsList().forEach(part -> {
- if (!part.getText().isEmpty()) {
- textContents.add(
- new TextContent(part.getText(), getModelId(), metadata));
- }
- });
- });
-
- return Mono.just(textContents);
- });
- } catch (SKCheckedException | IOException e) {
- return Mono.error(new SKException("Error generating text", e));
- }
- }
-
- private GenerativeModel getGenerativeModel(
- @Nullable PromptExecutionSettings executionSettings) throws SKCheckedException {
- GenerativeModel.Builder modelBuilder = new GenerativeModel.Builder()
- .setModelName(getModelId())
- .setVertexAi(getClient());
-
- if (executionSettings != null) {
- if (executionSettings.getResultsPerPrompt() < 1
- || executionSettings.getResultsPerPrompt() > MAX_RESULTS_PER_PROMPT) {
- throw SKCheckedException.build("Error building generative model.",
- new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- String.format(
- "Results per prompt must be in range between 1 and %d, inclusive.",
- MAX_RESULTS_PER_PROMPT)));
- }
-
- GenerationConfig config = GenerationConfig.newBuilder()
- .setMaxOutputTokens(executionSettings.getMaxTokens())
- .setTemperature((float) executionSettings.getTemperature())
- .setTopP((float) executionSettings.getTopP())
- .setCandidateCount(executionSettings.getResultsPerPrompt())
- .build();
-
- modelBuilder.setGenerationConfig(config);
- }
-
- return modelBuilder.build();
- }
-
- /**
- * Builder for a Gemini text generation service.
- */
- public static class Builder extends
- GeminiServiceBuilder {
-
- @Override
- public GeminiTextGenerationService build() {
- if (this.client == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "VertexAI client must be provided");
- }
-
- if (this.modelId == null || modelId.isEmpty()) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "Gemini model id must be provided");
- }
-
- return new GeminiTextGenerationService(client, modelId);
- }
- }
-}
diff --git a/aiservices/huggingface/pom.xml b/aiservices/huggingface/pom.xml
deleted file mode 100644
index 152aba546..000000000
--- a/aiservices/huggingface/pom.xml
+++ /dev/null
@@ -1,74 +0,0 @@
-
-
-
- 4.0.0
-
-
- com.microsoft.semantic-kernel
- semantickernel-parent
- 1.4.4-RC3-SNAPSHOT
- ../../pom.xml
-
-
- semantickernel-aiservices-huggingface
- Semantic Kernel Huggingface Services
- Huggingface services for Semantic Kernel
-
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-bom
- ${project.version}
- pom
- import
-
-
-
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-api
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-exceptions
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-ai-services
-
-
- com.azure
- azure-core
-
-
- com.fasterxml.jackson.core
- jackson-databind
- compile
-
-
- com.fasterxml.jackson.core
- jackson-core
- compile
-
-
-
-
- javax.xml.stream
- stax-api
- provided
-
-
-
-
-
-
- src/main/resources
- true
-
-
-
-
-
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/HuggingFaceClient.java b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/HuggingFaceClient.java
deleted file mode 100644
index 4e8f9bb4d..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/HuggingFaceClient.java
+++ /dev/null
@@ -1,219 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.huggingface;
-
-import com.azure.core.credential.KeyCredential;
-import com.azure.core.http.HttpClient;
-import com.azure.core.http.HttpHeaderName;
-import com.azure.core.http.HttpMethod;
-import com.azure.core.http.HttpRequest;
-import com.azure.core.http.HttpResponse;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JavaType;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.microsoft.semantickernel.aiservices.huggingface.models.GeneratedTextItem;
-import com.microsoft.semantickernel.aiservices.huggingface.models.TextGenerationRequest;
-import com.microsoft.semantickernel.exceptions.SKException;
-import java.nio.charset.StandardCharsets;
-import java.util.List;
-import reactor.core.publisher.Mono;
-import javax.annotation.Nullable;
-
-/**
- * A client for the Hugging Face API.
- */
-public class HuggingFaceClient {
-
- private final KeyCredential key;
- private final String endpoint;
- private final HttpClient httpClient;
-
- /**
- * Creates a new Hugging Face client.
- * @param key The key credential for endpoint authentication.
- * @param endpoint The endpoint for the Hugging Face API.
- * @param httpClient The HTTP client to use for requests.
- */
- public HuggingFaceClient(
- KeyCredential key,
- String endpoint,
- HttpClient httpClient) {
- this.key = key;
- this.endpoint = endpoint;
- this.httpClient = httpClient;
- }
-
- /*
- * TODO: TGI
- * public Mono getChatMessageContentsAsync(
- * String modelId,
- * ChatCompletionRequest chatCompletionRequest
- * ) {
- * try {
- * String body = new ObjectMapper().writeValueAsString(chatCompletionRequest);
- * return performRequest(modelId, body)
- * .handle((response, sink) -> {
- * ObjectMapper mapper = new ObjectMapper();
- * JavaType type = mapper.getTypeFactory().
- * constructCollectionType(List.class, GeneratedTextItem.class);
- * try {
- * sink.next(mapper.readValue(response, type));
- * } catch (JsonProcessingException e) {
- * sink.error(
- * new SKException("Failed to deserialize response from Hugging Face",
- * e));
- * }
- * });
- * } catch (JsonProcessingException e) {
- * return Mono.error(new SKException("Failed to serialize request body", e));
- * }
- * }
- *
- */
-
- private static class GeneratedTextItemList {
-
- private final List> generatedTextItems;
-
- @JsonCreator
- public GeneratedTextItemList(
- List> generatedTextItems) {
- this.generatedTextItems = generatedTextItems;
- }
-
- }
-
- /**
- * Gets the text contents from the Hugging Face API.
- * @param modelId The model ID.
- * @param textGenerationRequest The text generation request.
- * @return The generated text items.
- */
- public Mono> getTextContentsAsync(
- String modelId,
- TextGenerationRequest textGenerationRequest) {
- try {
- String body = new ObjectMapper().writeValueAsString(textGenerationRequest);
- return performRequest(modelId, body)
- .handle((response, sink) -> {
- try {
- ObjectMapper mapper = new ObjectMapper();
- JavaType type = mapper.getTypeFactory().constructCollectionType(List.class,
- GeneratedTextItemList.class);
- GeneratedTextItemList data = mapper.readValue(response,
- GeneratedTextItemList.class);
- sink.next(data.generatedTextItems.get(0));
- } catch (Exception e) {
- sink.error(
- new SKException("Failed to deserialize response from Hugging Face",
- e));
- }
- });
- } catch (JsonProcessingException e) {
- return Mono.error(new SKException("Failed to serialize request body", e));
- }
- }
-
- private Mono performRequest(String modelId,
- String body) {
- HttpRequest request = new HttpRequest(HttpMethod.POST, endpoint)
- .setHeader(HttpHeaderName.AUTHORIZATION, "Bearer " + key.getKey())
- .setHeader(HttpHeaderName.CONTENT_TYPE, "application/json")
- .setHeader(HttpHeaderName.fromString("azureml-model-deployment"), modelId);
-
- request.setBody(body.getBytes(StandardCharsets.UTF_8));
-
- Mono responseBody = httpClient
- .send(request)
- .onErrorResume(
- e -> {
- return Mono.error(
- new SKException("Failed to send request to Hugging Face", e));
- })
- .flatMap(httpResponse -> {
- if (httpResponse.getStatusCode() >= 400) {
- return httpResponse.getBodyAsString()
- .flatMap(errorBody -> {
- return Mono.error(new SKException(
- "Failed to get text content from Hugging Face. Status code: "
- + httpResponse.getStatusCode() + " " + errorBody));
- });
- } else {
- return Mono.just(httpResponse);
- }
- })
- .flatMap(HttpResponse::getBodyAsString);
- return responseBody;
- }
-
- /**
- * Creates a new builder for a Hugging Face client.
- * @return The builder
- */
- public static Builder builder() {
- return new Builder();
- }
-
- /**
- * Builder for a Hugging Face client.
- */
- public static class Builder {
-
- @Nullable
- private KeyCredential key = null;
- @Nullable
- private String endpoint = null;
- @Nullable
- private HttpClient httpClient = null;
-
- /**
- * Builds the Hugging Face client.
- * @return The client
- */
- public HuggingFaceClient build() {
- if (httpClient == null) {
- httpClient = HttpClient.createDefault();
- }
- if (key == null) {
- throw new SKException("Key credential is required");
- }
- if (endpoint == null) {
- throw new SKException("Endpoint is required");
- }
- return new HuggingFaceClient(
- key,
- endpoint,
- httpClient);
- }
-
- /**
- * Sets the key credential for the client.
- * @param key The key credential
- * @return The builder
- */
- public Builder credential(KeyCredential key) {
- this.key = key;
- return this;
- }
-
- /**
- * Sets the endpoint for the client.
- * @param endpoint The endpoint
- * @return The builder
- */
- public Builder endpoint(String endpoint) {
- this.endpoint = endpoint;
- return this;
- }
-
- /**
- * Sets the HTTP client for the client.
- * @param httpClient The HTTP client
- * @return The builder
- */
- public Builder httpClient(HttpClient httpClient) {
- this.httpClient = httpClient;
- return this;
- }
- }
-}
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/GeneratedTextItem.java b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/GeneratedTextItem.java
deleted file mode 100644
index 12ed5be60..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/GeneratedTextItem.java
+++ /dev/null
@@ -1,244 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.huggingface.models;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import javax.annotation.Nullable;
-
-/**
- * Represents a generated text item deserialized from a JSON response.
- */
-public class GeneratedTextItem {
-
- @Nullable
- @JsonProperty("generated_text")
- private final String generatedText;
-
- @Nullable
- @JsonProperty("details")
- private final TextGenerationDetails details;
-
- /**
- * Constructor used by Jackson to deserialize a generated text item.
- * @param generatedText The generated text.
- * @param details The details of the generation.
- */
- @JsonCreator
- public GeneratedTextItem(
- @JsonProperty("generated_text") @Nullable String generatedText,
- @JsonProperty("details") @Nullable TextGenerationDetails details) {
- this.generatedText = generatedText;
- this.details = details;
- }
-
- /**
- * Gets the generated text.
- * @return The generated text.
- */
- @Nullable
- public String getGeneratedText() {
- return generatedText;
- }
-
- /**
- * Gets the details of the generation.
- * @return The details of the generation.
- */
- @Nullable
- public TextGenerationDetails getDetails() {
- return details;
- }
-
- /**
- * Represents the details of a text generation deserialized from a JSON response.
- */
- public static class TextGenerationDetails {
-
- @Nullable
- @JsonProperty("finish_reason")
- private final String finishReason;
-
- @JsonProperty("generated_tokens")
- private final int generatedTokens;
-
- @Nullable
- @JsonProperty("seed")
- private final Long seed;
-
- @Nullable
- @JsonProperty("prefill")
- private final List prefill;
-
- @Nullable
- @JsonProperty("tokens")
- private final List tokens;
-
- /**
- * Constructor used by Jackson to deserialize text generation details.
- * @param finishReason The reason the generation finished.
- * @param generatedTokens The number of tokens generated.
- * @param seed The seed used for generation.
- * @param prefill The prefill tokens.
- * @param tokens The generated tokens.
- */
- @JsonCreator
- public TextGenerationDetails(
- @JsonProperty("finish_reason") @Nullable String finishReason,
- @JsonProperty("generated_tokens") int generatedTokens,
- @JsonProperty("seed") @Nullable Long seed,
- @JsonProperty("prefill") @Nullable List prefill,
- @JsonProperty("tokens") @Nullable List tokens) {
- this.finishReason = finishReason;
- this.generatedTokens = generatedTokens;
- this.seed = seed;
- if (prefill != null) {
- this.prefill = new ArrayList<>(prefill);
- } else {
- this.prefill = null;
- }
- if (tokens != null) {
- this.tokens = new ArrayList<>(tokens);
- } else {
- this.tokens = null;
- }
- }
-
- /**
- * Gets the reason the generation finished.
- * @return The reason the generation finished.
- */
- @Nullable
- public String getFinishReason() {
- return finishReason;
- }
-
- /**
- * Gets the number of tokens generated.
- * @return The number of tokens generated.
- */
- public int getGeneratedTokens() {
- return generatedTokens;
- }
-
- /**
- * Gets the seed used for generation.
- * @return The seed used for generation.
- */
- @Nullable
- public Long getSeed() {
- return seed;
- }
-
- /**
- * Gets the prefill tokens.
- * @return The prefill tokens.
- */
- @Nullable
- public List getPrefill() {
- return Collections.unmodifiableList(prefill);
- }
-
- /**
- * Gets the generated tokens.
- * @return The generated tokens.
- */
- @Nullable
- public List getTokens() {
- return Collections.unmodifiableList(tokens);
- }
- }
-
- /**
- * Represents a prefill token deserialized from a JSON response.
- */
- public static class TextGenerationPrefillToken {
-
- @JsonProperty("id")
- private final int id;
-
- @Nullable
- @JsonProperty("text")
- private final String text;
-
- @JsonProperty("logprob")
- private final double logProb;
-
- /**
- * Constructor used by Jackson to deserialize a prefill token.
- * @param id The token ID.
- * @param text The token text.
- * @param logProb The log probability of the token.
- */
- @JsonCreator
- public TextGenerationPrefillToken(
- @JsonProperty("id") int id,
- @JsonProperty("text") @Nullable String text,
- @JsonProperty("logprob") double logProb) {
- this.id = id;
- this.text = text;
- this.logProb = logProb;
- }
-
- /**
- * Gets the token ID.
- * @return The token ID.
- */
- public int getId() {
- return id;
- }
-
- /**
- * Gets the token text.
- * @return The token text.
- */
- @Nullable
- public String getText() {
- return text;
- }
-
- /**
- * Gets the log probability of the token.
- * @return The log probability of the token.
- */
- public double getLogProb() {
- return logProb;
- }
- }
-
- /**
- * Represents a generated token deserialized from a JSON response.
- */
- public static class TextGenerationToken extends TextGenerationPrefillToken {
-
- @JsonProperty("special")
- private final boolean special;
-
- /**
- * Constructor used by Jackson to deserialize a generated token.
- * @param special Whether the token is special.
- * @param id The token ID.
- * @param text The token text.
- * @param logProb The log probability of the token.
- */
- @JsonCreator
- public TextGenerationToken(
- @JsonProperty("special") boolean special,
- @JsonProperty("id") int id,
- @JsonProperty("text") @Nullable String text,
- @JsonProperty("logprob") double logProb) {
- super(id, text, logProb);
- this.special = special;
- }
-
- /**
- * Gets whether the token is special.
- * @return Whether the token is special.
- */
- public boolean isSpecial() {
- return special;
- }
- }
-}
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/TextGenerationRequest.java b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/TextGenerationRequest.java
deleted file mode 100644
index 0e52b5681..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/TextGenerationRequest.java
+++ /dev/null
@@ -1,342 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.huggingface.models;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.microsoft.semantickernel.aiservices.huggingface.services.HuggingFacePromptExecutionSettings;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-import javax.annotation.Nullable;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Represents a request to generate text using the Hugging Face API.
- */
-@JsonInclude(Include.NON_NULL)
-public class TextGenerationRequest {
-
- ///
- /// The input string to generate text for.
- ///
- @Nullable
- @JsonProperty("inputs")
- private final List inputs;
-
- ///
- /// Enable streaming
- ///
- @JsonProperty("stream")
- private final boolean stream;
-
- ///
- /// Parameters used by the model for generation.
- ///
-
- @Nullable
- @JsonProperty("parameters")
- private final HuggingFaceTextParameters parameters;
-
- ///
- /// Options used by the model for generation.
- ///
- @Nullable
- @JsonProperty("options")
- private final HuggingFaceTextOptions options;
-
- /**
- * Create a new instance of TextGenerationRequest.
- * @param inputs The input string to generate text for.
- * @param stream Enable streaming.
- * @param parameters Parameters used by the model for generation.
- * @param options Options used by the model for generation.
- */
- public TextGenerationRequest(
- @Nullable String inputs,
- boolean stream,
- @Nullable HuggingFaceTextParameters parameters,
- @Nullable HuggingFaceTextOptions options) {
- this.inputs = Arrays.asList(inputs);
- this.stream = stream;
- this.parameters = parameters;
- this.options = options;
- }
-
- /**
- * Create a new instance of TextGenerationRequest.
- * @param prompt The prompt to generate text for.
- * @param executionSettings The settings for executing the prompt.
- * @return A new instance of TextGenerationRequest.
- */
- public static TextGenerationRequest fromPromptAndExecutionSettings(String prompt,
- HuggingFacePromptExecutionSettings executionSettings) {
- return new TextGenerationRequest(
- prompt,
- false,
- new HuggingFaceTextParameters(
- executionSettings.getTopK(),
- executionSettings.getTopP(),
- executionSettings.getTemperature(),
- executionSettings.getRepetitionPenalty(),
- executionSettings.getMaxTokens(),
- executionSettings.getMaxTime(),
- true,
- null,
- null,
- executionSettings.getDetails()),
- new HuggingFaceTextOptions());
- }
-
- /**
- * Parameters used by the model for generation.
- */
- public static class HuggingFaceTextParameters {
-
- ///
- /// (Default: None). Number to define the top tokens considered within the sample operation to create new text.
- ///
- @Nullable
- @JsonProperty("top_k")
- private final Integer topK;
-
- ///
- /// (Default: None). Define the tokens that are within the sample operation of text generation.
- /// Add tokens in the sample for more probable to least probable until the sum of the probabilities
- /// is greater than top_p.
- ///
- @Nullable
- @JsonProperty("top_p")
- private final Double topP;
-
- ///
- /// (Default: 1.0). Range (0.0-100.0). The temperature of the sampling operation.
- /// 1 means regular sampling, 0 means always take the highest score,
- /// 100.0 is getting closer to uniform probability.
- ///
- @Nullable
- @JsonProperty("temperature")
- private final Double temperature;
-
- ///
- /// (Default: None). (0.0-100.0). The more a token is used within generation
- /// the more it is penalized to not be picked in successive generation passes.
- ///
- @Nullable
- @JsonProperty("repetition_penalty")
- private final Double repetitionPenalty;
-
- ///
- /// (Default: None). Range (0-250). The amount of new tokens to be generated,
- /// this does not include the input length it is a estimate of the size of generated text you want.
- /// Each new tokens slows down the request, so look for balance between response times
- /// and length of text generated.
- ///
- @Nullable
- @JsonProperty("max_new_tokens")
- private final Integer maxNewTokens;
-
- ///
- /// (Default: None). Range (0-120.0). The amount of time in seconds that the query should take maximum.
- /// Network can cause some overhead so it will be a soft limit.
- /// Use that in combination with max_new_tokens for best results.
- ///
- @Nullable
- @JsonProperty("max_time")
- private final Double maxTime;
-
- ///
- /// (Default: True). If set to False, the return results will not contain the original query making it easier for prompting.
- ///
- @JsonProperty("return_full_text")
- private final boolean returnFullText;
-
- ///
- /// (Default: 1). The number of proposition you want to be returned.
- ///
- @Nullable
- @JsonProperty("num_return_sequences")
- private final Integer numReturnSequences;
-
- ///
- /// (Optional: True). Whether or not to use sampling, use greedy decoding otherwise.
- ///
- @Nullable
- @JsonProperty("do_sample")
- private final Boolean doSample;
-
- ///
- /// (Optional: True) Whether or not to include the details of the generation.
- ///
- ///
- /// Disabling this won't provide information about token usage.
- ///
- @Nullable
- @JsonProperty("details")
- private final Boolean details;
-
- /**
- * Creator method for jackson deserialization.
- * @param topK The number of top tokens considered within the sample operation to create new text.
- * @param topP The tokens that are within the sample operation of text generation.
- * @param temperature The temperature of the sampling operation.
- * @param repetitionPenalty The repetition penalty.
- * @param maxNewTokens The amount of new tokens to be generated.
- * @param maxTime The amount of time in seconds that the query should take maximum.
- * @param returnFullText A value indicating whether the return results will contain the original query.
- * @param numReturnSequences The number of propositions to be returned.
- * @param doSample A value indicating whether to use sampling.
- * @param details A value indicating whether to include the details of the generation.
- */
- public HuggingFaceTextParameters(
- @JsonProperty("top_k") @Nullable Integer topK,
- @JsonProperty("top_p") @Nullable Double topP,
- @JsonProperty("temperature") @Nullable Double temperature,
- @JsonProperty("repetition_penalty") @Nullable Double repetitionPenalty,
- @JsonProperty("max_new_tokens") @Nullable Integer maxNewTokens,
- @JsonProperty("max_time") @Nullable Double maxTime,
- @JsonProperty("return_full_text") boolean returnFullText,
- @JsonProperty("num_return_sequences") @Nullable Integer numReturnSequences,
- @JsonProperty("do_sample") @Nullable Boolean doSample,
- @JsonProperty("details") @Nullable Boolean details) {
- this.topK = topK;
- this.topP = topP;
- this.temperature = temperature;
- this.repetitionPenalty = repetitionPenalty;
- this.maxNewTokens = maxNewTokens;
- this.maxTime = maxTime;
- this.returnFullText = returnFullText;
- this.numReturnSequences = numReturnSequences;
- this.doSample = doSample;
- this.details = details;
- }
-
- /**
- * Gets the number of top tokens considered within the sample operation to create new text.
- * @return The number of top tokens considered within the sample operation to create new text.
- */
- @Nullable
- public Integer getTopK() {
- return topK;
- }
-
- /**
- * Gets the tokens that are within the sample operation of text generation.
- * @return The tokens that are within the sample operation of text generation.
- */
- @Nullable
- public Double getTopP() {
- return topP;
- }
-
- /**
- * Gets the temperature of the sampling operation.
- * @return The temperature of the sampling operation.
- */
- @Nullable
- public Double getTemperature() {
- return temperature;
- }
-
- /**
- * Gets the repetition penalty.
- * @return The repetition penalty.
- */
- @Nullable
- public Double getRepetitionPenalty() {
- return repetitionPenalty;
- }
-
- /**
- * Gets the amount of new tokens to be generated.
- * @return The amount of new tokens to be generated.
- */
- @Nullable
- public Integer getMaxNewTokens() {
- return maxNewTokens;
- }
-
- /**
- * Gets the amount of time in seconds that the query should take maximum.
- * @return The amount of time in seconds that the query should take maximum.
- */
- @Nullable
- public Double getMaxTime() {
- return maxTime;
- }
-
- /**
- * Gets a value indicating whether the return results will contain the original query.
- * @return A value indicating whether the return results will contain the original query.
- */
- public boolean isReturnFullText() {
- return returnFullText;
- }
-
- /**
- * Gets the number of propositions to be returned.
- * @return The number of propositions to be returned.
- */
- @Nullable
- public Integer getNumReturnSequences() {
- return numReturnSequences;
- }
-
- /**
- * Gets a value indicating whether to use sampling.
- * @return A value indicating whether to use sampling.
- */
- @Nullable
- public Boolean getDoSample() {
- return doSample;
- }
-
- /**
- * Gets a value indicating whether to include the details of the generation.
- * @return A value indicating whether to include the details of the generation.
- */
- @Nullable
- public Boolean getDetails() {
- return details;
- }
- }
-
- /**
- * Options used by the model for generation.
- */
- @SuppressFBWarnings("SS_SHOULD_BE_STATIC")
- public static class HuggingFaceTextOptions {
-
- ///
- /// (Default: true). There is a cache layer on the inference API to speedup requests we have already seen.
- /// Most models can use those results as is as models are deterministic (meaning the results will be the same anyway).
- /// However if you use a non deterministic model, you can set this parameter to prevent the caching mechanism from being
- /// used resulting in a real new query.
- ///
- @JsonProperty("use_cache")
- private final boolean useCache = true;
-
- ///
- /// (Default: false) If the model is not ready, wait for it instead of receiving 503.
- /// It limits the number of requests required to get your inference done.
- /// It is advised to only set this flag to true after receiving a 503 error as it will limit hanging in your application to known places.
- ///
- @JsonProperty("wait_for_model")
- private final boolean waitForModel = false;
-
- /**
- * Gets a value indicating whether to use the cache layer on the inference API.
- * @return A value indicating whether to use the cache layer on the inference API.
- */
- public boolean isUseCache() {
- return useCache;
- }
-
- /**
- * Gets a value indicating whether to wait for the model if it is not ready.
- * @return A value indicating whether to wait for the model if it is not ready.
- */
- public boolean isWaitForModel() {
- return waitForModel;
- }
- }
-}
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/ChatCompletionRequest.java.ignore b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/ChatCompletionRequest.java.ignore
deleted file mode 100644
index 999728192..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/ChatCompletionRequest.java.ignore
+++ /dev/null
@@ -1,201 +0,0 @@
-package com.microsoft.semantickernel.aiservices.huggingface.models;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.List;
-import javax.annotation.Nullable;
-
-// TODO Support TGI
-public class ChatCompletionRequest {
-
-
- ///
- /// This is the default name when using TGI and will be ignored as the TGI will only target the current activated model.
- ///
- public static final String TextGenerationInferenceDefaultModel = "tgi";
-
-
- ///
- /// Model name to use for generation.
- ///
- ///
- /// When using TGI this parameter will be ignored.
- ///
- @Nullable
- public final String model;
-
- ///
- /// Indicates whether to get the response as stream or not.
- ///
- public final boolean stream;
-
- @Nullable
- public final List messages;
-
- ///
- /// Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each
- /// output token returned in the content of message.
- ///
- @Nullable
- public final Boolean logprobs;
-
- ///
- /// An integer between 0 and 5 specifying the number of most likely tokens to return at each token position, each with
- /// an associated log probability. logprobs must be set to true if this parameter is used.
- ///
- @Nullable
- public final Integer topLogProbs;
-
- ///
- /// The maximum number of tokens that can be generated in the chat completion.
- ///
- @Nullable
- public final Integer maxTokens;
-
- ///
- /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far,
- /// increasing the model's likelihood to talk about new topics
- ///
-
- @Nullable
- public final Float presencePenalty;
- ///
- /// Up to 4 sequences where the API will stop generating further tokens.
- ///
- @Nullable
- public final List stop;
-
- ///
- /// The seed to use for generating a similar output.
- ///
- @Nullable
- public final Long seed;
-
- ///
- /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while
- /// lower values like 0.2 will make it more focused and deterministic.
- ///
- /// We generally recommend altering this or `top_p` but not both.
- ///
- @Nullable
- public final Float temperature;
-
- ///
- /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the
- /// tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
- ///
- @Nullable
- public final Float topP;
-
- public ChatCompletionRequest(
- @JsonProperty("model") String model,
- @JsonProperty("stream") boolean stream,
- @JsonProperty("messages") List messages,
- @JsonProperty("logprobs") Boolean logprobs,
-
- @Nullable
- @JsonProperty("top_logprobs") Integer topLogProbs,
-
- @Nullable
- @JsonProperty("max_tokens") Integer maxTokens,
-
- @Nullable
- @JsonProperty("presence_penalty") Float presencePenalty,
- @Nullable
- @JsonProperty("stop") List stop,
-
- @Nullable
- @JsonProperty("seed") Long seed,
- @Nullable
- @JsonProperty("temperature") Float temperature,
- @Nullable
- @JsonProperty("top_p")
- Float topP
- ) {
-
- this.model = model;
- this.stream = stream;
- this.messages = messages;
- this.logprobs = logprobs;
- this.topLogProbs = topLogProbs;
- this.maxTokens = maxTokens;
- this.presencePenalty = presencePenalty;
- this.stop = stop;
- this.seed = seed;
- this.temperature = temperature;
- this.topP = topP;
- }
-
- public static class ChatMessageToolCall {
-
- @Nullable
- private final String id;
-
- @Nullable
- private final String type;
-
- private final ChatMessageFunction function;
-
- public ChatMessageToolCall(
- @Nullable
- @JsonProperty("id") String id,
- @Nullable
- @JsonProperty("type") String type,
- @Nullable
- @JsonProperty("function") ChatMessageFunction function
- ) {
- this.id = id;
- this.type = type;
- this.function = function;
- }
- }
-
- public static class ChatMessageFunction {
-
- @Nullable
- public final String description;
- @Nullable
- public final String name;
- @Nullable
- public final String parameters;
-
- public ChatMessageFunction(
- @JsonProperty("description") String description,
- @JsonProperty("name") String name,
- @JsonProperty("parameters") String parameters
- ) {
- this.description = description;
- this.name = name;
- this.parameters = parameters;
- }
- }
-
- public static class ChatMessage {
-
- @Nullable
- public final String role;
- @Nullable
- public final String content;
- @Nullable
- public final String name;
- @Nullable
- public final List toolCalls;
-
-
- public ChatMessage(
- @Nullable
- @JsonProperty("role") String role,
- @Nullable
- @JsonProperty("content") String content,
- @Nullable
- @JsonProperty("name") String name,
- @Nullable
- @JsonProperty("tool_calls") List toolCalls
- ) {
- this.role = role;
- this.content = content;
- this.name = name;
- this.toolCalls = toolCalls;
- }
-
- }
-}
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/HuggingFaceChatCompletionService.java.ignore b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/HuggingFaceChatCompletionService.java.ignore
deleted file mode 100644
index f6f05a9e0..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/HuggingFaceChatCompletionService.java.ignore
+++ /dev/null
@@ -1,181 +0,0 @@
-package com.microsoft.semantickernel.aiservices.huggingface.services;
-
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.huggingface.HuggingFaceClient;
-import com.microsoft.semantickernel.aiservices.huggingface.models.ChatCompletionRequest;
-import com.microsoft.semantickernel.aiservices.huggingface.models.HuggingFaceXMLPromptParser;
-import com.microsoft.semantickernel.aiservices.huggingface.models.HuggingFaceXMLPromptParser.HuggingFaceParsedPrompt;
-import com.microsoft.semantickernel.orchestration.InvocationContext;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-import reactor.core.publisher.Mono;
-
-// TODO Support TGI
-public class HuggingFaceChatCompletionService implements ChatCompletionService {
-
- private final String modelId;
- private final String serviceId;
- private final HuggingFaceClient client;
-
- public HuggingFaceChatCompletionService(
- String modelId,
- String serviceId,
- HuggingFaceClient client) {
- this.modelId = modelId;
- this.serviceId = serviceId;
- this.client = client;
- }
-
- public Mono>> getChatMessageContentsAsync(
- ChatHistory chatHistory,
- @Nullable Kernel kernel,
- @Nullable HuggingFacePromptExecutionSettings executionSettings) {
-
- String model = modelId;
- if (executionSettings.getModelId() != null && !executionSettings.getModelId().isEmpty()) {
- model = executionSettings.getModelId();
- }
-
- ChatCompletionRequest request = new ChatCompletionRequest(
- model,
- false,
- chatHistory
- .getMessages()
- .stream()
- .map(
- message -> {
- return new ChatCompletionRequest.ChatMessage(
- message.getAuthorRole().name(),
- message.getContent(),
- null,
- null
- );
- }
- )
- .collect(Collectors.toList()),
- executionSettings.getLogprobs(),
- executionSettings.getTopLogProbs(),
- executionSettings.getMaxTokens(),
- new Float(executionSettings.getPresencePenalty()),
- executionSettings.getStopSequences(),
- executionSettings.getSeed(),
- new Float(executionSettings.getTemperature()),
- new Float(executionSettings.getTopP())
- );
-
- return client
- .getChatMessageContentsAsync(modelId, request)
- .map(result -> {
- return Collections.singletonList(new ChatMessageContent<>(
- AuthorRole.SYSTEM,
- result)
- );
- });
- }
-
- @Override
- public Mono>> getChatMessageContentsAsync(
- ChatHistory chatHistory,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
-
- HuggingFacePromptExecutionSettings executionSettings;
- if (invocationContext != null && invocationContext.getPromptExecutionSettings() != null) {
- executionSettings = HuggingFacePromptExecutionSettings.fromExecutionSettings(
- invocationContext.getPromptExecutionSettings());
- } else {
- executionSettings = new HuggingFacePromptExecutionSettings(
- PromptExecutionSettings.builder().build());
- }
-
- return getChatMessageContentsAsync(chatHistory, kernel, executionSettings);
-
- }
-
- @Override
- public Mono>> getChatMessageContentsAsync(
- String prompt,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
- HuggingFaceParsedPrompt parsed = HuggingFaceXMLPromptParser.parse(prompt);
-
- ChatHistory history = new ChatHistory();
- parsed.getChatRequestMessages()
- .forEach(message -> {
- history.addMessage(AuthorRole.valueOf(message.role.toUpperCase(Locale.ROOT)),
- message.content);
- });
-
- return getChatMessageContentsAsync(history, kernel, invocationContext);
-
- }
-
- @Nullable
- @Override
- public String getModelId() {
- return modelId;
- }
-
- @Nullable
- @Override
- public String getServiceId() {
- return serviceId;
- }
-
- public static Builder builder() {
- return new Builder();
- }
-
- public static class Builder {
-
- @Nullable
- private String modelId;
- @Nullable
- private HuggingFaceClient client;
- @Nullable
- private String serviceId;
-
- /**
- * Sets the model ID for the service
- *
- * @param modelId The model ID
- * @return The builder
- */
- public Builder withModelId(String modelId) {
- this.modelId = modelId;
- return this;
- }
-
- /**
- * Sets the service ID for the service
- *
- * @param serviceId The service ID
- * @return The builder
- */
- public Builder withServiceId(String serviceId) {
- this.serviceId = serviceId;
- return this;
- }
-
- public Builder withHuggingFaceClient(HuggingFaceClient client) {
- this.client = client;
- return this;
- }
-
- public ChatCompletionService build() {
- return new HuggingFaceChatCompletionService(
- this.modelId,
- this.serviceId,
- this.client);
- }
- }
-
-}
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/HuggingFaceXMLPromptParser.java.ignore b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/HuggingFaceXMLPromptParser.java.ignore
deleted file mode 100644
index 6e6474852..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/models/chat/HuggingFaceXMLPromptParser.java.ignore
+++ /dev/null
@@ -1,138 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.huggingface.models;
-
-import com.azure.core.util.BinaryData;
-import com.microsoft.semantickernel.aiservices.huggingface.models.ChatCompletionRequest.ChatMessage;
-import com.microsoft.semantickernel.aiservices.huggingface.models.ChatCompletionRequest.ChatMessageFunction;
-import com.microsoft.semantickernel.aiservices.huggingface.models.ChatCompletionRequest.ChatMessageToolCall;
-import com.microsoft.semantickernel.services.chatcompletion.ChatPromptParseVisitor;
-import com.microsoft.semantickernel.services.chatcompletion.ChatXMLPromptParser;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import javax.annotation.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-// TODO Support TGI
-public class HuggingFaceXMLPromptParser {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(HuggingFaceXMLPromptParser.class);
-
- public static class HuggingFaceParsedPrompt {
-
- private final List chatRequestMessages;
- private final List functions;
-
- protected HuggingFaceParsedPrompt(
- List parsedMessages,
- @Nullable List parsedFunctions
- ) {
- this.chatRequestMessages = parsedMessages;
- if (parsedFunctions == null) {
- parsedFunctions = new ArrayList<>();
- }
- this.functions = parsedFunctions;
- }
-
- public List getChatRequestMessages() {
- return chatRequestMessages;
- }
-
- public List getFunctions() {
- return functions;
- }
- }
-
- private static class HuggingFaceChatPromptParseVisitor implements
- ChatPromptParseVisitor {
-
- private HuggingFaceParsedPrompt parsedRaw;
- private final List functionDefinitions = new ArrayList<>();
- private final List messages = new ArrayList<>();
-
- @Override
- public ChatPromptParseVisitor addMessage(
- String role,
- String content) {
- messages.add(new ChatMessage(
- role,
- content,
- null,
- null));
- return this;
- }
-
- @Override
- public ChatPromptParseVisitor addFunction(
- String name,
- @Nullable
- String description,
- @Nullable
- BinaryData parameters) {
-
- String paramString = null;
- if (parameters != null) {
- paramString = parameters.toString();
- }
-
- ChatMessageToolCall function = new ChatMessageToolCall(
- name,
- null,
- new ChatMessageFunction(
- description,
- name,
- paramString
- )
- );
-
- functionDefinitions.add(function);
-
- return this;
- }
-
- @Override
- public boolean areMessagesEmpty() {
- return messages.isEmpty();
- }
-
- @Override
- public ChatPromptParseVisitor fromRawPrompt(
- String rawPrompt) {
-
- ChatMessage message = new ChatMessage(
- "user",
- rawPrompt,
- null,
- null
- );
-
- this.parsedRaw = new HuggingFaceParsedPrompt(Collections.singletonList(message),
- null);
-
- return this;
- }
-
- @Override
- public HuggingFaceParsedPrompt get() {
- if (parsedRaw != null) {
- return parsedRaw;
- }
-
- return new HuggingFaceParsedPrompt(messages, functionDefinitions);
- }
-
- @Override
- public ChatPromptParseVisitor reset() {
- return new HuggingFaceChatPromptParseVisitor();
- }
- }
-
- public static HuggingFaceParsedPrompt parse(String rawPrompt) {
- ChatPromptParseVisitor visitor = ChatXMLPromptParser.parse(
- rawPrompt,
- new HuggingFaceChatPromptParseVisitor());
-
- return visitor.get();
- }
-}
\ No newline at end of file
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/services/HuggingFacePromptExecutionSettings.java b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/services/HuggingFacePromptExecutionSettings.java
deleted file mode 100644
index adcabe7c4..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/services/HuggingFacePromptExecutionSettings.java
+++ /dev/null
@@ -1,246 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.huggingface.services;
-
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.orchestration.responseformat.ResponseFormat;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import javax.annotation.Nullable;
-
-/**
- * Represents the settings for executing a prompt with the Hugging Face API.
- */
-public class HuggingFacePromptExecutionSettings extends PromptExecutionSettings {
-
- @Nullable
- private final Integer topK;
- @Nullable
- private final Double repetitionPenalty;
- @Nullable
- private final Double maxTime;
- @Nullable
- private final Boolean details;
- @Nullable
- private final Boolean logProbs;
- @Nullable
- private final Integer topLogProbs;
- @Nullable
- private final Long seed;
-
- /**
- * Create a new instance of HuggingFacePromptExecutionSettings.
- *
- * @param copy The PromptExecutionSettings to copy.
- */
- public HuggingFacePromptExecutionSettings(PromptExecutionSettings copy) {
- super(
- copy.getServiceId(),
- copy.getModelId(),
- copy.getTemperature(),
- copy.getTopP(),
- copy.getPresencePenalty(),
- copy.getFrequencyPenalty(),
- copy.getMaxTokens(),
- copy.getResultsPerPrompt(),
- copy.getBestOf(),
- copy.getUser(),
- copy.getStopSequences(),
- copy.getTokenSelectionBiases(),
- copy.getResponseFormat() == null ? null : copy.getResponseFormat());
- this.topK = null;
- this.repetitionPenalty = null;
- this.maxTime = null;
- this.details = null;
- this.logProbs = null;
- this.topLogProbs = null;
- this.seed = null;
- }
-
- /**
- * Create a new instance of PromptExecutionSettings.
- *
- * @param serviceId The id of the AI service to use for prompt execution.
- * @param modelId The id of the model to use for prompt execution.
- * @param temperature The temperature setting for prompt execution.
- * @param topP The topP setting for prompt execution.
- * @param presencePenalty The presence penalty setting for prompt execution.
- * @param frequencyPenalty The frequency penalty setting for prompt execution.
- * @param maxTokens The maximum number of tokens to generate in the output.
- * @param resultsPerPrompt The number of results to generate for each prompt.
- * @param bestOf The best of setting for prompt execution.
- * @param user The user to associate with the prompt execution.
- * @param stopSequences The stop sequences to use for prompt execution.
- * @param tokenSelectionBiases The token selection biases to use for prompt execution.
- * @param responseFormat The response format to use for prompt execution
- * @param topK The topK setting for prompt execution.
- * @param repetitionPenalty The repetition penalty setting for prompt execution.
- * @param maxTime The max time setting for prompt execution.
- * @param details The details setting for prompt execution.
- * @param logProbs The logprobs setting for prompt execution.
- * @param topLogProbs The top log probs setting for prompt execution.
- * @param seed The seed setting for prompt execution
- */
- public HuggingFacePromptExecutionSettings(
- String serviceId,
- String modelId,
- Double temperature,
- Double topP,
- Double presencePenalty,
- Double frequencyPenalty,
- Integer maxTokens,
- Integer resultsPerPrompt,
- Integer bestOf,
- String user,
- @Nullable List stopSequences,
- @Nullable Map tokenSelectionBiases,
- @Nullable ResponseFormat responseFormat,
- @Nullable Integer topK,
- @Nullable Double repetitionPenalty,
- @Nullable Double maxTime,
- @Nullable Boolean details,
- @Nullable Boolean logProbs,
- @Nullable Integer topLogProbs,
- @Nullable Long seed) {
- super(
- serviceId, modelId, temperature, topP, presencePenalty, frequencyPenalty, maxTokens,
- resultsPerPrompt, bestOf, user, stopSequences, tokenSelectionBiases, responseFormat);
-
- this.topK = topK;
- this.repetitionPenalty = repetitionPenalty;
- this.maxTime = maxTime;
- this.details = details;
- this.logProbs = logProbs;
- this.topLogProbs = topLogProbs;
- this.seed = seed;
- }
-
- /**
- * Create a new instance of PromptExecutionSettings from a PromptExecutionSettings.
- * This method handles the whether the PromptExecutionSettings is already a
- * HuggingFacePromptExecutionSettings or a new instance needs to be created
- * from the provided PromptExecutionSettings.
- * @param promptExecutionSettings The PromptExecutionSettings to copy.
- * @return The PromptExecutionSettings mapped to a HuggingFacePromptExecutionSettings.
- */
- public static HuggingFacePromptExecutionSettings fromExecutionSettings(
- PromptExecutionSettings promptExecutionSettings) {
- if (promptExecutionSettings instanceof HuggingFacePromptExecutionSettings) {
- return (HuggingFacePromptExecutionSettings) promptExecutionSettings;
- }
-
- return new HuggingFacePromptExecutionSettings(
- promptExecutionSettings.getServiceId(),
- promptExecutionSettings.getModelId(),
- promptExecutionSettings.getTemperature(),
- promptExecutionSettings.getTopP(),
- promptExecutionSettings.getPresencePenalty(),
- promptExecutionSettings.getFrequencyPenalty(),
- promptExecutionSettings.getMaxTokens(),
- promptExecutionSettings.getResultsPerPrompt(),
- promptExecutionSettings.getBestOf(),
- promptExecutionSettings.getUser(),
- promptExecutionSettings.getStopSequences(),
- promptExecutionSettings.getTokenSelectionBiases(),
- promptExecutionSettings.getResponseFormat() != null
- ? promptExecutionSettings.getResponseFormat()
- : null,
- null,
- null,
- null,
- null,
- null,
- null,
- null);
- }
-
- /**
- * Gets the topK setting for prompt execution.
- * @return The topK setting for prompt execution
- */
- @Nullable
- public Integer getTopK() {
- return topK;
- }
-
- /**
- * Gets the repetition penalty setting for prompt execution.
- * @return The repetition penalty setting for prompt execution
- */
- @Nullable
- public Double getRepetitionPenalty() {
- return repetitionPenalty;
- }
-
- /**
- * Gets the max time setting for prompt execution.
- * @return The max time setting for prompt execution
- */
- @Nullable
- public Double getMaxTime() {
- return maxTime;
- }
-
- /**
- * Gets the details setting for prompt execution.
- * @return The details setting for prompt execution
- */
- @Nullable
- public Boolean getDetails() {
- return details;
- }
-
- /**
- * Gets the logprobs setting for prompt execution.
- * @return The logprobs setting for prompt execution
- */
- @Nullable
- public Boolean getLogprobs() {
- return logProbs;
- }
-
- /**
- * Gets the top log probs setting for prompt execution.
- * @return The top log probs setting for prompt execution
- */
- @Nullable
- public Integer getTopLogProbs() {
- return topLogProbs;
- }
-
- /**
- * Gets the seed setting for prompt execution.
- * @return The seed setting for prompt execution
- */
- @Nullable
- public Long getSeed() {
- return seed;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || !(o instanceof HuggingFacePromptExecutionSettings)) {
- return false;
- }
- if (!super.equals(o)) {
- return false;
- }
- HuggingFacePromptExecutionSettings that = (HuggingFacePromptExecutionSettings) o;
- return Objects.equals(topK, that.topK) &&
- Objects.equals(repetitionPenalty, that.repetitionPenalty) &&
- Objects.equals(maxTime, that.maxTime) &&
- Objects.equals(details, that.details) &&
- Objects.equals(logProbs, that.logProbs) &&
- Objects.equals(topLogProbs, that.topLogProbs) &&
- Objects.equals(seed, that.seed);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(super.hashCode(), topK, repetitionPenalty, maxTime, details, logProbs,
- topLogProbs, seed);
- }
-}
diff --git a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/services/HuggingFaceTextGenerationService.java b/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/services/HuggingFaceTextGenerationService.java
deleted file mode 100644
index a08b42326..000000000
--- a/aiservices/huggingface/src/main/java/com/microsoft/semantickernel/aiservices/huggingface/services/HuggingFaceTextGenerationService.java
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.huggingface.services;
-
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.huggingface.HuggingFaceClient;
-import com.microsoft.semantickernel.aiservices.huggingface.models.TextGenerationRequest;
-import com.microsoft.semantickernel.aiservices.huggingface.models.TextGenerationRequest.HuggingFaceTextOptions;
-import com.microsoft.semantickernel.aiservices.huggingface.models.TextGenerationRequest.HuggingFaceTextParameters;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.services.StreamingTextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
-import java.util.List;
-import java.util.UUID;
-import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-/**
- * A service that generates text using the Hugging Face API.
- */
-public class HuggingFaceTextGenerationService implements TextGenerationService {
-
- private final String modelId;
- private final String serviceId;
- private final HuggingFaceClient client;
-
- /**
- * Create a new instance of HuggingFaceTextGenerationService.
- * @param modelId The model ID.
- * @param serviceId The service ID.
- * @param client The Hugging Face client.
- */
- public HuggingFaceTextGenerationService(
- String modelId,
- String serviceId,
- HuggingFaceClient client) {
- this.modelId = modelId;
- this.serviceId = serviceId;
- this.client = client;
- }
-
- /**
- * Get the response to a prompt.
- * @param prompt The prompt.
- * @param huggingFacePromptExecutionSettings The settings for executing the prompt.
- * @param kernel The semantic kernel.
- * @return The response to the prompt.
- */
- public Mono> getTextContentsAsync(
- String prompt,
- @Nullable HuggingFacePromptExecutionSettings huggingFacePromptExecutionSettings,
- @Nullable Kernel kernel) {
-
- HuggingFaceTextParameters textParameters = getHuggingFaceTextParameters(
- huggingFacePromptExecutionSettings);
-
- TextGenerationRequest textGenerationRequest = new TextGenerationRequest(
- prompt,
- false,
- textParameters,
- new HuggingFaceTextOptions());
-
- return client
- .getTextContentsAsync(modelId, textGenerationRequest)
- .map(result -> result
- .stream()
- .map(item -> new TextContent(
- item.getGeneratedText() != null ? item.getGeneratedText() : "",
- modelId,
- FunctionResultMetadata.build(UUID.randomUUID().toString())))
- .collect(Collectors.toList()));
- }
-
- @Override
- public Mono> getTextContentsAsync(
- String prompt,
- @Nullable PromptExecutionSettings executionSettings,
- @Nullable Kernel kernel) {
-
- HuggingFacePromptExecutionSettings huggingFacePromptExecutionSettings = null;
-
- if (executionSettings != null) {
- huggingFacePromptExecutionSettings = HuggingFacePromptExecutionSettings
- .fromExecutionSettings(
- executionSettings);
- }
-
- return getTextContentsAsync(
- prompt,
- huggingFacePromptExecutionSettings,
- kernel);
-
- }
-
- @Override
- public Flux getStreamingTextContentsAsync(String prompt,
- @Nullable PromptExecutionSettings executionSettings, @Nullable Kernel kernel) {
- throw new SKException("Streaming text content is not supported");
- }
-
- private static @Nullable HuggingFaceTextParameters getHuggingFaceTextParameters(
- @Nullable HuggingFacePromptExecutionSettings executionSettings) {
- HuggingFaceTextParameters textParameters = null;
- if (executionSettings != null) {
- textParameters = new HuggingFaceTextParameters(
- executionSettings.getTopK(),
- executionSettings.getTopP(),
- executionSettings.getTemperature(),
- executionSettings.getRepetitionPenalty(),
- executionSettings.getMaxTokens(),
- executionSettings.getMaxTime(),
- true,
- executionSettings.getResultsPerPrompt(),
- null,
- executionSettings.getDetails());
- }
- return textParameters;
- }
-
- @Nullable
- @Override
- public String getModelId() {
- return modelId;
- }
-
- @Nullable
- @Override
- public String getServiceId() {
- return serviceId;
- }
-
- /**
- * Create a new builder for HuggingFaceTextGenerationService.
- * @return The builder.
- */
- public static Builder builder() {
- return new Builder();
- }
-
- /**
- * A builder for HuggingFaceTextGenerationService.
- */
- public static class Builder {
-
- @Nullable
- protected String modelId;
- @Nullable
- protected HuggingFaceClient client;
- @Nullable
- protected String serviceId;
-
- /**
- * Sets the model ID for the service
- *
- * @param modelId The model ID
- * @return The builder
- */
- public Builder withModelId(String modelId) {
- this.modelId = modelId;
- return this;
- }
-
- /**
- * Sets the service ID for the service
- *
- * @param serviceId The service ID
- * @return The builder
- */
- public Builder withServiceId(String serviceId) {
- this.serviceId = serviceId;
- return this;
- }
-
- /**
- * Sets the HuggingFaceClient for the service
- * @param client The HuggingFaceClient
- * @return The builder
- */
- public Builder withHuggingFaceClient(HuggingFaceClient client) {
- this.client = client;
- return this;
- }
-
- /**
- * Builds the HuggingFaceTextGenerationService
- * @return The HuggingFaceTextGenerationService
- */
- public HuggingFaceTextGenerationService build() {
-
- if (this.modelId == null) {
- throw new SKException(
- "Model ID is required to build HuggingFaceTextGenerationService");
- }
-
- if (this.serviceId == null) {
- throw new SKException(
- "Service ID is required to build HuggingFaceTextGenerationService");
- }
-
- if (this.client == null) {
- throw new SKException(
- "HuggingFaceClient is required to build HuggingFaceTextGenerationService");
- }
-
- return new HuggingFaceTextGenerationService(
- this.modelId,
- this.serviceId,
- this.client);
- }
- }
-}
diff --git a/aiservices/openai/pom.xml b/aiservices/openai/pom.xml
deleted file mode 100644
index 992629a43..000000000
--- a/aiservices/openai/pom.xml
+++ /dev/null
@@ -1,104 +0,0 @@
-
-
-
- 4.0.0
-
-
- com.microsoft.semantic-kernel
- semantickernel-parent
- 1.4.4-RC3-SNAPSHOT
- ../../pom.xml
-
-
- semantickernel-aiservices-openai
- Semantic Kernel OpenAI Services
- OpenAI services for Semantic Kernel
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-api
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-data
- provided
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-exceptions
- provided
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-ai-services
- provided
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-builders
-
-
- com.microsoft.semantic-kernel
- semantickernel-api-textembedding-services
-
-
-
- com.azure
- azure-ai-openai
-
-
- com.fasterxml.jackson.core
- jackson-databind
- compile
-
-
- com.fasterxml.jackson.core
- jackson-core
- compile
-
-
-
-
- javax.xml.stream
- stax-api
- provided
-
-
- io.opentelemetry
- opentelemetry-sdk
- test
-
-
- org.junit.jupiter
- junit-jupiter-api
- test
-
-
- org.mockito
- mockito-core
- test
-
-
-
- com.github.victools
- jsonschema-generator
- true
-
-
- com.github.victools
- jsonschema-module-jackson
- true
-
-
-
-
-
-
- src/main/resources
- true
-
-
-
-
-
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/OpenAiService.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/OpenAiService.java
deleted file mode 100644
index 0edee4767..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/OpenAiService.java
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai;
-
-import com.microsoft.semantickernel.services.AIService;
-import javax.annotation.Nullable;
-
-/**
- * Provides OpenAI service.
- * @param the client type
- */
-public abstract class OpenAiService implements AIService {
-
- private final Client client;
- @Nullable
- private final String serviceId;
- private final String modelId;
- private final String deploymentName;
-
- protected OpenAiService(
- Client client,
- @Nullable String serviceId,
- String modelId,
- String deploymentName) {
- this.client = client;
- this.serviceId = serviceId;
- this.modelId = modelId;
- this.deploymentName = deploymentName;
- }
-
- @Nullable
- @Override
- public String getModelId() {
- return modelId;
- }
-
- @Override
- @Nullable
- public String getServiceId() {
- return serviceId;
- }
-
- /**
- * Gets the client.
- * @return the client
- */
- protected Client getClient() {
- return client;
- }
-
- /**
- * Gets the deployment name.
- * @return the deployment name
- */
- public String getDeploymentName() {
- return deploymentName;
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/audio/OpenAiAudioToTextService.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/audio/OpenAiAudioToTextService.java
deleted file mode 100644
index 7a126f6d8..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/audio/OpenAiAudioToTextService.java
+++ /dev/null
@@ -1,125 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.audio;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.AudioTranscription;
-import com.azure.ai.openai.models.AudioTranscriptionFormat;
-import com.azure.ai.openai.models.AudioTranscriptionOptions;
-import com.microsoft.semantickernel.aiservices.openai.OpenAiService;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.services.audio.AudioContent;
-import com.microsoft.semantickernel.services.audio.AudioToTextExecutionSettings;
-import com.microsoft.semantickernel.services.audio.AudioToTextService;
-import javax.annotation.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Mono;
-
-/**
- * Provides OpenAi implementation of audio to text service.
- */
-public class OpenAiAudioToTextService extends OpenAiService
- implements AudioToTextService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(OpenAiAudioToTextService.class);
-
- /**
- * Creates an instance of OpenAi audio to text service.
- *
- * @param client OpenAI client.
- * @param modelId The model ID.
- * @param deploymentName The deployment name.
- */
- public OpenAiAudioToTextService(
- OpenAIAsyncClient client,
- String modelId,
- String deploymentName) {
- super(client, null, modelId, deploymentName);
- }
-
- @Override
- public Mono getTextContentsAsync(
- AudioContent content,
- @Nullable AudioToTextExecutionSettings executionSettings) {
-
- AudioTranscriptionOptions options = convertOptions(content, executionSettings);
-
- // TODO: Should use getAudioTranscriptionTextWithResponse, and OpenAIRequestSettings.getRequestOptions()
- // however currently this breaks the request
- return getClient()
- .getAudioTranscription(
- getDeploymentName(),
- options.getFilename(),
- options)
- .map(AudioTranscription::getText);
- }
-
- private AudioTranscriptionOptions convertOptions(
- AudioContent content,
- @Nullable AudioToTextExecutionSettings executionSettings) {
- AudioTranscriptionOptions options = new AudioTranscriptionOptions(content.getData());
-
- options.setModel(getModelId());
- if (executionSettings == null) {
- return options;
- }
-
- if (executionSettings.getResponseFormat() != null) {
- options.setResponseFormat(
- AudioTranscriptionFormat.fromString(executionSettings.getResponseFormat()));
- }
-
- if (executionSettings.getFilename() != null) {
- options.setFilename(executionSettings.getFilename());
- }
-
- if (executionSettings.getLanguage() != null) {
- options.setLanguage(executionSettings.getLanguage());
- }
-
- if (executionSettings.getPrompt() != null) {
- options.setPrompt(executionSettings.getPrompt());
- }
-
- if (executionSettings.getTemperature() != null) {
- options.setTemperature(executionSettings.getTemperature());
- }
- return options;
- }
-
- /**
- * Builder for OpenAiAudioToTextService.
- *
- * @return The builder.
- */
- public static Builder builder() {
- return new Builder();
- }
-
- /**
- * Represents a builder for OpenAiAudioToTextService.
- */
- public static class Builder extends AudioToTextService.Builder {
-
- /**
- * builds the OpenAiAudioToTextService.
- */
- @Override
- public AudioToTextService build() {
- if (client == null) {
- throw new SKException("OpenAI client is required");
- }
-
- if (modelId == null) {
- throw new SKException("Model id is required");
- }
-
- if (deploymentName == null) {
- LOGGER.debug("Deployment name is not provided, using model id as deployment name");
- deploymentName = modelId;
- }
-
- return new OpenAiAudioToTextService(client, modelId, deploymentName);
- }
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/audio/OpenAiTextToAudioService.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/audio/OpenAiTextToAudioService.java
deleted file mode 100644
index dc748af7d..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/audio/OpenAiTextToAudioService.java
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.audio;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.SpeechGenerationOptions;
-import com.azure.ai.openai.models.SpeechGenerationResponseFormat;
-import com.azure.ai.openai.models.SpeechVoice;
-import com.microsoft.semantickernel.aiservices.openai.OpenAiService;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.services.audio.AudioContent;
-import com.microsoft.semantickernel.services.audio.TextToAudioExecutionSettings;
-import com.microsoft.semantickernel.services.audio.TextToAudioService;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Mono;
-
-/**
- * Provides OpenAi implementation of text to audio service.
- */
-public class OpenAiTextToAudioService extends OpenAiService
- implements TextToAudioService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(OpenAiTextToAudioService.class);
-
- /**
- * Creates an instance of OpenAi text to audio service.
- *
- * @param client OpenAI client.
- * @param modelId The model ID.
- * @param deploymentName The deployment name.
- */
- public OpenAiTextToAudioService(
- OpenAIAsyncClient client,
- String modelId,
- String deploymentName) {
- super(client, null, modelId, deploymentName);
- }
-
- @Override
- public Mono getAudioContentAsync(
- String text,
- TextToAudioExecutionSettings executionSettings) {
-
- SpeechGenerationOptions options = convertOptions(text, executionSettings);
-
- return getClient().generateSpeechFromText(getDeploymentName(), options)
- .map(response -> new AudioContent(response.toBytes(), getModelId()));
- }
-
- private SpeechGenerationOptions convertOptions(
- String text,
- TextToAudioExecutionSettings executionSettings) {
- SpeechGenerationOptions options = new SpeechGenerationOptions(
- text,
- SpeechVoice.fromString(executionSettings.getVoice()));
-
- options.setModel(getModelId());
-
- if (executionSettings.getResponseFormat() != null) {
- options.setResponseFormat(
- SpeechGenerationResponseFormat.fromString(executionSettings.getResponseFormat()));
- }
-
- if (executionSettings.getSpeed() != null) {
- options.setSpeed(executionSettings.getSpeed());
- }
-
- return options;
- }
-
- /**
- * Creates a new builder.
- *
- * @return The builder.
- */
- public static Builder builder() {
- return new Builder();
- }
-
- /**
- * Represents a builder for OpenAi text to audio service.
- */
- public static class Builder extends TextToAudioService.Builder {
-
- /**
- * Builds the OpenAi text to audio service.
- *
- * @return The OpenAi text to audio service.
- */
- @Override
- public TextToAudioService build() {
- if (client == null) {
- throw new SKException("OpenAI client is required");
- }
-
- if (modelId == null) {
- throw new SKException("Model id is required");
- }
-
- if (deploymentName == null) {
- LOGGER.debug("Deployment name is not provided, using model id as deployment name");
- deploymentName = modelId;
- }
-
- return new OpenAiTextToAudioService(client, modelId, deploymentName);
- }
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/BinaryDataUtils.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/BinaryDataUtils.java
deleted file mode 100644
index 9a3189a7e..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/BinaryDataUtils.java
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.core.util.BinaryData;
-import javax.annotation.Nullable;
-
-public class BinaryDataUtils {
-
- @Nullable
- public static String toString(@Nullable BinaryData b) {
- if (b == null) {
- return null;
- }
- return b.toString();
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatCompletion.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatCompletion.java
deleted file mode 100644
index 8256bb002..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatCompletion.java
+++ /dev/null
@@ -1,1334 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.ChatChoice;
-import com.azure.ai.openai.models.ChatCompletions;
-import com.azure.ai.openai.models.ChatCompletionsFunctionToolCall;
-import com.azure.ai.openai.models.ChatCompletionsFunctionToolDefinition;
-import com.azure.ai.openai.models.ChatCompletionsFunctionToolDefinitionFunction;
-import com.azure.ai.openai.models.ChatCompletionsJsonResponseFormat;
-import com.azure.ai.openai.models.ChatCompletionsNamedToolSelection;
-import com.azure.ai.openai.models.ChatCompletionsOptions;
-import com.azure.ai.openai.models.ChatCompletionsTextResponseFormat;
-import com.azure.ai.openai.models.ChatCompletionsToolCall;
-import com.azure.ai.openai.models.ChatCompletionsToolDefinition;
-import com.azure.ai.openai.models.ChatCompletionsToolSelection;
-import com.azure.ai.openai.models.ChatCompletionsToolSelectionPreset;
-import com.azure.ai.openai.models.ChatMessageImageContentItem;
-import com.azure.ai.openai.models.ChatMessageImageDetailLevel;
-import com.azure.ai.openai.models.ChatMessageImageUrl;
-import com.azure.ai.openai.models.ChatRequestAssistantMessage;
-import com.azure.ai.openai.models.ChatRequestFunctionMessage;
-import com.azure.ai.openai.models.ChatRequestMessage;
-import com.azure.ai.openai.models.ChatRequestSystemMessage;
-import com.azure.ai.openai.models.ChatRequestToolMessage;
-import com.azure.ai.openai.models.ChatRequestUserMessage;
-import com.azure.ai.openai.models.ChatResponseMessage;
-import com.azure.ai.openai.models.CompletionsUsage;
-import com.azure.ai.openai.models.FunctionCall;
-import com.azure.ai.openai.models.FunctionDefinition;
-import com.azure.json.JsonOptions;
-import com.azure.json.implementation.DefaultJsonReader;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ContainerNode;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.openai.OpenAiService;
-import com.microsoft.semantickernel.aiservices.openai.chatcompletion.responseformat.ChatCompletionsJsonSchemaResponseFormat;
-import com.microsoft.semantickernel.aiservices.openai.implementation.OpenAIRequestSettings;
-import com.microsoft.semantickernel.contents.FunctionCallContent;
-import com.microsoft.semantickernel.contextvariables.ContextVariable;
-import com.microsoft.semantickernel.contextvariables.ContextVariableTypes;
-import com.microsoft.semantickernel.exceptions.AIException;
-import com.microsoft.semantickernel.exceptions.AIException.ErrorCodes;
-import com.microsoft.semantickernel.exceptions.SKCheckedException;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.functionchoice.AutoFunctionChoiceBehavior;
-import com.microsoft.semantickernel.functionchoice.FunctionChoiceBehavior;
-import com.microsoft.semantickernel.functionchoice.NoneFunctionChoiceBehavior;
-import com.microsoft.semantickernel.functionchoice.RequiredFunctionChoiceBehavior;
-import com.microsoft.semantickernel.hooks.KernelHookEvent;
-import com.microsoft.semantickernel.hooks.KernelHooks;
-import com.microsoft.semantickernel.hooks.PostChatCompletionEvent;
-import com.microsoft.semantickernel.hooks.PreChatCompletionEvent;
-import com.microsoft.semantickernel.hooks.PreToolCallEvent;
-import com.microsoft.semantickernel.implementation.CollectionUtil;
-import com.microsoft.semantickernel.implementation.telemetry.ChatCompletionSpan;
-import com.microsoft.semantickernel.implementation.telemetry.SemanticKernelTelemetry;
-import com.microsoft.semantickernel.orchestration.FunctionResult;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.orchestration.InvocationContext;
-import com.microsoft.semantickernel.orchestration.InvocationReturnMode;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.orchestration.ToolCallBehavior;
-import com.microsoft.semantickernel.orchestration.responseformat.JsonResponseSchema;
-import com.microsoft.semantickernel.orchestration.responseformat.JsonSchemaResponseFormat;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import com.microsoft.semantickernel.services.chatcompletion.StreamingChatContent;
-import com.microsoft.semantickernel.services.chatcompletion.message.ChatMessageContentType;
-import com.microsoft.semantickernel.services.chatcompletion.message.ChatMessageImageContent;
-import com.microsoft.semantickernel.services.openai.OpenAiServiceBuilder;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import javax.annotation.CheckReturnValue;
-import javax.annotation.Nullable;
-import org.apache.commons.text.StringEscapeUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-/**
- * OpenAI chat completion service.
- */
-public class OpenAIChatCompletion extends OpenAiService
- implements ChatCompletionService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(OpenAIChatCompletion.class);
-
- protected OpenAIChatCompletion(
- OpenAIAsyncClient client,
- String deploymentName,
- String modelId,
- @Nullable String serviceId) {
- super(client, serviceId, modelId, deploymentName);
- }
-
- /**
- * Create a new instance of {@link OpenAIChatCompletion.Builder}.
- *
- * @return a new instance of {@link OpenAIChatCompletion.Builder}
- */
- public static OpenAIChatCompletion.Builder builder() {
- return new OpenAIChatCompletion.Builder();
- }
-
- @Override
- public Mono>> getChatMessageContentsAsync(
- ChatHistory chatHistory,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
-
- List chatRequestMessages = getChatRequestMessages(chatHistory);
-
- ChatMessages messages = new ChatMessages(chatRequestMessages);
-
- return internalChatMessageContentsAsync(
- messages,
- kernel,
- invocationContext)
- .flatMap(history -> {
- try {
- ChatHistory chatHistoryResult;
-
- if (invocationContext != null
- && invocationContext.returnMode() == InvocationReturnMode.FULL_HISTORY) {
- chatHistoryResult = new ChatHistory(chatHistory.getMessages());
- } else {
- chatHistoryResult = new ChatHistory();
- }
-
- chatHistoryResult.addAll(
- new ChatHistory(toOpenAIChatMessageContent(history.newMessages)));
- chatHistoryResult.addAll(new ChatHistory(history.newChatMessageContent));
-
- if (invocationContext != null
- && invocationContext
- .returnMode() == InvocationReturnMode.LAST_MESSAGE_ONLY) {
- chatHistoryResult = new ChatHistory(
- Collections.singletonList(
- CollectionUtil.getLastOrNull(chatHistoryResult.getMessages())));
- }
-
- return Mono.just(chatHistoryResult.getMessages());
- } catch (Exception e) {
- return Mono.error(e);
- }
- });
- }
-
- @Override
- public Mono>> getChatMessageContentsAsync(
- String prompt,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
- ParsedPrompt parsedPrompt = OpenAiXMLPromptParser.parse(prompt);
-
- ChatMessages messages = new ChatMessages(parsedPrompt.getChatRequestMessages());
-
- return internalChatMessageContentsAsync(
- messages,
- kernel,
- invocationContext)
- .flatMap(m -> {
- try {
- ChatHistory result = new ChatHistory(toOpenAIChatMessageContent(m.allMessages));
-
- result.addAll(new ChatHistory(m.newChatMessageContent));
-
- if (invocationContext != null
- && invocationContext
- .returnMode() == InvocationReturnMode.LAST_MESSAGE_ONLY) {
- result = new ChatHistory(
- Collections.singletonList(
- CollectionUtil.getLastOrNull(result.getMessages())));
- }
-
- return Mono.just(result.getMessages());
- } catch (SKCheckedException e) {
- return Mono.error(e);
- }
- });
- }
-
- @Override
- public Flux> getStreamingChatMessageContentsAsync(
- ChatHistory chatHistory,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
- if (invocationContext != null &&
- invocationContext.getToolCallBehavior() != null &&
- invocationContext.getToolCallBehavior().isAutoInvokeAllowed()) {
- throw new SKException(
- "ToolCallBehavior auto-invoke is not supported for streaming chat message contents");
- }
-
- if (invocationContext != null &&
- invocationContext.getFunctionChoiceBehavior() != null &&
- invocationContext.getFunctionChoiceBehavior() instanceof AutoFunctionChoiceBehavior &&
- ((AutoFunctionChoiceBehavior) invocationContext.getFunctionChoiceBehavior())
- .isAutoInvoke()) {
- throw new SKException(
- "FunctionChoiceBehavior auto-invoke is not supported for streaming chat message contents");
- }
-
- if (invocationContext != null
- && invocationContext.returnMode() != InvocationReturnMode.NEW_MESSAGES_ONLY) {
- throw new SKException(
- "Streaming chat message contents only supports NEW_MESSAGES_ONLY return mode");
- }
-
- List chatRequestMessages = getChatRequestMessages(chatHistory);
-
- ChatMessages messages = new ChatMessages(chatRequestMessages);
-
- List functions = new ArrayList<>();
- if (kernel != null) {
- kernel.getPlugins()
- .forEach(plugin -> plugin.getFunctions().forEach((name, function) -> functions
- .add(OpenAIFunction.build(function.getMetadata(), plugin.getName()))));
- }
-
- OpenAIToolCallConfig toolCallConfig = getToolCallConfig(
- invocationContext,
- functions,
- messages.allMessages,
- 0);
-
- ChatCompletionsOptions options = executeHook(
- invocationContext,
- kernel,
- new PreChatCompletionEvent(
- getCompletionsOptions(
- this,
- messages.allMessages,
- invocationContext,
- toolCallConfig)))
- .getOptions();
-
- return getClient()
- .getChatCompletionsStreamWithResponse(
- getDeploymentName(),
- options,
- OpenAIRequestSettings.getRequestOptions())
- .flatMap(completionsResult -> {
- if (completionsResult.getStatusCode() >= 400) {
- //SemanticKernelTelemetry.endSpanWithError(span);
- return Mono.error(new AIException(ErrorCodes.SERVICE_ERROR,
- "Request failed: " + completionsResult.getStatusCode()));
- }
- //SemanticKernelTelemetry.endSpanWithUsage(span, completionsResult.getValue().getUsage());
-
- return Mono.just(completionsResult.getValue());
- })
- .flatMap(completions -> {
- return Flux.fromIterable(completions.getChoices())
- .map(message -> {
- AuthorRole role = message.getDelta().getRole() == null
- ? AuthorRole.ASSISTANT
- : AuthorRole.valueOf(message.getDelta().getRole().toString()
- .toUpperCase(Locale.ROOT));
-
- return new OpenAIStreamingChatMessageContent<>(
- completions.getId(),
- role,
- message.getDelta().getContent(),
- getModelId(),
- null,
- null,
- null,
- Arrays.asList());
- });
- });
- }
-
- @Override
- public Flux> getStreamingChatMessageContentsAsync(
- String prompt,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
- return getStreamingChatMessageContentsAsync(
- new ChatHistory().addUserMessage(prompt),
- kernel,
- invocationContext);
- }
-
- // Holds messages temporarily as we build up our result
- private static class ChatMessages {
-
- private final List newMessages;
- private final List allMessages;
- private final List> newChatMessageContent;
-
- public ChatMessages(List allMessages) {
- this.allMessages = Collections.unmodifiableList(allMessages);
- this.newMessages = Collections.unmodifiableList(new ArrayList<>());
- this.newChatMessageContent = Collections.unmodifiableList(new ArrayList<>());
- }
-
- private ChatMessages(
- List allMessages,
- List newMessages,
- List> newChatMessageContent) {
- this.allMessages = Collections.unmodifiableList(allMessages);
- this.newMessages = Collections.unmodifiableList(newMessages);
- this.newChatMessageContent = Collections.unmodifiableList(newChatMessageContent);
- }
-
- @CheckReturnValue
- public ChatMessages addAll(List requestMessage) {
- List tmpAllMessages = new ArrayList<>(allMessages);
- List tmpNewMessages = new ArrayList<>(newMessages);
- tmpAllMessages.addAll(requestMessage);
- tmpNewMessages.addAll(requestMessage);
- return new ChatMessages(
- tmpAllMessages,
- tmpNewMessages,
- newChatMessageContent);
- }
-
- @CheckReturnValue
- public ChatMessages add(ChatRequestMessage requestMessage) {
- return addAll(Arrays.asList(requestMessage));
- }
-
- @CheckReturnValue
- public ChatMessages addChatMessage(List> chatMessageContent) {
- ArrayList> tmpChatMessageContent = new ArrayList<>(
- newChatMessageContent);
- tmpChatMessageContent.addAll(chatMessageContent);
-
- return new ChatMessages(
- allMessages,
- newMessages,
- tmpChatMessageContent);
- }
-
- /**
- * Checks that the two messages have a similar history
- *
- * @param messages The messages to merge in
- * @return The merged chat messages
- */
- boolean assertCommonHistory(List messages) {
- int index = 0;
- while (index < messages.size() && index < this.allMessages.size()) {
- ChatRequestMessage a = messages.get(index);
- ChatRequestMessage b = this.allMessages.get(index);
-
- boolean matches = false;
- if (a instanceof ChatRequestAssistantMessage
- && b instanceof ChatRequestAssistantMessage) {
- matches = Objects.equals(((ChatRequestAssistantMessage) a).getContent(),
- ((ChatRequestAssistantMessage) b).getContent());
- } else if (a instanceof ChatRequestSystemMessage
- && b instanceof ChatRequestSystemMessage) {
- matches = Objects.equals(((ChatRequestSystemMessage) a).getContent(),
- ((ChatRequestSystemMessage) b).getContent());
- } else if (a instanceof ChatRequestUserMessage
- && b instanceof ChatRequestUserMessage) {
- matches = Objects.equals(((ChatRequestUserMessage) a).getContent(),
- ((ChatRequestUserMessage) b).getContent());
- } else if (a instanceof ChatRequestFunctionMessage
- && b instanceof ChatRequestFunctionMessage) {
- matches = Objects.equals(((ChatRequestFunctionMessage) a).getContent(),
- ((ChatRequestFunctionMessage) b).getContent());
- } else if (a instanceof ChatRequestToolMessage
- && b instanceof ChatRequestToolMessage) {
- matches = Objects.equals(((ChatRequestToolMessage) a).getContent(),
- ((ChatRequestToolMessage) b).getContent());
- }
-
- if (!matches) {
- LOGGER.warn("Messages do not match at index: " + index
- + " you might be merging unrelated message histories");
- return false;
- }
-
- index++;
- }
-
- return true;
-
- }
- }
-
- private Mono internalChatMessageContentsAsync(
- ChatMessages messages,
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext) {
-
- List functions = new ArrayList<>();
- if (kernel != null) {
- kernel.getPlugins()
- .forEach(plugin -> plugin.getFunctions().forEach((name, function) -> functions
- .add(OpenAIFunction.build(function.getMetadata(), plugin.getName()))));
- }
-
- return internalChatMessageContentsAsync(
- messages,
- kernel,
- functions,
- invocationContext,
- 0);
- }
-
- private Mono internalChatMessageContentsAsync(
- ChatMessages messages,
- @Nullable Kernel kernel,
- List functions,
- @Nullable InvocationContext invocationContext,
- int requestIndex) {
-
- OpenAIToolCallConfig toolCallConfig = getToolCallConfig(
- invocationContext,
- functions,
- messages.allMessages,
- requestIndex);
-
- ChatCompletionsOptions options = executeHook(
- invocationContext,
- kernel,
- new PreChatCompletionEvent(
- getCompletionsOptions(
- this,
- messages.allMessages,
- invocationContext,
- toolCallConfig)))
- .getOptions();
-
- return Mono.deferContextual(contextView -> {
- ChatCompletionSpan span = ChatCompletionSpan.startChatCompletionSpan(
- SemanticKernelTelemetry.getTelemetry(invocationContext),
- contextView,
- getModelId(),
- SemanticKernelTelemetry.OPEN_AI_PROVIDER,
- options.getMaxTokens(),
- options.getTemperature(),
- options.getTopP());
-
- return getClient()
- .getChatCompletionsWithResponse(getDeploymentName(), options,
- OpenAIRequestSettings.getRequestOptions())
- .contextWrite(span.getReactorContextModifier())
- .flatMap(completionsResult -> {
- if (completionsResult.getStatusCode() >= 400) {
- return Mono.error(new AIException(ErrorCodes.SERVICE_ERROR,
- "Request failed: " + completionsResult.getStatusCode()));
- }
-
- return Mono.just(completionsResult.getValue());
- })
- .doOnError(span::endSpanWithError)
- .doOnSuccess(span::endSpanWithUsage)
- .doOnTerminate(span::close);
- })
- .flatMap(completions -> {
- List responseMessages = completions
- .getChoices()
- .stream()
- .map(ChatChoice::getMessage)
- .filter(Objects::nonNull)
- .collect(Collectors.toList());
-
- // execute post chat completion hook
- executeHook(invocationContext, kernel, new PostChatCompletionEvent(completions));
-
- // Just return the result:
- // If auto-invoking is not enabled
- // Or if we are auto-invoking, but we somehow end up with other than 1 choice even though only 1 was requested
- if (toolCallConfig == null || !toolCallConfig.isAutoInvoke()
- || responseMessages.size() != 1) {
- List> chatMessageContents = getChatMessageContentsAsync(
- completions);
- return Mono.just(messages.addChatMessage(chatMessageContents));
- }
-
- // Or if there are no tool calls to be done
- ChatResponseMessage response = responseMessages.get(0);
- List toolCalls = response.getToolCalls();
- if (toolCalls == null || toolCalls.isEmpty()) {
- List> chatMessageContents = getChatMessageContentsAsync(
- completions);
- return Mono.just(messages.addChatMessage(chatMessageContents));
- }
-
- ChatRequestAssistantMessage requestMessage = new ChatRequestAssistantMessage(
- response.getContent());
- requestMessage.setToolCalls(toolCalls);
-
- // Add the original assistant message to the chat options; this is required for the service
- // to understand the tool call responses
- ChatMessages messagesWithToolCall = messages.add(requestMessage);
-
- return Flux
- .fromIterable(toolCalls)
- .reduce(
- Mono.just(messagesWithToolCall),
- (requestMessages, toolCall) -> {
- if (toolCall instanceof ChatCompletionsFunctionToolCall) {
- return performToolCall(kernel, invocationContext, requestMessages,
- toolCall);
- }
-
- return requestMessages;
- })
- .flatMap(it -> it)
- .flatMap(msgs -> {
- return internalChatMessageContentsAsync(msgs, kernel, functions,
- invocationContext, requestIndex + 1);
- })
- .onErrorResume(e -> {
-
- LOGGER.warn("Tool invocation attempt failed: ", e);
-
- // If FunctionInvocationError occurred and there are still attempts left, retry, else exit
- if (requestIndex < MAXIMUM_INFLIGHT_AUTO_INVOKES) {
- ChatMessages currentMessages = messages;
- if (e instanceof FunctionInvocationError) {
- currentMessages.assertCommonHistory(
- ((FunctionInvocationError) e).getMessages());
-
- currentMessages = new ChatMessages(
- ((FunctionInvocationError) e).getMessages());
- }
- return internalChatMessageContentsAsync(
- currentMessages,
- kernel,
- functions,
- invocationContext,
- requestIndex + 1);
- } else {
- return Mono.error(e);
- }
- });
- });
- }
-
- private Mono performToolCall(
- @Nullable Kernel kernel,
- @Nullable InvocationContext invocationContext,
- Mono requestMessages,
- ChatCompletionsToolCall toolCall) {
-
- return requestMessages
- .flatMap(messages -> {
- try {
- // OpenAI only supports function tool call at the moment
- ChatCompletionsFunctionToolCall functionToolCall = (ChatCompletionsFunctionToolCall) toolCall;
- if (kernel == null) {
- return Mono.error(new SKException(
- "A tool call was requested, but no kernel was provided to the invocation, this is a unsupported configuration"));
- }
-
- ContextVariableTypes contextVariableTypes = invocationContext == null
- ? new ContextVariableTypes()
- : invocationContext.getContextVariableTypes();
-
- return invokeFunctionTool(
- kernel,
- invocationContext,
- functionToolCall,
- contextVariableTypes)
- .map(functionResult -> {
- // Add chat request tool message to the chat options
- ChatRequestMessage requestToolMessage = new ChatRequestToolMessage(
- functionResult.getResult(),
- functionToolCall.getId());
-
- return messages.add(requestToolMessage);
- })
- .switchIfEmpty(Mono.fromSupplier(
- () -> {
- ChatRequestMessage requestToolMessage = new ChatRequestToolMessage(
- "Completed successfully with no return value",
- functionToolCall.getId());
-
- return messages.add(requestToolMessage);
- }))
- .onErrorResume(e -> emitError(toolCall, messages, e));
- } catch (Exception e) {
- return emitError(toolCall, messages, e);
- }
- });
- }
-
- private Mono emitError(
- ChatCompletionsToolCall toolCall,
- ChatMessages msgs,
- Throwable e) {
- msgs = msgs.add(new ChatRequestToolMessage(
- "Call failed: " + e.getMessage(),
- toolCall.getId()));
-
- return Mono.error(new FunctionInvocationError(e, msgs.allMessages));
- }
-
- /**
- * Exception to be thrown when a function invocation fails.
- */
- private static class FunctionInvocationError extends SKException {
-
- private final List messages;
-
- public FunctionInvocationError(Throwable e, List msgs) {
- super(e.getMessage(), e);
- this.messages = msgs;
- }
-
- public List getMessages() {
- return messages;
- }
- }
-
- @SuppressWarnings("StringSplitter")
- private Mono> invokeFunctionTool(
- Kernel kernel,
- @Nullable InvocationContext invocationContext,
- ChatCompletionsFunctionToolCall toolCall,
- ContextVariableTypes contextVariableTypes) {
-
- try {
- FunctionCallContent functionCallContent = extractFunctionCallContent(toolCall);
- String pluginName = functionCallContent.getPluginName();
- if (pluginName == null || pluginName.isEmpty()) {
- return Mono.error(
- new SKException("Plugin name is required for function tool call"));
- }
-
- KernelFunction> function = kernel.getFunction(
- pluginName,
- functionCallContent.getFunctionName());
-
- PreToolCallEvent hookResult = executeHook(invocationContext, kernel,
- new PreToolCallEvent(
- functionCallContent.getFunctionName(),
- functionCallContent.getArguments(),
- function,
- contextVariableTypes));
-
- function = hookResult.getFunction();
- KernelArguments arguments = hookResult.getArguments();
-
- return function
- .invokeAsync(kernel)
- .withArguments(arguments)
- .withTypes(invocationContext.getContextVariableTypes())
- .withTypes(contextVariableTypes)
- .withResultType(contextVariableTypes.getVariableTypeForClass(String.class));
- } catch (JsonProcessingException e) {
- return Mono.error(new SKException("Failed to parse tool arguments", e));
- }
- }
-
- private static T executeHook(
- @Nullable InvocationContext invocationContext,
- @Nullable Kernel kernel,
- T event) {
- KernelHooks kernelHooks = null;
- if (kernel == null) {
- if (invocationContext != null) {
- kernelHooks = invocationContext.getKernelHooks();
- }
- } else {
- kernelHooks = KernelHooks.merge(
- kernel.getGlobalKernelHooks(),
- invocationContext != null ? invocationContext.getKernelHooks() : null);
- }
- if (kernelHooks == null) {
- return event;
- }
- return kernelHooks.executeHooks(event);
- }
-
- @SuppressWarnings("StringSplitter")
- private FunctionCallContent extractFunctionCallContent(
- ChatCompletionsFunctionToolCall toolCall)
- throws JsonProcessingException {
-
- // Split the full name of a function into plugin and function name
- String name = toolCall.getFunction().getName();
- String[] parts = name.split(OpenAIFunction.getNameSeparator());
- String pluginName = parts.length > 1 ? parts[0] : "";
- String fnName = parts.length > 1 ? parts[1] : parts[0];
-
- KernelArguments arguments = KernelArguments.builder().build();
-
- ObjectMapper mapper = new ObjectMapper();
- JsonNode jsonToolCallArguments = mapper.readTree(toolCall.getFunction().getArguments());
-
- jsonToolCallArguments.fields().forEachRemaining(
- entry -> {
- if (entry.getValue() instanceof ContainerNode) {
- arguments.put(entry.getKey(),
- ContextVariable.of(entry.getValue().toPrettyString()));
- } else {
- arguments.put(entry.getKey(),
- ContextVariable.of(entry.getValue().asText()));
- }
- });
-
- return new FunctionCallContent(
- fnName,
- pluginName,
- toolCall.getId(),
- arguments);
- }
-
- private List> getChatMessageContentsAsync(
- ChatCompletions completions) {
- FunctionResultMetadata completionMetadata = FunctionResultMetadata.build(
- completions.getId(),
- completions.getUsage(),
- completions.getCreatedAt());
-
- List responseMessages = completions
- .getChoices()
- .stream()
- .map(ChatChoice::getMessage)
- .filter(Objects::nonNull)
- .collect(Collectors.toList());
-
- List> chatMessageContent = responseMessages
- .stream()
- .map(response -> {
- try {
- return new OpenAIChatMessageContent<>(
- AuthorRole.ASSISTANT,
- response.getContent(),
- this.getModelId(),
- null,
- null,
- completionMetadata,
- formFunctionCallContents(response));
- } catch (SKCheckedException e) {
- LOGGER.warn("Failed to form chat message content", e);
- return null;
- }
- })
- .filter(Objects::nonNull)
- .collect(Collectors.toList());
-
- return chatMessageContent;
- }
-
- private List> toOpenAIChatMessageContent(
- List requestMessages) throws SKCheckedException {
- try {
- return requestMessages
- .stream()
- .map(message -> {
- if (message instanceof ChatRequestUserMessage) {
- return new OpenAIChatMessageContent<>(
- AuthorRole.USER,
- BinaryDataUtils
- .toString(((ChatRequestUserMessage) message).getContent()),
- null,
- null,
- null,
- null,
- null);
- } else if (message instanceof ChatRequestSystemMessage) {
- return new OpenAIChatMessageContent<>(
- AuthorRole.SYSTEM,
- BinaryDataUtils
- .toString(((ChatRequestSystemMessage) message).getContent()),
- null,
- null,
- null,
- null,
- null);
- } else if (message instanceof ChatRequestAssistantMessage) {
- try {
- List calls = getFunctionCallContents(
- ((ChatRequestAssistantMessage) message).getToolCalls());
- return new OpenAIChatMessageContent<>(
- AuthorRole.ASSISTANT,
- BinaryDataUtils
- .toString(((ChatRequestAssistantMessage) message).getContent()),
- null,
- null,
- null,
- null,
- calls);
- } catch (SKCheckedException e) {
- throw SKException.build("Failed to form assistant message", e);
- }
- } else if (message instanceof ChatRequestToolMessage) {
- return new OpenAIChatMessageContent<>(
- AuthorRole.TOOL,
- BinaryDataUtils
- .toString(((ChatRequestToolMessage) message).getContent()),
- null,
- null,
- null,
- FunctionResultMetadata.build(
- ((ChatRequestToolMessage) message).getToolCallId(),
- null,
- null),
- null);
- }
-
- throw new SKException(
- "Unknown message type: " + message.getClass().getSimpleName());
- })
- .collect(Collectors.toList());
- } catch (SKException e) {
- throw SKCheckedException.build("Failed to form OpenAI chat message content", e);
- }
- }
-
- @Nullable
- private List getFunctionCallContents(
- @Nullable List toolCalls) throws SKCheckedException {
- if (toolCalls == null || toolCalls.isEmpty()) {
- return null;
- }
-
- try {
- return toolCalls
- .stream()
- .map(call -> {
- if (call instanceof ChatCompletionsFunctionToolCall) {
- try {
- return extractFunctionCallContent(
- (ChatCompletionsFunctionToolCall) call);
- } catch (JsonProcessingException e) {
- throw SKException.build("Failed to parse tool arguments", e);
- }
- } else {
- throw new SKException(
- "Unknown tool call type: " + call.getClass().getSimpleName());
- }
- })
- .collect(Collectors.toList());
- } catch (SKException e) {
- throw SKCheckedException.build("Failed to form tool call", e);
- }
- }
-
- @Nullable
- private List formFunctionCallContents(
- ChatResponseMessage response) throws SKCheckedException {
- if (response.getToolCalls() == null || response.getToolCalls().isEmpty()) {
- return null;
- }
- try {
- return response
- .getToolCalls()
- .stream()
- .map(call -> {
- if (call instanceof ChatCompletionsFunctionToolCall) {
- try {
- return extractFunctionCallContent(
- (ChatCompletionsFunctionToolCall) call);
- } catch (JsonProcessingException e) {
- throw SKException.build("Failed to parse tool arguments", e);
- }
- } else {
- return null;
- }
- })
- .filter(Objects::nonNull)
- .collect(Collectors.toList());
- } catch (SKException e) {
- throw SKCheckedException.build("Failed to form tool call", e);
- }
- }
-
- private static ChatCompletionsOptions getCompletionsOptions(
- ChatCompletionService chatCompletionService,
- List chatRequestMessages,
- @Nullable InvocationContext invocationContext,
- @Nullable OpenAIToolCallConfig toolCallConfig) {
-
- chatRequestMessages = chatRequestMessages
- .stream()
- .map(OpenAiXMLPromptParser::unescapeRequest)
- .collect(Collectors.toList());
-
- ChatCompletionsOptions options = new ChatCompletionsOptions(chatRequestMessages)
- .setModel(chatCompletionService.getModelId());
-
- if (toolCallConfig != null) {
- options.setTools(toolCallConfig.getTools());
- options.setToolChoice(toolCallConfig.getToolChoice());
-
- if (toolCallConfig.getOptions() != null) {
- options.setParallelToolCalls(toolCallConfig.getOptions().isParallelCallsAllowed());
- }
- }
-
- PromptExecutionSettings promptExecutionSettings = invocationContext != null
- ? invocationContext.getPromptExecutionSettings()
- : null;
-
- if (promptExecutionSettings == null) {
- return options;
- }
-
- if (promptExecutionSettings.getResultsPerPrompt() < 1
- || promptExecutionSettings.getResultsPerPrompt() > MAX_RESULTS_PER_PROMPT) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- String.format("Results per prompt must be in range between 1 and %d, inclusive.",
- MAX_RESULTS_PER_PROMPT));
- }
-
- Map logit = null;
- if (promptExecutionSettings.getTokenSelectionBiases() != null) {
- logit = promptExecutionSettings
- .getTokenSelectionBiases()
- .entrySet()
- .stream()
- .collect(Collectors.toMap(
- entry -> entry.getKey().toString(),
- Map.Entry::getValue));
- }
-
- options
- .setTemperature(promptExecutionSettings.getTemperature())
- .setTopP(promptExecutionSettings.getTopP())
- .setPresencePenalty(promptExecutionSettings.getPresencePenalty())
- .setFrequencyPenalty(promptExecutionSettings.getFrequencyPenalty())
- .setPresencePenalty(promptExecutionSettings.getPresencePenalty())
- .setMaxTokens(promptExecutionSettings.getMaxTokens())
- .setN(promptExecutionSettings.getResultsPerPrompt())
- // Azure OpenAI WithData API does not allow to send empty array of stop sequences
- // Gives back "Validation error at #/stop/str: Input should be a valid string\nValidation error at #/stop/list[str]: List should have at least 1 item after validation, not 0"
- .setStop(promptExecutionSettings.getStopSequences() == null
- || promptExecutionSettings.getStopSequences().isEmpty() ? null
- : promptExecutionSettings.getStopSequences())
- .setUser(promptExecutionSettings.getUser())
- .setLogitBias(logit);
-
- if (promptExecutionSettings.getResponseFormat() != null) {
- switch (promptExecutionSettings.getResponseFormat().getType()) {
- case JSON_SCHEMA:
- JsonResponseSchema schema = ((JsonSchemaResponseFormat) promptExecutionSettings
- .getResponseFormat())
- .getJsonSchema();
-
- options.setResponseFormat(new ChatCompletionsJsonSchemaResponseFormat(schema));
- break;
- case JSON_OBJECT:
- options.setResponseFormat(new ChatCompletionsJsonResponseFormat());
- break;
- case TEXT:
- options.setResponseFormat(new ChatCompletionsTextResponseFormat());
- break;
-
- default:
- throw new SKException(
- "Unknown response format: " + promptExecutionSettings.getResponseFormat());
- }
- }
-
- return options;
- }
-
- @Nullable
- private static OpenAIToolCallConfig getToolCallConfig(
- @Nullable InvocationContext invocationContext,
- @Nullable List functions,
- List chatRequestMessages,
- int requestIndex) {
-
- if (invocationContext == null || functions == null || functions.isEmpty()) {
- return null;
- }
-
- if (invocationContext.getFunctionChoiceBehavior() == null
- && invocationContext.getToolCallBehavior() == null) {
- return null;
- }
-
- if (invocationContext.getFunctionChoiceBehavior() != null) {
- return getFunctionChoiceBehaviorConfig(
- invocationContext.getFunctionChoiceBehavior(),
- functions,
- requestIndex);
- } else {
- return getToolCallBehaviorConfig(
- invocationContext.getToolCallBehavior(),
- functions,
- chatRequestMessages,
- requestIndex);
- }
- }
-
- @Nullable
- private static OpenAIToolCallConfig getFunctionChoiceBehaviorConfig(
- @Nullable FunctionChoiceBehavior functionChoiceBehavior,
- @Nullable List functions,
- int requestIndex) {
- if (functionChoiceBehavior == null) {
- return null;
- }
-
- if (functions == null || functions.isEmpty()) {
- return null;
- }
-
- ChatCompletionsToolSelection toolChoice;
- boolean autoInvoke;
-
- if (functionChoiceBehavior instanceof RequiredFunctionChoiceBehavior) {
- // After first request a required function must have been called already
- if (requestIndex >= 1) {
- return null;
- }
-
- toolChoice = new ChatCompletionsToolSelection(
- ChatCompletionsToolSelectionPreset.REQUIRED);
- autoInvoke = ((RequiredFunctionChoiceBehavior) functionChoiceBehavior).isAutoInvoke();
- } else if (functionChoiceBehavior instanceof AutoFunctionChoiceBehavior) {
- toolChoice = new ChatCompletionsToolSelection(ChatCompletionsToolSelectionPreset.AUTO);
- autoInvoke = ((AutoFunctionChoiceBehavior) functionChoiceBehavior).isAutoInvoke()
- && requestIndex < MAXIMUM_INFLIGHT_AUTO_INVOKES;
- } else if (functionChoiceBehavior instanceof NoneFunctionChoiceBehavior) {
- toolChoice = new ChatCompletionsToolSelection(ChatCompletionsToolSelectionPreset.NONE);
- autoInvoke = false;
- } else {
- throw new SKException(
- "Unsupported function choice behavior: " + functionChoiceBehavior);
- }
-
- // List of functions advertised to the model
- List toolDefinitions = functions.stream()
- .filter(function -> functionChoiceBehavior.isFunctionAllowed(function.getPluginName(),
- function.getName()))
- .map(OpenAIFunction::getFunctionDefinition)
- .map(it -> new ChatCompletionsFunctionToolDefinitionFunction(it.getName())
- .setDescription(it.getDescription())
- .setParameters(it.getParameters()))
- .map(ChatCompletionsFunctionToolDefinition::new)
- .collect(Collectors.toList());
-
- return new OpenAIToolCallConfig(
- toolDefinitions,
- toolChoice,
- autoInvoke,
- functionChoiceBehavior.getOptions());
- }
-
- @Nullable
- private static OpenAIToolCallConfig getToolCallBehaviorConfig(
- @Nullable ToolCallBehavior toolCallBehavior,
- @Nullable List functions,
- List chatRequestMessages,
- int requestIndex) {
-
- if (toolCallBehavior == null) {
- return null;
- }
-
- if (functions == null || functions.isEmpty()) {
- return null;
- }
-
- List toolDefinitions;
- ChatCompletionsToolSelection toolChoice;
-
- // If a specific function is required to be called
- if (toolCallBehavior instanceof ToolCallBehavior.RequiredKernelFunction) {
- KernelFunction> requiredFunction = ((ToolCallBehavior.RequiredKernelFunction) toolCallBehavior)
- .getRequiredFunction();
-
- String toolChoiceName = String.format("%s%s%s",
- requiredFunction.getPluginName(),
- OpenAIFunction.getNameSeparator(),
- requiredFunction.getName());
-
- // If required tool call has already been called dont ask for it again
- boolean hasBeenExecuted = hasToolCallBeenExecuted(chatRequestMessages, toolChoiceName);
- if (hasBeenExecuted) {
- return null;
- }
-
- FunctionDefinition function = OpenAIFunction.toFunctionDefinition(
- requiredFunction.getMetadata(),
- requiredFunction.getPluginName());
-
- toolDefinitions = new ArrayList<>();
- toolDefinitions.add(new ChatCompletionsFunctionToolDefinition(
- new ChatCompletionsFunctionToolDefinitionFunction(function.getName())
- .setDescription(function.getDescription())
- .setParameters(function.getParameters())));
-
- try {
- String json = String.format(
- "{\"type\":\"function\",\"function\":{\"name\":\"%s\"}}", toolChoiceName);
-
- toolChoice = new ChatCompletionsToolSelection(
- ChatCompletionsNamedToolSelection.fromJson(
- DefaultJsonReader.fromString(
- json,
- new JsonOptions())));
- } catch (JsonProcessingException e) {
- throw SKException.build("Failed to parse tool choice", e);
- } catch (IOException e) {
- throw new SKException(e);
- }
- }
- // If a set of functions are enabled to be called
- else {
- toolChoice = new ChatCompletionsToolSelection(ChatCompletionsToolSelectionPreset.AUTO);
-
- ToolCallBehavior.AllowedKernelFunctions enabledKernelFunctions = (ToolCallBehavior.AllowedKernelFunctions) toolCallBehavior;
- toolDefinitions = functions.stream()
- .filter(function -> {
- // check if all kernel functions are enabled
- if (enabledKernelFunctions.isAllKernelFunctionsAllowed()) {
- return true;
- }
- // otherwise, check for the specific function
- return enabledKernelFunctions.isFunctionAllowed(function.getPluginName(),
- function.getName());
- })
- .map(OpenAIFunction::getFunctionDefinition)
- .map(it -> new ChatCompletionsFunctionToolDefinitionFunction(it.getName())
- .setDescription(it.getDescription())
- .setParameters(it.getParameters()))
- .map(ChatCompletionsFunctionToolDefinition::new)
- .collect(Collectors.toList());
-
- if (toolDefinitions.isEmpty()) {
- return null;
- }
- }
-
- return new OpenAIToolCallConfig(
- toolDefinitions,
- toolChoice,
- toolCallBehavior.isAutoInvokeAllowed()
- && requestIndex < Math.min(MAXIMUM_INFLIGHT_AUTO_INVOKES,
- toolCallBehavior.getMaximumAutoInvokeAttempts()),
- null);
- }
-
- private static boolean hasToolCallBeenExecuted(List chatRequestMessages,
- String toolChoiceName) {
- return chatRequestMessages
- .stream()
- .flatMap(message -> {
- // Extract tool calls
- if (message instanceof ChatRequestAssistantMessage) {
- return ((ChatRequestAssistantMessage) message).getToolCalls().stream();
- }
- return Stream.empty();
- })
- .filter(toolCall -> {
- // Filter if tool call has correct name
- if (toolCall instanceof ChatCompletionsFunctionToolCall) {
- return ((ChatCompletionsFunctionToolCall) toolCall).getFunction().getName()
- .equals(toolChoiceName);
- }
- return false;
- })
- .allMatch(toolcall -> {
- String id = toolcall.getId();
- // True if tool call id has a response message
- return chatRequestMessages
- .stream()
- .filter(
- chatRequestMessage -> chatRequestMessage instanceof ChatRequestToolMessage)
- .anyMatch(
- chatRequestMessage -> ((ChatRequestToolMessage) chatRequestMessage)
- .getToolCallId()
- .equals(id));
- });
- }
-
- private static List getChatRequestMessages(
- List extends ChatMessageContent>> messages) {
- if (messages == null || messages.isEmpty()) {
- return new ArrayList<>();
- }
- return messages.stream()
- .map(OpenAIChatCompletion::getChatRequestMessage)
- .collect(Collectors.toList());
- }
-
- private static List getChatRequestMessages(ChatHistory chatHistory) {
- return getChatRequestMessages(chatHistory.getMessages());
- }
-
- private static ChatRequestMessage getChatRequestMessage(
- ChatMessageContent> message) {
-
- AuthorRole authorRole = message.getAuthorRole();
- String content = message.getContent();
-
- if (message.getContentType() == ChatMessageContentType.IMAGE_URL && content != null) {
- return formImageMessage(message, content);
- }
-
- switch (authorRole) {
- case ASSISTANT:
- return formAssistantMessage(message, content);
- case SYSTEM:
- return new ChatRequestSystemMessage(content);
- case USER:
- return new ChatRequestUserMessage(content);
- case TOOL:
- String id = null;
-
- if (message.getMetadata() != null) {
- id = message.getMetadata().getId();
- }
-
- if (id == null) {
- throw new SKException(
- "Require to create a tool call message, but no tool call id is available");
- }
- return new ChatRequestToolMessage(content, id);
- default:
- LOGGER.debug("Unexpected author role: {}", authorRole);
- throw new SKException("Unexpected author role: " + authorRole);
- }
- }
-
- private static ChatRequestUserMessage formImageMessage(ChatMessageContent> message,
- String content) {
- ChatMessageImageUrl imageUrl = new ChatMessageImageUrl(content);
- if (message instanceof ChatMessageImageContent) {
- ChatMessageImageDetailLevel detail = ChatMessageImageDetailLevel.fromString(
- ((ChatMessageImageContent>) message).getDetail().toString());
- imageUrl.setDetail(detail);
- }
-
- return new ChatRequestUserMessage(
- Collections.singletonList(new ChatMessageImageContentItem(imageUrl)));
- }
-
- private static ChatRequestAssistantMessage formAssistantMessage(
- ChatMessageContent> message,
- @Nullable String content) {
- // TODO: handle tools other than function calls
- ChatRequestAssistantMessage asstMessage = new ChatRequestAssistantMessage(content);
-
- List toolCalls = FunctionCallContent.getFunctionCalls(message);
-
- if (toolCalls != null) {
- asstMessage.setToolCalls(
- toolCalls.stream()
- .map(toolCall -> {
- KernelArguments arguments = toolCall.getArguments();
-
- String args = arguments != null && !arguments.isEmpty()
- ? arguments.entrySet().stream()
- .map(entry -> String.format("\"%s\": \"%s\"",
- StringEscapeUtils.escapeJson(entry.getKey()),
- StringEscapeUtils.escapeJson(
- entry.getValue().toPromptString())))
- .collect(Collectors.joining(",", "{", "}"))
- : "{}";
-
- String prefix = "";
- if (toolCall.getPluginName() != null) {
- prefix = toolCall.getPluginName() + OpenAIFunction.getNameSeparator();
- }
- String name = prefix + toolCall.getFunctionName();
-
- FunctionCall fnCall = new FunctionCall(name, args);
- return new ChatCompletionsFunctionToolCall(toolCall.getId(),
- fnCall);
- })
- .collect(Collectors.toList()));
- }
- return asstMessage;
- }
-
- static ChatRequestMessage getChatRequestMessage(
- AuthorRole authorRole,
- String content) {
-
- switch (authorRole) {
- case ASSISTANT:
- return new ChatRequestAssistantMessage(content);
- case SYSTEM:
- return new ChatRequestSystemMessage(content);
- case USER:
- return new ChatRequestUserMessage(content);
- case TOOL:
- return new ChatRequestToolMessage(content, null);
- default:
- LOGGER.debug("Unexpected author role: " + authorRole);
- throw new SKException("Unexpected author role: " + authorRole);
- }
-
- }
-
- /**
- * Builder for creating a new instance of {@link OpenAIChatCompletion}.
- */
- public static class Builder
- extends OpenAiServiceBuilder {
-
- @Override
- public OpenAIChatCompletion build() {
-
- if (this.client == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "OpenAI client must be provided");
- }
-
- if (this.modelId == null || modelId.isEmpty()) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "OpenAI model id must be provided");
- }
-
- if (deploymentName == null) {
- LOGGER.debug("Deployment name is not provided, using model id as deployment name");
- deploymentName = modelId;
- }
-
- return new OpenAIChatCompletion(client, deploymentName, modelId, serviceId);
- }
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatMessageContent.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatMessageContent.java
deleted file mode 100644
index ed1e28329..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatMessageContent.java
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.microsoft.semantickernel.contents.FunctionCallContent;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.services.KernelContent;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatMessageContent;
-import java.nio.charset.Charset;
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-
-/**
- * Represents the content of a chat message.
- *
- * @param The type of the inner content.
- */
-public class OpenAIChatMessageContent extends ChatMessageContent {
-
- @Deprecated
- @Nullable
- private final List toolCall;
-
- /**
- * Creates a new instance of the {@link OpenAIChatMessageContent} class.
- *
- * @param authorRole The author role that generated the content.
- * @param content The content.
- * @param modelId The model id.
- * @param innerContent The inner content.
- * @param encoding The encoding.
- * @param metadata The metadata.
- * @param functionCalls The tool call.
- */
- public OpenAIChatMessageContent(
- AuthorRole authorRole,
- String content,
- @Nullable String modelId,
- @Nullable T innerContent,
- @Nullable Charset encoding,
- @Nullable FunctionResultMetadata> metadata,
- @Nullable List extends FunctionCallContent> functionCalls) {
- super(authorRole, content, (List extends KernelContent>) functionCalls, modelId,
- innerContent, encoding, metadata);
-
- if (functionCalls == null) {
- this.toolCall = null;
- } else {
- // Keep OpenAIFunctionToolCall list for legacy
- this.toolCall = Collections.unmodifiableList(functionCalls.stream().map(t -> {
- if (t instanceof OpenAIFunctionToolCall) {
- return (OpenAIFunctionToolCall) t;
- } else {
- return new OpenAIFunctionToolCall(
- t.getId(),
- t.getPluginName(),
- t.getFunctionName(),
- t.getArguments());
- }
- }).collect(Collectors.toList()));
- }
- }
-
- /**
- * Gets any tool calls requested.
- *
- * @return The tool call.
- *
- * @deprecated Use {@link FunctionCallContent#getFunctionCalls(ChatMessageContent)} instead.
- */
- @Deprecated
- @Nullable
- public List getToolCall() {
- return toolCall;
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatResponse.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatResponse.java
deleted file mode 100644
index 6f06af4e0..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIChatResponse.java
+++ /dev/null
@@ -1,156 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.List;
-
-/**
- * Represents the response from the OpenAI chat completion API.
- */
-public interface OpenAIChatResponse {
-
- /**
- * Represents the usage of the chat completion API.
- */
- interface Usage {
-
- /**
- * Gets the number of tokens used for the prompt.
- *
- * @return the number of tokens used for the prompt
- */
- @JsonProperty("prompt_tokens")
- int getPromptTokens();
-
- /**
- * Gets the number of tokens used for the completion.
- *
- * @return the number of tokens used for the completion
- */
- @JsonProperty("completion_tokens")
- int getCompletionTokens();
-
- /**
- * Gets the total number of tokens used.
- *
- * @return the total number of tokens used
- */
- @JsonProperty("total_tokens")
- int getTotalTokens();
- }
-
- /**
- * Represents a choice in the chat completion response.
- */
- interface Choice {
-
- /**
- * Gets the message in the chat completion response.
- *
- * @return the message in the chat completion response
- */
- @JsonProperty("message")
- Message getMessage();
-
- /**
- * Gets the finish details in the chat completion response.
- *
- * @return the finish details in the chat completion response
- */
- @JsonProperty("finish_details")
- FinishDetails getFinishDetails();
-
- /**
- * Gets the index of the choice.
- *
- * @return the index of the choice
- */
- @JsonProperty("index")
- int getIndex();
- }
-
- /**
- * Represents a message in the chat completion response.
- */
- interface Message {
-
- /**
- * Gets the role of the message.
- *
- * @return the role of the message
- */
- @JsonProperty("role")
- String getRole();
-
- /**
- * Gets the content of the message.
- *
- * @return the content of the message
- */
- @JsonProperty("content")
- String getContent();
- }
-
- /**
- * Represents the finish details in the chat completion response.
- */
- interface FinishDetails {
-
- /**
- * Gets the type of the finish details.
- *
- * @return the type of the finish details
- */
- @JsonProperty("type")
- String getType();
- }
-
- /**
- * Gets the id of the chat completion response.
- *
- * @return the id of the chat completion response
- */
- @JsonProperty("id")
- String getId();
-
- /**
- * Gets the object of the chat completion response.
- *
- * @return the object of the chat completion response
- */
- @JsonProperty("object")
- String getObject();
-
- /**
- * Gets the created time of the chat completion response.
- *
- * @return the created time of the chat completion response
- */
- @JsonProperty("created")
- Long getCreated();
-
- /**
- * Gets the model of the chat completion response.
- *
- * @return the model of the chat completion response
- */
- @JsonProperty("model")
- String getModel();
-
- /**
- * Gets the usage of the chat completion response.
- *
- * @return the usage of the chat completion response
- */
- @JsonProperty("Usage")
- List getUsage();
-
- /**
- * Gets the choices of the chat completion response.
- *
- * @return the choices of the chat completion response
- */
- @JsonProperty("choices")
- List getChoices();
-
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIFunction.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIFunction.java
deleted file mode 100644
index cf126d095..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIFunction.java
+++ /dev/null
@@ -1,246 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.ai.openai.models.FunctionDefinition;
-import com.azure.core.util.BinaryData;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.orchestration.responseformat.ResponseSchemaGenerator;
-import com.microsoft.semantickernel.semanticfunctions.InputVariable;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunctionMetadata;
-import org.apache.commons.lang3.StringUtils;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.stream.Collectors;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-class OpenAIFunction {
-
- private final String pluginName;
- private final String name;
- private final FunctionDefinition functionDefinition;
-
- protected OpenAIFunction(
- @Nonnull String name,
- @Nonnull String pluginName,
- @Nonnull FunctionDefinition functionDefinition) {
- this.name = name;
- this.pluginName = pluginName;
- this.functionDefinition = functionDefinition;
- }
-
- public static OpenAIFunction build(KernelFunctionMetadata> metadata, String pluginName) {
- String name = metadata.getName();
- FunctionDefinition functionDefinition = toFunctionDefinition(metadata, pluginName);
- return new OpenAIFunction(name, pluginName, functionDefinition);
- }
-
- public String getName() {
- return name;
- }
-
- public String getPluginName() {
- return pluginName;
- }
-
- public FunctionDefinition getFunctionDefinition() {
- return functionDefinition;
- }
-
- /**
- * Gets the separator used between the plugin name and the function name, if a plugin name is
- * present.
- *
- * @return The separator used between the plugin name and the function name.
- */
- public static String getNameSeparator() {
- return "-";
- }
-
- /**
- * Gets the fully-qualified name of the function.
- *
- * @return The fully-qualified name of the function.
- */
- private static String getFullyQualifiedName(
- @Nullable String pluginName, String functionName) {
- return (pluginName == null || pluginName.isEmpty()) ? functionName
- : pluginName + getNameSeparator() + functionName;
- }
-
- /**
- * Converts a KernelFunctionMetadata representation to the SDK's FunctionDefinition
- * representation.
- *
- * @return A FunctionDefinition containing all the function information.
- */
- public static FunctionDefinition toFunctionDefinition(KernelFunctionMetadata> metadata,
- @Nullable String pluginName) {
- BinaryData resultParameters;
-
- Map properties = new HashMap<>();
- List required = new ArrayList<>();
-
- try {
- ObjectMapper objectMapper = new ObjectMapper();
- for (InputVariable parameter : metadata.getParameters()) {
- String parameterJsonSchema = getSchemaForFunctionParameter(parameter);
-
- properties.put(parameter.getName(), objectMapper.readTree(parameterJsonSchema));
-
- if (parameter.isRequired()) {
- required.add(parameter.getName());
- }
- }
-
- String json = objectMapper
- .writeValueAsString(new OpenAIFunctionParameter("object", required, properties));
- resultParameters = BinaryData.fromObject(objectMapper.readTree(json));
- } catch (JsonProcessingException e) {
- throw new RuntimeException(e);
- }
-
- FunctionDefinition functionDefinition = new FunctionDefinition(
- getFullyQualifiedName(pluginName, metadata.getName()));
- functionDefinition.setDescription(metadata.getDescription());
- functionDefinition.setParameters(resultParameters);
-
- return functionDefinition;
- }
-
- private static class OpenAIFunctionParameter {
-
- @JsonProperty("type")
- private String type;
- @JsonProperty("required")
- private List required;
- @JsonProperty("properties")
- private Map properties;
-
- public OpenAIFunctionParameter(
- String type,
- List required,
- Map properties) {
- this.type = type;
- this.required = Collections.unmodifiableList(required);
- this.properties = Collections.unmodifiableMap(properties);
- }
-
- @SuppressWarnings("UnusedMethod")
- public String getType() {
- return type;
- }
-
- @SuppressWarnings("UnusedMethod")
- public List getRequired() {
- return required;
- }
-
- @SuppressWarnings("UnusedMethod")
- public Map getProperties() {
- return properties;
- }
- }
-
- private static String getSchemaForFunctionParameter(@Nullable InputVariable parameter) {
- List entries = new ArrayList<>();
-
- String type = "string";
-
- if (parameter != null && parameter.getType() != null) {
- type = getJavaTypeToOpenAiFunctionType(parameter.getType());
- }
-
- entries.add("\"type\":\"" + type + "\"");
-
- // Add description if present
- String description = null;
- if (parameter != null && parameter.getDescription() != null && !parameter.getDescription()
- .isEmpty()) {
- description = parameter.getDescription();
- description = description.replaceAll("\\r?\\n|\\r", "");
- description = description.replace("\"", "\\\"");
- entries.add(String.format("\"description\":\"%s\"", description));
- }
- // If custom type, generate schema
- if ("object".equalsIgnoreCase(type)) {
- return getObjectSchema(parameter.getType(), description);
- }
-
- // Add enum options if parameter is an enum
- if (parameter != null && parameter.getEnumValues() != null && !parameter.getEnumValues()
- .isEmpty()) {
- String enumEntry = parameter
- .getEnumValues()
- .stream()
- .map(Object::toString)
- .map(it -> "\"" + it + "\"")
- .collect(Collectors.joining(","));
-
- entries.add("\"enum\":[ " + enumEntry + " ]");
- }
-
- String schema = String.join(",", entries);
-
- return "{" + schema + "}";
- }
-
- private static String getJavaTypeToOpenAiFunctionType(String javaType) {
- switch (javaType.toLowerCase(Locale.ROOT)) {
- case "java.lang.boolean":
- case "boolean":
- return "boolean";
- case "java.lang.integer":
- case "integer":
- case "int":
- case "java.lang.long":
- case "long":
- case "java.lang.short":
- case "short":
- case "java.lang.byte":
- case "byte":
- return "integer";
- case "java.lang.double":
- case "double":
- case "java.lang.float":
- case "float":
- return "number";
- case "java.lang.string":
- case "string":
- return "string";
- case "array":
- return "array";
- case "java.lang.void":
- case "void":
- return "null";
- default:
- return "object";
- }
- }
-
- private static String getObjectSchema(String type, String description) {
- String schema = "{ \"type\" : \"object\" }";
- try {
- Class> clazz = Class.forName(type);
- schema = ResponseSchemaGenerator.jacksonGenerator().generateSchema(clazz);
-
- } catch (ClassNotFoundException | SKException ignored) {
-
- }
- Map properties = BinaryData.fromString(schema).toObject(Map.class);
- if (StringUtils.isNotBlank(description)) {
- properties.put("description", description);
- }
- return BinaryData.fromObject(properties).toString();
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIFunctionToolCall.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIFunctionToolCall.java
deleted file mode 100644
index c1def3379..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIFunctionToolCall.java
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.microsoft.semantickernel.contents.FunctionCallContent;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import javax.annotation.Nullable;
-
-/**
- * Represents a call to a function in the OpenAI tool.
- *
- * @deprecated Use {@link FunctionCallContent} instead.
- */
-@Deprecated
-public class OpenAIFunctionToolCall extends FunctionCallContent {
-
- /**
- * Creates a new instance of the {@link OpenAIFunctionToolCall} class.
- *
- * @param id The ID of the tool call.
- * @param pluginName The name of the plugin with which this function is associated, if any.
- * @param functionName The name of the function.
- * @param arguments A name/value collection of the arguments to the function, if any.
- */
- public OpenAIFunctionToolCall(
- @Nullable String id,
- @Nullable String pluginName,
- String functionName,
- @Nullable KernelArguments arguments) {
- super(functionName, pluginName, id, arguments);
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIStreamingChatMessageContent.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIStreamingChatMessageContent.java
deleted file mode 100644
index c919f5c6e..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIStreamingChatMessageContent.java
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.StreamingChatContent;
-import java.nio.charset.Charset;
-import java.util.List;
-import javax.annotation.Nullable;
-
-/**
- * Represents the content of a chat message.
- *
- * @param The type of the inner content.
- */
-public class OpenAIStreamingChatMessageContent extends OpenAIChatMessageContent implements
- StreamingChatContent {
-
- private final String id;
-
- /**
- * Creates a new instance of the {@link OpenAIChatMessageContent} class.
- *
- * @param id The id of the message.
- * @param authorRole The author role that generated the content.
- * @param content The content.
- * @param modelId The model id.
- * @param innerContent The inner content.
- * @param encoding The encoding.
- * @param metadata The metadata.
- * @param toolCall The tool call.
- */
- public OpenAIStreamingChatMessageContent(
- String id,
- AuthorRole authorRole,
- String content,
- @Nullable String modelId,
- @Nullable T innerContent,
- @Nullable Charset encoding,
- @Nullable FunctionResultMetadata metadata,
- @Nullable List toolCall) {
- super(
- authorRole,
- content,
- modelId,
- innerContent,
- encoding,
- metadata,
- toolCall);
-
- this.id = id;
- }
-
- @Override
- public String getId() {
- return id;
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIToolCallConfig.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIToolCallConfig.java
deleted file mode 100644
index 454ed3ce1..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAIToolCallConfig.java
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.ai.openai.models.ChatCompletionsToolDefinition;
-import com.azure.ai.openai.models.ChatCompletionsToolSelection;
-import com.microsoft.semantickernel.functionchoice.FunctionChoiceBehaviorOptions;
-import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
-
-import javax.annotation.Nullable;
-import java.util.Collections;
-import java.util.List;
-
-public class OpenAIToolCallConfig {
- private final List tools;
- private final ChatCompletionsToolSelection toolChoice;
- private final boolean autoInvoke;
- @Nullable
- private final FunctionChoiceBehaviorOptions options;
-
- /**
- * Creates a new instance of the {@link OpenAIToolCallConfig} class.
- *
- * @param tools The list of tools available for the call.
- * @param toolChoice The tool selection strategy.
- * @param autoInvoke Indicates whether to automatically invoke the tool.
- * @param options Additional options for function choice behavior.
- */
- @SuppressFBWarnings("EI_EXPOSE_REP2")
- public OpenAIToolCallConfig(
- List tools,
- ChatCompletionsToolSelection toolChoice,
- boolean autoInvoke,
- @Nullable FunctionChoiceBehaviorOptions options) {
- this.tools = tools;
- this.toolChoice = toolChoice;
- this.autoInvoke = autoInvoke;
- this.options = options;
- }
-
- /**
- * Gets the list of tools available for the call.
- *
- * @return The list of tools.
- */
- public List getTools() {
- return Collections.unmodifiableList(tools);
- }
-
- /**
- * Gets the tool selection strategy.
- *
- * @return The tool selection strategy.
- */
- public ChatCompletionsToolSelection getToolChoice() {
- return toolChoice;
- }
-
- /**
- * Indicates whether to automatically invoke the tool.
- *
- * @return True if auto-invocation is enabled; otherwise, false.
- */
- public boolean isAutoInvoke() {
- return autoInvoke;
- }
-
- /**
- * Gets additional options for function choice behavior.
- *
- * @return The function choice behavior options.
- */
- public FunctionChoiceBehaviorOptions getOptions() {
- return options;
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAiXMLPromptParser.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAiXMLPromptParser.java
deleted file mode 100644
index 4f5fd99cc..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAiXMLPromptParser.java
+++ /dev/null
@@ -1,163 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.ai.openai.models.ChatRequestAssistantMessage;
-import com.azure.ai.openai.models.ChatRequestFunctionMessage;
-import com.azure.ai.openai.models.ChatRequestMessage;
-import com.azure.ai.openai.models.ChatRequestSystemMessage;
-import com.azure.ai.openai.models.ChatRequestToolMessage;
-import com.azure.ai.openai.models.ChatRequestUserMessage;
-import com.azure.ai.openai.models.FunctionDefinition;
-import com.azure.core.util.BinaryData;
-import com.microsoft.semantickernel.exceptions.SKException;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.implementation.chatcompletion.ChatPromptParseVisitor;
-import com.microsoft.semantickernel.implementation.chatcompletion.ChatXMLPromptParser;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-import java.util.UUID;
-import javax.annotation.Nullable;
-import org.apache.commons.text.StringEscapeUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-class OpenAiXMLPromptParser {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(OpenAiXMLPromptParser.class);
-
- private static class OpenAiChatPromptParseVisitor implements
- ChatPromptParseVisitor {
-
- @Nullable
- private ParsedPrompt parsedRaw = null;
- private final List functionDefinitions = new ArrayList<>();
- private final List messages = new ArrayList<>();
-
- @Override
- public ChatPromptParseVisitor addMessage(String role,
- String content) {
- messages.add(getChatRequestMessage(role, content));
- return this;
- }
-
- @Override
- public ChatPromptParseVisitor addFunction(
- String name,
- @Nullable String description,
- @Nullable BinaryData parameters) {
- FunctionDefinition function = new FunctionDefinition(name);
-
- if (description != null) {
- function.setDescription(description);
- }
-
- if (parameters != null) {
- function.setParameters(parameters);
- }
-
- functionDefinitions.add(function);
-
- return this;
- }
-
- @Override
- public boolean areMessagesEmpty() {
- return messages.isEmpty();
- }
-
- @Override
- public ChatPromptParseVisitor fromRawPrompt(String rawPrompt) {
- ChatRequestUserMessage message = new ChatRequestUserMessage(rawPrompt);
-
- if (message.getName() == null) {
- message.setName(UUID.randomUUID().toString());
- }
-
- this.parsedRaw = new ParsedPrompt(Collections.singletonList(message), null);
- return this;
- }
-
- @Override
- public ParsedPrompt get() {
- if (parsedRaw != null) {
- return parsedRaw;
- }
-
- return new ParsedPrompt(messages, functionDefinitions);
- }
-
- @Override
- public ChatPromptParseVisitor reset() {
- return new OpenAiChatPromptParseVisitor();
- }
- }
-
- public static ParsedPrompt parse(String rawPrompt) {
- ChatPromptParseVisitor visitor = ChatXMLPromptParser.parse(rawPrompt,
- new OpenAiChatPromptParseVisitor());
-
- return visitor.get();
-
- }
-
- private static ChatRequestMessage getChatRequestMessage(
- String role,
- String content) {
- try {
- AuthorRole authorRole = AuthorRole.valueOf(role.toUpperCase(Locale.ROOT));
- return OpenAIChatCompletion.getChatRequestMessage(authorRole, content);
- } catch (IllegalArgumentException e) {
- LOGGER.debug("Unknown author role: " + role);
- throw new SKException("Unknown author role: " + role);
- }
- }
-
- public static ChatRequestMessage unescapeRequest(ChatRequestMessage message) {
- if (message instanceof ChatRequestUserMessage) {
- ChatRequestUserMessage chatRequestMessage = (ChatRequestUserMessage) message;
- String content = StringEscapeUtils.unescapeXml(
- BinaryDataUtils.toString(chatRequestMessage.getContent()));
-
- return new ChatRequestUserMessage(content)
- .setName(chatRequestMessage.getName());
- } else if (message instanceof ChatRequestSystemMessage) {
- ChatRequestSystemMessage chatRequestMessage = (ChatRequestSystemMessage) message;
- String content = StringEscapeUtils
- .unescapeXml(
- BinaryDataUtils.toString(chatRequestMessage.getContent()));
-
- return new ChatRequestSystemMessage(content)
- .setName(chatRequestMessage.getName());
- } else if (message instanceof ChatRequestAssistantMessage) {
- ChatRequestAssistantMessage chatRequestMessage = (ChatRequestAssistantMessage) message;
- String content = StringEscapeUtils
- .unescapeXml(
- BinaryDataUtils.toString(chatRequestMessage.getContent()));
-
- return new ChatRequestAssistantMessage(content)
- .setToolCalls(chatRequestMessage.getToolCalls())
- .setFunctionCall(chatRequestMessage.getFunctionCall())
- .setName(chatRequestMessage.getName());
- } else if (message instanceof ChatRequestFunctionMessage) {
- ChatRequestFunctionMessage chatRequestMessage = (ChatRequestFunctionMessage) message;
- String content = StringEscapeUtils.unescapeXml(chatRequestMessage.getContent());
-
- return new ChatRequestFunctionMessage(
- chatRequestMessage.getName(),
- content);
- } else if (message instanceof ChatRequestToolMessage) {
- ChatRequestToolMessage chatRequestMessage = (ChatRequestToolMessage) message;
- String content = StringEscapeUtils
- .unescapeXml(
- BinaryDataUtils.toString(chatRequestMessage.getContent()));
-
- return new ChatRequestToolMessage(
- content,
- chatRequestMessage.getToolCallId());
- }
-
- throw new SKException("Unknown message type: " + message.getClass().getSimpleName());
- }
-}
\ No newline at end of file
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/ParsedPrompt.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/ParsedPrompt.java
deleted file mode 100644
index 7f49573ee..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/ParsedPrompt.java
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.ai.openai.models.ChatRequestMessage;
-import com.azure.ai.openai.models.FunctionDefinition;
-import java.util.ArrayList;
-import java.util.List;
-import javax.annotation.Nullable;
-
-class ParsedPrompt {
-
- private final List chatRequestMessages;
- private final List functions;
-
- protected ParsedPrompt(List parsedMessages,
- @Nullable List parsedFunctions) {
- this.chatRequestMessages = parsedMessages;
- if (parsedFunctions == null) {
- parsedFunctions = new ArrayList<>();
- }
- this.functions = parsedFunctions;
- }
-
- public List getChatRequestMessages() {
- return chatRequestMessages;
- }
-
- public List getFunctions() {
- return functions;
- }
-}
\ No newline at end of file
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/responseformat/ChatCompletionsJsonSchemaResponseFormat.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/responseformat/ChatCompletionsJsonSchemaResponseFormat.java
deleted file mode 100644
index f5a3b1c4f..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/responseformat/ChatCompletionsJsonSchemaResponseFormat.java
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion.responseformat;
-
-import com.azure.ai.openai.models.ChatCompletionsResponseFormat;
-import com.azure.json.JsonWriter;
-import com.microsoft.semantickernel.orchestration.responseformat.JsonResponseSchema;
-import java.io.IOException;
-
-/**
- * Represents a response format for chat completions that uses a JSON schema.
- */
-public class ChatCompletionsJsonSchemaResponseFormat extends ChatCompletionsResponseFormat {
-
- private final JsonResponseSchema schema;
- private String type = "json_schema";
-
- /**
- * Creates a new instance of the {@link ChatCompletionsJsonSchemaResponseFormat} class.
- *
- * @param schema The JSON schema.
- */
- public ChatCompletionsJsonSchemaResponseFormat(JsonResponseSchema schema) {
- this.schema = schema;
- }
-
- @Override
- public String getType() {
- return this.type;
- }
-
- @Override
- public JsonWriter toJson(JsonWriter jsonWriter) throws IOException {
- jsonWriter.writeStartObject();
- jsonWriter.writeStringField("type", this.type);
- jsonWriter.writeStartObject("json_schema");
-
- jsonWriter.writeBooleanField("strict", this.schema.isStrict());
- jsonWriter.writeStringField("name", this.schema.getName());
-
- jsonWriter.writeRawField("schema", this.schema.getSchema());
- jsonWriter.writeEndObject();
- return jsonWriter.writeEndObject();
- }
-
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/responseformat/JacksonResponseFormatGenerator.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/responseformat/JacksonResponseFormatGenerator.java
deleted file mode 100644
index 97060cab0..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/responseformat/JacksonResponseFormatGenerator.java
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion.responseformat;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ContainerNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.github.victools.jsonschema.generator.OptionPreset;
-import com.github.victools.jsonschema.generator.SchemaGenerator;
-import com.github.victools.jsonschema.generator.SchemaGeneratorConfigBuilder;
-import com.github.victools.jsonschema.generator.SchemaVersion;
-import com.github.victools.jsonschema.module.jackson.JacksonModule;
-import com.microsoft.semantickernel.orchestration.responseformat.ResponseSchemaGenerator;
-
-/**
- * Represents a response format generator that uses Jackson.
- */
-public class JacksonResponseFormatGenerator implements ResponseSchemaGenerator {
-
- private final SchemaGenerator generator;
-
- /**
- * Creates a new instance of the {@link JacksonResponseFormatGenerator} class.
- */
- public JacksonResponseFormatGenerator() {
- JacksonModule module = new JacksonModule();
- SchemaGeneratorConfigBuilder builder = new SchemaGeneratorConfigBuilder(
- SchemaVersion.DRAFT_2020_12, OptionPreset.PLAIN_JSON)
- .with(module);
-
- builder
- .forFields()
- .withRequiredCheck(fieldScope -> {
- return true;
- });
-
- generator = new SchemaGenerator(builder.build());
- }
-
- /**
- * Creates a new instance of the {@link JacksonResponseFormatGenerator} class.
- *
- * @param generator The schema generator.
- */
- public JacksonResponseFormatGenerator(SchemaGenerator generator) {
- this.generator = generator;
- }
-
- @Override
- public String generateSchema(Class> clazz) {
- ObjectNode schema = generator.generateSchema(clazz);
-
- sanitize(schema);
-
- return schema.toPrettyString();
- }
-
- private static void sanitize(ContainerNode schema) {
- if (schema instanceof ObjectNode) {
- ((ObjectNode) schema).remove("$schema");
-
- if (schema.has("type") && schema.get("type").asText().equals("object")) {
- ((ObjectNode) schema).put("additionalProperties", false);
- }
-
- for (JsonNode node : (ObjectNode) schema) {
- if (node instanceof ContainerNode) {
- sanitize((ContainerNode) node);
- }
- }
- } else if (schema instanceof ArrayNode) {
- for (JsonNode node : (ArrayNode) schema) {
- if (node instanceof ContainerNode) {
- sanitize((ContainerNode) node);
- }
- }
- }
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/implementation/OpenAIRequestSettings.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/implementation/OpenAIRequestSettings.java
deleted file mode 100644
index 35d0d3f3e..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/implementation/OpenAIRequestSettings.java
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.implementation;
-
-import com.azure.core.http.HttpHeaderName;
-import com.azure.core.http.policy.UserAgentPolicy;
-import com.azure.core.http.rest.RequestOptions;
-import com.azure.core.util.Context;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-import org.slf4j.Logger;
-
-/**
- * Provides Http request settings for OpenAI requests.
- */
-public final class OpenAIRequestSettings {
-
- private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(
- OpenAIRequestSettings.class);
-
- private static final String SEMANTIC_KERNEL_VERSION_PROPERTY_NAME = "semantic-kernel.version";
- private static final String SEMANTIC_KERNEL_VERSION_PROPERTIES_FILE = "semantic-kernel-version.properties";
-
- private static final String useragent;
- private static final String header;
-
- public static final String SEMANTIC_KERNEL_DISABLE_USERAGENT_PROPERTY = "semantic-kernel.useragent-disable";
-
- private static final boolean disabled;
-
- static {
- disabled = isDisabled();
- String version = loadVersion();
- useragent = "semantic-kernel-java/" + version;
- header = "java/" + version;
- }
-
- private static boolean isDisabled() {
- return Boolean.parseBoolean(
- System.getProperty(SEMANTIC_KERNEL_DISABLE_USERAGENT_PROPERTY, "false"));
- }
-
- private static String loadVersion() {
-
- String version = "unknown";
-
- try (InputStream settingsFile = OpenAIRequestSettings.class.getResourceAsStream(
- SEMANTIC_KERNEL_VERSION_PROPERTIES_FILE)) {
-
- Properties props = new Properties();
- props.load(settingsFile);
- if (props.containsKey(SEMANTIC_KERNEL_VERSION_PROPERTY_NAME)) {
- String skVersion = props.getProperty(SEMANTIC_KERNEL_VERSION_PROPERTY_NAME);
- if (skVersion != null && !skVersion.isEmpty()) {
- return skVersion;
- }
- }
- } catch (IOException e) {
- //Ignore
- LOGGER.trace("Failed to load Semantic Kernel version from properties file", e);
- }
- return version;
- }
-
- /**
- * Get the HTTP request options for OpenAI requests.
- *
- * @return The request options
- */
- public static RequestOptions getRequestOptions() {
- RequestOptions requestOptions = new RequestOptions();
-
- if (disabled) {
- return requestOptions;
- }
-
- return requestOptions
- .setHeader(HttpHeaderName.fromString("Semantic-Kernel-Version"), header)
- .setContext(new Context(UserAgentPolicy.APPEND_USER_AGENT_CONTEXT_KEY, useragent));
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAIStreamingTextContent.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAIStreamingTextContent.java
deleted file mode 100644
index 13272fb50..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAIStreamingTextContent.java
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.textcompletion;
-
-import com.microsoft.semantickernel.services.StreamingTextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextContent;
-
-import javax.annotation.Nullable;
-
-/**
- * StreamingTextContent is a wrapper for TextContent that allows for streaming.
- */
-public class OpenAIStreamingTextContent extends StreamingTextContent {
-
- /**
- * Initializes a new instance of the {@code StreamingTextContent} class with a provided text
- * content.
- *
- * @param content The text content.
- */
- public OpenAIStreamingTextContent(TextContent content) {
- super(content, 0, null, null);
- }
-
- @Override
- @Nullable
- public String getContent() {
- TextContent content = getInnerContent();
- if (content == null) {
- return null;
- }
- return content.getContent();
- }
-
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAITextGenerationService.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAITextGenerationService.java
deleted file mode 100644
index ec04c568d..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textcompletion/OpenAITextGenerationService.java
+++ /dev/null
@@ -1,175 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.textcompletion;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.CompletionsOptions;
-import com.azure.ai.openai.models.CompletionsUsage;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.openai.OpenAiService;
-import com.microsoft.semantickernel.aiservices.openai.implementation.OpenAIRequestSettings;
-import com.microsoft.semantickernel.exceptions.AIException;
-import com.microsoft.semantickernel.exceptions.AIException.ErrorCodes;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.services.StreamingTextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextContent;
-import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-import org.apache.commons.text.StringEscapeUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-
-/**
- * An OpenAI implementation of a {@link TextGenerationService}.
- */
-public class OpenAITextGenerationService extends OpenAiService
- implements TextGenerationService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(OpenAITextGenerationService.class);
-
- /**
- * Creates a new {@link OpenAITextGenerationService}.
- *
- * @param client OpenAI client
- * @param modelId OpenAI model id
- * @param serviceId Service id
- */
- protected OpenAITextGenerationService(
- OpenAIAsyncClient client,
- String modelId,
- @Nullable String serviceId,
- String deploymentName) {
- super(client, serviceId, modelId, deploymentName);
- }
-
- /**
- * Creates a builder for creating a {@link OpenAITextGenerationService}.
- *
- * @return A new {@link OpenAITextGenerationService} builder.
- */
- public static Builder builder() {
- return new Builder();
- }
-
- @Override
- public Mono> getTextContentsAsync(
- String prompt,
- @Nullable PromptExecutionSettings executionSettings,
- @Nullable Kernel kernel) {
- return this.internalCompleteTextAsync(prompt, executionSettings);
- }
-
- @Override
- public Flux getStreamingTextContentsAsync(
- String prompt,
- @Nullable PromptExecutionSettings executionSettings,
- @Nullable Kernel kernel) {
- return this
- .internalCompleteTextAsync(prompt, executionSettings)
- .flatMapMany(it -> Flux.fromStream(it.stream())
- .map(OpenAIStreamingTextContent::new));
- }
-
- protected Mono> internalCompleteTextAsync(
- String text,
- @Nullable PromptExecutionSettings requestSettings) {
-
- CompletionsOptions completionsOptions = getCompletionsOptions(text, requestSettings);
-
- return getClient()
- .getCompletionsWithResponse(getDeploymentName(), completionsOptions,
- OpenAIRequestSettings.getRequestOptions())
- .flatMap(completionsResult -> {
- if (completionsResult.getStatusCode() >= 400) {
- return Mono.error(new AIException(ErrorCodes.SERVICE_ERROR,
- "Request failed: " + completionsResult.getStatusCode()));
- }
- return Mono.just(completionsResult.getValue());
- })
- .map(completions -> {
- FunctionResultMetadata metadata = FunctionResultMetadata.build(
- completions.getId(),
- completions.getUsage(),
- completions.getCreatedAt());
-
- return completions
- .getChoices()
- .stream()
- .map(choice -> {
- return new TextContent(
- choice.getText(),
- completionsOptions.getModel(),
- metadata);
- })
- .collect(Collectors.toList());
- });
- }
-
- private CompletionsOptions getCompletionsOptions(
- String text,
- @Nullable PromptExecutionSettings requestSettings) {
- text = StringEscapeUtils.unescapeXml(text);
-
- if (requestSettings == null) {
- return new CompletionsOptions(Collections.singletonList(text))
- .setMaxTokens(PromptExecutionSettings.DEFAULT_MAX_TOKENS);
- }
- if (requestSettings.getMaxTokens() < 1) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST, "Max tokens must be >0");
- }
- if (requestSettings.getResultsPerPrompt() < 1
- || requestSettings.getResultsPerPrompt() > MAX_RESULTS_PER_PROMPT) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- String.format("Results per prompt must be in range between 1 and %d, inclusive.",
- MAX_RESULTS_PER_PROMPT));
- }
-
- CompletionsOptions options = new CompletionsOptions(Collections.singletonList(text))
- .setMaxTokens(requestSettings.getMaxTokens())
- .setTemperature(requestSettings.getTemperature())
- .setTopP(requestSettings.getTopP())
- .setFrequencyPenalty(requestSettings.getFrequencyPenalty())
- .setPresencePenalty(requestSettings.getPresencePenalty())
- .setModel(getModelId())
- .setN(requestSettings.getResultsPerPrompt())
- .setUser(requestSettings.getUser())
- .setBestOf(requestSettings.getBestOf())
- .setLogitBias(new HashMap<>());
- return options;
- }
-
- /**
- * Builder for a TextGenerationService
- */
- public static class Builder extends TextGenerationService.Builder {
-
- @Override
- public TextGenerationService build() {
-
- if (this.client == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "OpenAI client must be provided");
- }
- if (this.modelId == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "OpenAI model id must be provided");
- }
- if (deploymentName == null) {
- LOGGER.debug("Deployment name is not provided, using model id as deployment name");
- deploymentName = modelId;
- }
-
- return new OpenAITextGenerationService(
- this.client,
- this.modelId,
- this.serviceId,
- this.deploymentName);
- }
- }
-}
diff --git a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textembedding/OpenAITextEmbeddingGenerationService.java b/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textembedding/OpenAITextEmbeddingGenerationService.java
deleted file mode 100644
index 355f2ab95..000000000
--- a/aiservices/openai/src/main/java/com/microsoft/semantickernel/aiservices/openai/textembedding/OpenAITextEmbeddingGenerationService.java
+++ /dev/null
@@ -1,156 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.textembedding;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.EmbeddingItem;
-import com.azure.ai.openai.models.Embeddings;
-import com.azure.ai.openai.models.EmbeddingsOptions;
-import com.microsoft.semantickernel.aiservices.openai.OpenAiService;
-import com.microsoft.semantickernel.exceptions.AIException;
-import com.microsoft.semantickernel.services.openai.OpenAiServiceBuilder;
-import com.microsoft.semantickernel.services.textembedding.Embedding;
-import com.microsoft.semantickernel.services.textembedding.TextEmbeddingGenerationService;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import javax.annotation.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import reactor.core.publisher.Mono;
-
-/**
- * An OpenAI implementation of a {@link TextEmbeddingGenerationService}.
- */
-public class OpenAITextEmbeddingGenerationService extends OpenAiService
- implements TextEmbeddingGenerationService {
-
- private static final Logger LOGGER = LoggerFactory
- .getLogger(OpenAITextEmbeddingGenerationService.class);
- private final int dimensions;
-
- /**
- * Dimension of the OpenAI
- * {@code text-embedding-3-small} model.
- */
- public static final int EMBEDDING_DIMENSIONS_SMALL = 1536;
- /**
- * Dimension of the OpenAI
- * {@code text-embedding-3-large} model.
- */
- public static final int EMBEDDING_DIMENSIONS_LARGE = 3072;
-
- /**
- * Creates a new {@link OpenAITextEmbeddingGenerationService}.
- *
- * @param client OpenAI client
- * @param deploymentName deployment name
- * @param dimensions The dimensions for the embeddings.
- * @param modelId OpenAI model id
- * @param serviceId Service id
- */
- public OpenAITextEmbeddingGenerationService(
- OpenAIAsyncClient client,
- String deploymentName,
- String modelId,
- @Nullable String serviceId,
- int dimensions) {
- super(client, serviceId, modelId, deploymentName);
- this.dimensions = dimensions;
- }
-
- /**
- * Creates a builder for creating a {@link OpenAITextEmbeddingGenerationService}.
- *
- * @return A new {@link OpenAITextEmbeddingGenerationService} builder.
- */
- public static Builder builder() {
- return new Builder();
- }
-
- /**
- * Generates embeddings for the given data.
- *
- * @param data The data to generate embeddings for.
- * @return A Mono that completes with the embeddings.
- */
- @Override
- public Mono generateEmbeddingAsync(String data) {
- return this.internalGenerateTextEmbeddingsAsync(Arrays.asList(data))
- .flatMap(embeddings -> {
- if (embeddings.isEmpty()) {
- return Mono.empty();
- }
-
- return Mono.just(embeddings.get(0));
- });
- }
-
- /**
- * Generates embeddings for the given data.
- *
- * @param data The data to generate embeddings for.
- * @return A Mono that completes with the embeddings.
- */
- @Override
- public Mono> generateEmbeddingsAsync(List data) {
- return this.internalGenerateTextEmbeddingsAsync(data);
- }
-
- protected Mono> internalGenerateTextEmbeddingsAsync(List data) {
- EmbeddingsOptions options = new EmbeddingsOptions(data)
- .setModel(getModelId())
- .setInputType("string");
- if (dimensions > 0) {
- options.setDimensions(dimensions);
- }
-
- return getClient()
- .getEmbeddings(getModelId(), options)
- .flatMapIterable(Embeddings::getData)
- .mapNotNull(EmbeddingItem::getEmbedding)
- .map(ArrayList::new)
- .mapNotNull(Embedding::new)
- .collectList();
- }
-
- /**
- * A builder for creating a {@link OpenAITextEmbeddingGenerationService}.
- */
- public static class Builder extends
- OpenAiServiceBuilder {
-
- private int dimensions = -1;
-
- /**
- * Sets the dimensions for the embeddings.
- *
- * @param dimensions The dimensions for the embeddings.
- * @return The builder.
- */
- public Builder withDimensions(int dimensions) {
- this.dimensions = dimensions;
- return this;
- }
-
- @Override
- public OpenAITextEmbeddingGenerationService build() {
- if (this.client == null) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "OpenAI client must be provided");
- }
-
- if (this.modelId == null || modelId.isEmpty()) {
- throw new AIException(AIException.ErrorCodes.INVALID_REQUEST,
- "OpenAI model id must be provided");
- }
-
- if (deploymentName == null) {
- LOGGER.debug("Deployment name is not provided, using model id as deployment name");
- deploymentName = modelId;
- }
-
- return new OpenAITextEmbeddingGenerationService(client, deploymentName, modelId,
- serviceId, dimensions);
- }
- }
-}
diff --git a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.audio.AudioToTextService$Builder b/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.audio.AudioToTextService$Builder
deleted file mode 100644
index 439003dbe..000000000
--- a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.audio.AudioToTextService$Builder
+++ /dev/null
@@ -1 +0,0 @@
-com.microsoft.semantickernel.aiservices.openai.audio.OpenAiAudioToTextService$Builder
\ No newline at end of file
diff --git a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.audio.TextToAudioService$Builder b/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.audio.TextToAudioService$Builder
deleted file mode 100644
index 7d494cbb2..000000000
--- a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.audio.TextToAudioService$Builder
+++ /dev/null
@@ -1 +0,0 @@
-com.microsoft.semantickernel.aiservices.openai.audio.OpenAiTextToAudioService$Builder
\ No newline at end of file
diff --git a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService$Builder b/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService$Builder
deleted file mode 100644
index ba94ee72f..000000000
--- a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService$Builder
+++ /dev/null
@@ -1 +0,0 @@
-com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion$Builder
diff --git a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.textcompletion.TextGenerationService$Builder b/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.textcompletion.TextGenerationService$Builder
deleted file mode 100644
index 6012c79d3..000000000
--- a/aiservices/openai/src/main/resources/META-INF/services/com.microsoft.semantickernel.services.textcompletion.TextGenerationService$Builder
+++ /dev/null
@@ -1 +0,0 @@
-com.microsoft.semantickernel.aiservices.openai.textcompletion.OpenAITextGenerationService$Builder
diff --git a/aiservices/openai/src/main/resources/com/microsoft/semantickernel/aiservices/openai/implementation/semantic-kernel-version.properties b/aiservices/openai/src/main/resources/com/microsoft/semantickernel/aiservices/openai/implementation/semantic-kernel-version.properties
deleted file mode 100644
index 73220f4f8..000000000
--- a/aiservices/openai/src/main/resources/com/microsoft/semantickernel/aiservices/openai/implementation/semantic-kernel-version.properties
+++ /dev/null
@@ -1 +0,0 @@
-semantic-kernel.version=${project.version}
\ No newline at end of file
diff --git a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/OtelCaptureTest.java b/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/OtelCaptureTest.java
deleted file mode 100644
index 5c3444b3a..000000000
--- a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/OtelCaptureTest.java
+++ /dev/null
@@ -1,126 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.ChatCompletions;
-import com.azure.ai.openai.models.ChatCompletionsOptions;
-import com.azure.ai.openai.models.Completions;
-import com.azure.ai.openai.models.CompletionsOptions;
-import com.azure.ai.openai.models.CompletionsUsage;
-import com.azure.core.http.rest.Response;
-import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
-import com.microsoft.semantickernel.aiservices.openai.textcompletion.OpenAITextGenerationService;
-import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
-import io.opentelemetry.api.GlobalOpenTelemetry;
-import io.opentelemetry.api.common.AttributeKey;
-import io.opentelemetry.sdk.OpenTelemetrySdk;
-import io.opentelemetry.sdk.common.CompletableResultCode;
-import io.opentelemetry.sdk.trace.SdkTracerProvider;
-import io.opentelemetry.sdk.trace.data.SpanData;
-import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;
-import io.opentelemetry.sdk.trace.export.SpanExporter;
-import java.util.ArrayList;
-import java.util.Collection;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-import reactor.core.publisher.Mono;
-
-public class OtelCaptureTest {
-
- private static OpenTelemetrySdk otel;
- private static ArrayList spans = new ArrayList<>();
-
- @BeforeEach
- public void clearSpans() {
- spans.clear();
- }
-
- @BeforeAll
- public static void setup() {
-
- SdkTracerProvider tracerProvider = SdkTracerProvider.builder()
- .addSpanProcessor(SimpleSpanProcessor.builder(new SpanExporter() {
- @Override
- public CompletableResultCode export(Collection collection) {
- spans.addAll(collection);
- return new CompletableResultCode();
- }
-
- @Override
- public CompletableResultCode flush() {
- return new CompletableResultCode();
- }
-
- @Override
- public CompletableResultCode shutdown() {
- return new CompletableResultCode();
- }
- })
- .build())
- .build();
-
- GlobalOpenTelemetry.resetForTest();
-
- otel = OpenTelemetrySdk.builder()
- .setTracerProvider(tracerProvider)
- .buildAndRegisterGlobal();
- }
-
- @AfterAll
- public static void shutdown() {
- otel.shutdown();
- }
-
- @Test
- public void otelChatCaptureTest() {
- OpenAIAsyncClient openAIAsyncClient = Mockito.mock(OpenAIAsyncClient.class);
-
- CompletionsUsage completionsUsage = Mockito.mock(CompletionsUsage.class);
- Mockito.when(completionsUsage.getCompletionTokens()).thenReturn(21);
- Mockito.when(completionsUsage.getPromptTokens()).thenReturn(42);
-
- ChatCompletions chatCompletions = Mockito.mock(ChatCompletions.class);
- Mockito.when(chatCompletions.getUsage()).thenReturn(completionsUsage);
-
- Response response = Mockito.mock(Response.class);
- Mockito.when(response.getStatusCode()).thenReturn(200);
- Mockito.when(response.getValue()).thenReturn(chatCompletions);
-
- Mockito.when(openAIAsyncClient.getChatCompletionsWithResponse(
- Mockito.any(),
- Mockito.any(),
- Mockito.any())).thenAnswer(invocation -> Mono.just(response));
-
- OpenAIChatCompletion client = OpenAIChatCompletion.builder()
- .withOpenAIAsyncClient(openAIAsyncClient)
- .withModelId("a-model")
- .build();
-
- try {
- client.getChatMessageContentsAsync(
- "foo",
- null,
- null).block();
- } catch (Exception e) {
- // Expect to fail
- }
-
- Assertions.assertFalse(spans.isEmpty());
- Assertions.assertEquals("a-model",
- spans.get(0).getAttributes().get(AttributeKey.stringKey("gen_ai.request.model")));
- Assertions.assertEquals("chat.completions",
- spans.get(0).getAttributes().get(AttributeKey.stringKey("gen_ai.operation.name")));
- Assertions.assertEquals("openai",
- spans.get(0).getAttributes().get(AttributeKey.stringKey("gen_ai.system")));
- Assertions.assertEquals(21,
- spans.get(0).getAttributes()
- .get(AttributeKey.longKey("gen_ai.usage.output_tokens")));
- Assertions.assertEquals(42,
- spans.get(0).getAttributes()
- .get(AttributeKey.longKey("gen_ai.usage.input_tokens")));
- }
-}
diff --git a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Bar.java b/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Bar.java
deleted file mode 100644
index 45d1caa78..000000000
--- a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Bar.java
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class Bar {
-
- private final String bar;
-
- public Bar(
- @JsonProperty("bar") String bar) {
- this.bar = bar;
- }
-
- public String getBar() {
- return bar;
- }
-
-}
diff --git a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Baz.java b/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Baz.java
deleted file mode 100644
index 90ad60ac1..000000000
--- a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Baz.java
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class Baz {
-
- @JsonProperty("bar")
- private final Bar bar;
-
- @JsonCreator
- public Baz(
- @JsonProperty("bar") Bar bar) {
- this.bar = bar;
- }
-
- @JsonProperty("bar")
- public Bar getBar() {
- return bar;
- }
-}
\ No newline at end of file
diff --git a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Foo.java b/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Foo.java
deleted file mode 100644
index 34479b221..000000000
--- a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/Foo.java
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class Foo {
-
- @JsonProperty("bar")
- private final T bar;
-
- @JsonCreator
- public Foo(
- @JsonProperty("bar") T bar) {
- this.bar = bar;
- }
-
- @JsonProperty("bar")
- public T getBar() {
- return bar;
- }
-}
\ No newline at end of file
diff --git a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/JsonSchemaTest.java b/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/JsonSchemaTest.java
deleted file mode 100644
index 33870fba2..000000000
--- a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/JsonSchemaTest.java
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.fasterxml.jackson.annotation.JsonPropertyDescription;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.microsoft.semantickernel.orchestration.responseformat.JsonSchemaResponseFormat;
-import com.microsoft.semantickernel.plugin.KernelPlugin;
-import com.microsoft.semantickernel.plugin.KernelPluginFactory;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-import reactor.core.publisher.Mono;
-
-public class JsonSchemaTest {
-
- @Test
- public void jacksonGenerationTest() throws JsonProcessingException {
- JsonSchemaResponseFormat format = JsonSchemaResponseFormat.builder()
- .setResponseFormat(Foo.class)
- .setName("foo")
- .build();
-
- Assertions.assertEquals("foo", format.getJsonSchema().getName());
-
- Assertions.assertTrue(format.getJsonSchema().getSchema()
- .replaceAll("\\r\\n|\\r|\\n", "")
- .replaceAll(" +", "")
- .contains(
- "\"type\":\"object\",\"properties\":{\"bar\":{}}"));
- }
-
- @Test
- public void openAIFunctionTest() {
- KernelPlugin plugin = KernelPluginFactory.createFromObject(
- new TestPlugin(),
- "test");
-
- Assertions.assertNotNull(plugin);
- Assertions.assertEquals(plugin.getName(), "test");
- Assertions.assertEquals(plugin.getFunctions().size(), 3);
-
- KernelFunction> testFunction = plugin.getFunctions()
- .get("asyncPersonFunction");
- OpenAIFunction openAIFunction = OpenAIFunction.build(
- testFunction.getMetadata(),
- plugin.getName());
-
- String parameters = "{\"type\":\"object\",\"required\":[\"person\",\"input\"],\"properties\":{\"input\":{\"type\":\"string\",\"description\":\"input string\"},\"person\":{\"type\":\"object\",\"properties\":{\"age\":{\"type\":\"integer\",\"description\":\"The age of the person.\"},\"name\":{\"type\":\"string\",\"description\":\"The name of the person.\"},\"title\":{\"type\":\"string\",\"enum\":[\"MS\",\"MRS\",\"MR\"],\"description\":\"The title of the person.\"}},\"required\":[\"age\",\"name\",\"title\"],\"additionalProperties\":false,\"description\":\"input person\"}}}";
- Assertions.assertEquals(parameters,
- openAIFunction.getFunctionDefinition().getParameters().toString());
-
- }
-
- public static class TestPlugin {
-
- @DefineKernelFunction
- public String testFunction(
- @KernelFunctionParameter(name = "input", description = "input string") String input) {
- return "test" + input;
- }
-
- @DefineKernelFunction(returnType = "int")
- public Mono asyncTestFunction(
- @KernelFunctionParameter(name = "input") String input) {
- return Mono.just(1);
- }
-
- @DefineKernelFunction(returnType = "int", description = "test function description", name = "asyncPersonFunction", returnDescription = "test return description")
- public Mono asyncPersonFunction(
- @KernelFunctionParameter(name = "person", description = "input person", type = Person.class) Person person,
- @KernelFunctionParameter(name = "input", description = "input string") String input) {
- return Mono.just(1);
- }
- }
-
- private static enum Title {
- MS, MRS, MR
- }
-
- public static class Person {
- @JsonPropertyDescription("The name of the person.")
- private String name;
- @JsonPropertyDescription("The age of the person.")
- private int age;
- @JsonPropertyDescription("The title of the person.")
- private Title title;
-
- public Person(String name, int age) {
- this.name = name;
- this.age = age;
- }
-
- public String getName() {
- return name;
- }
-
- public int getAge() {
- return age;
- }
-
- public Title getTitle() {
- return title;
- }
-
- public void setTitle(Title title) {
- this.title = title;
- }
- }
-
-}
diff --git a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAiChatCompletionTest.java b/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAiChatCompletionTest.java
deleted file mode 100644
index 5a1be82a4..000000000
--- a/aiservices/openai/src/test/java/com/microsoft/semantickernel/aiservices/openai/chatcompletion/OpenAiChatCompletionTest.java
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.aiservices.openai.chatcompletion;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.ChatCompletions;
-import com.azure.ai.openai.models.ChatCompletionsFunctionToolCall;
-import com.azure.ai.openai.models.ChatCompletionsOptions;
-import com.azure.ai.openai.models.ChatRequestAssistantMessage;
-import com.azure.core.http.HttpHeaders;
-import com.azure.core.http.HttpRequest;
-import com.azure.core.http.rest.Response;
-import com.azure.json.JsonOptions;
-import com.azure.json.implementation.DefaultJsonReader;
-import com.microsoft.semantickernel.implementation.EmbeddedResourceLoader;
-import com.microsoft.semantickernel.orchestration.FunctionResultMetadata;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.services.chatcompletion.AuthorRole;
-import com.microsoft.semantickernel.services.chatcompletion.ChatHistory;
-import java.nio.charset.Charset;
-import java.util.Arrays;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-import reactor.core.publisher.Mono;
-
-public class OpenAiChatCompletionTest {
-
- @Test
- public void serializesToolCallsCorrectly() {
- OpenAIAsyncClient client = Mockito.mock(OpenAIAsyncClient.class);
- OpenAIChatCompletion chatCompletion = mockClient(client);
-
- ChatHistory chatHistory = new ChatHistory();
-
- chatHistory.addUserMessage(
- "What is the name of the pet with id ca2fc6bc-1307-4da6-a009-d7bf88dec37b?");
-
- chatHistory.addMessage(new OpenAIChatMessageContent(
- AuthorRole.ASSISTANT,
- "",
- "test",
- null,
- Charset.defaultCharset(),
- null,
- Arrays.asList(
- new OpenAIFunctionToolCall(
- "a-tool-id",
- "pluginName",
- "funcName",
- KernelArguments.builder()
- .withVariable("id", "ca2fc6bc-1307-4da6-a009-d7bf88dec37b")
- .build()))));
- chatHistory.addMessage(new OpenAIChatMessageContent(
- AuthorRole.TOOL,
- "Snuggles",
- "test",
- null,
- Charset.defaultCharset(),
- FunctionResultMetadata.build("a-tool-id"),
- null));
-
- chatCompletion
- .getChatMessageContentsAsync(chatHistory, null, null).block();
-
- Mockito.verify(client, Mockito.times(1))
- .getChatCompletionsWithResponse(
- Mockito.any(),
- Mockito.argThat(options -> {
- ChatRequestAssistantMessage message = ((ChatRequestAssistantMessage) options
- .getMessages()
- .get(1));
- ChatCompletionsFunctionToolCall toolcall = ((ChatCompletionsFunctionToolCall) message
- .getToolCalls()
- .get(0));
- return toolcall
- .getFunction()
- .getArguments()
- .equals("{\"id\": \"ca2fc6bc-1307-4da6-a009-d7bf88dec37b\"}");
- }),
- Mockito.any());
- }
-
- private static OpenAIChatCompletion mockClient(OpenAIAsyncClient client) {
- Mockito.when(client.getChatCompletionsWithResponse(Mockito.any(),
- Mockito.any(), Mockito.any()))
- .thenReturn(Mono.just(
- new Response() {
- @Override
- public int getStatusCode() {
- return 200;
- }
-
- @Override
- public HttpHeaders getHeaders() {
- return new HttpHeaders();
- }
-
- @Override
- public HttpRequest getRequest() {
- return null;
- }
-
- @Override
- public ChatCompletions getValue() {
- try {
- String message = EmbeddedResourceLoader.readFile("chatCompletion.txt",
- OpenAiChatCompletionTest.class);
-
- return ChatCompletions.fromJson(
- DefaultJsonReader.fromString(
- String.format(message, "Snuggles"), new JsonOptions()));
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
- }));
- return new OpenAIChatCompletion(
- client,
- "test",
- "test",
- "test");
- }
-
-}
diff --git a/aiservices/openai/src/test/resources/com/microsoft/semantickernel/aiservices/openai/chatcompletion/chatCompletion.txt b/aiservices/openai/src/test/resources/com/microsoft/semantickernel/aiservices/openai/chatcompletion/chatCompletion.txt
deleted file mode 100644
index 799c8ab48..000000000
--- a/aiservices/openai/src/test/resources/com/microsoft/semantickernel/aiservices/openai/chatcompletion/chatCompletion.txt
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "choices" : [
- {
- "content_filter_results" : {
- "hate" : {
- "filtered" : false,
- "severity" : "safe"
- },
- "self_harm" : {
- "filtered" : false,
- "severity" : "safe"
- },
- "sexual" : {
- "filtered" : false,
- "severity" : "safe"
- },
- "violence" : {
- "filtered" : false,
- "severity" : "safe"
- }
- },
- "finish_reason" : "stop",
- "index" : 0,
- "message" : {
- "content" : "%s",
- "role" : "assistant"
- }
- }
- ],
- "created" : 1707253039,
- "id" : "chatcmpl-xxx",
- "prompt_filter_results" : [
- {
- "content_filter_results" : {
- "hate" : {
- "filtered" : false,
- "severity" : "safe"
- },
- "self_harm" : {
- "filtered" : false,
- "severity" : "safe"
- },
- "sexual" : {
- "filtered" : false,
- "severity" : "safe"
- },
- "violence" : {
- "filtered" : false,
- "severity" : "safe"
- }
- },
- "prompt_index" : 0
- }
- ],
- "usage" : {
- "completion_tokens" : 131,
- "prompt_tokens" : 26,
- "total_tokens" : 157
- }
-}
diff --git a/api-test/integration-tests/pom.xml b/api-test/integration-tests/pom.xml
deleted file mode 100644
index 862cc5184..000000000
--- a/api-test/integration-tests/pom.xml
+++ /dev/null
@@ -1,246 +0,0 @@
-
-
-
- 4.0.0
-
-
- com.microsoft.semantic-kernel
- api-test
- 1.4.4-RC3-SNAPSHOT
- ../pom.xml
-
-
- Semantic Kernel Integration Tests
- integration-tests
- jar
-
-
-
- com.microsoft.semantic-kernel
- semantickernel-syntax-examples
- ${project.version}
- test
-
-
- org.apache.logging.log4j
- log4j-api
- test
-
-
- org.apache.logging.log4j
- log4j-core
- test
-
-
- org.apache.logging.log4j
- log4j-slf4j2-impl
- test
-
-
- org.mockito
- mockito-core
- test
-
-
- org.junit.jupiter
- junit-jupiter
- test
-
-
- org.junit.jupiter
- junit-jupiter-api
- test
-
-
- com.microsoft.semantic-kernel
- semantickernel-api
- test
-
-
- com.microsoft.semantic-kernel
- semantickernel-data-jdbc
- test
-
-
- com.microsoft.semantic-kernel
- semantickernel-data-mysql
- test
-
-
- com.microsoft.semantic-kernel
- semantickernel-data-hsqldb
- test
-
-
- com.microsoft.semantic-kernel
- semantickernel-data-sqlite
- test
-
-
- com.microsoft.semantic-kernel
- semantickernel-data-redis
- test
-
-
-
- org.xerial
- sqlite-jdbc
- 3.46.0.0
-
-
- com.mysql
- mysql-connector-j
- 9.0.0
- test
-
-
- org.postgresql
- postgresql
- 42.7.3
-
-
- org.xerial
- sqlite-jdbc
- 3.46.1.0
-
-
-
- org.testcontainers
- junit-jupiter
- test
-
-
- org.testcontainers
- postgresql
- test
-
-
- org.testcontainers
- mysql
- test
-
-
- com.redis
- testcontainers-redis
- 2.2.2
- test
-
-
- org.wiremock
- wiremock
- test
-
-
-
-
- com.github.victools
- jsonschema-generator
- test
-
-
- com.github.victools
- jsonschema-module-jackson
- test
-
-
-
-
- org.hsqldb
- hsqldb
- 2.7.3
- test
-
-
-
-
-
-
- org.testcontainers
- testcontainers-bom
- 1.18.3
- pom
- import
-
-
-
-
-
-
-
- org.codehaus.mojo
- exec-maven-plugin
- 3.1.0
-
-
-
- exec
-
- recordmappings
-
- java
-
- -Djavax.net.ssl.trustStore=scripts/client.truststore
- -Djavax.net.ssl.trustStorePassword=password
-
- com.microsoft.semantickernel.syntaxexamples.WiremockRecord
- test
-
-
-
-
-
-
-
-
-
- run-wiremocks
-
- false
-
-
-
-
- org.codehaus.mojo
- exec-maven-plugin
- 3.1.0
-
-
- validate
-
- exec
-
-
- ./generateCert.sh
- scripts
-
-
-
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
- 3.2.5
-
-
- **/WiremockExamplesIT.java
-
- false
- 1
-
- -Djavax.net.ssl.trustStore=scripts/client.truststore
- -Djavax.net.ssl.trustStorePassword=password
-
-
- foo
- https://localhost:8443/
- ../../
-
-
-
-
-
-
-
-
-
-
diff --git a/api-test/integration-tests/scripts/generateCert.sh b/api-test/integration-tests/scripts/generateCert.sh
deleted file mode 100755
index 300f90d1f..000000000
--- a/api-test/integration-tests/scripts/generateCert.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-
-KEYTOOL_ARGS="-noprompt -srcstorepass password -deststorepass password -srckeypass password"
-
-SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-cd $SCRIPT_DIR
-
-if [ -f client.keystore ]; then
- exit
-fi
-
-rm client.keystore client.truststore server.keystore server.truststore || true
-
-openssl genrsa -out diagserverCA.key 2048
-openssl req -x509 -new -nodes -key diagserverCA.key -sha256 -days 1024 -out diagserverCA.pem -subj "/C=US/ST=NA/L=NA/O=NA/OU=NA/CN=localhost"
-openssl pkcs12 -export -name server-cert -password pass:password -in diagserverCA.pem -inkey diagserverCA.key -out serverkeystore.p12
-
-openssl genrsa -out diagclientCA.key 2048
-openssl req -x509 -new -nodes -key diagclientCA.key -sha256 -days 1024 -out diagclientCA.pem -subj "/C=US/ST=NA/L=NA/O=NA/OU=NA/CN=localhost"
-openssl pkcs12 -export -name client-cert -password pass:password -in diagclientCA.pem -inkey diagclientCA.key -out clientkeystore.p12
-
-
-keytool -importkeystore $KEYTOOL_ARGS -destkeystore server.keystore -srckeystore serverkeystore.p12 -srcstoretype pkcs12 -alias server-cert
-keytool -import $KEYTOOL_ARGS -alias client-cert -file diagclientCA.pem -keystore server.truststore
-keytool -import $KEYTOOL_ARGS -alias server-cert -file diagserverCA.pem -keystore server.truststore
-
-
-keytool -importkeystore $KEYTOOL_ARGS -destkeystore client.keystore -srckeystore clientkeystore.p12 -srcstoretype pkcs12 -alias client-cert
-keytool -import $KEYTOOL_ARGS -alias server-cert -file diagserverCA.pem -keystore client.truststore
-keytool -import $KEYTOOL_ARGS -alias client-cert -file diagclientCA.pem -keystore client.truststore
-
-keytool -importkeystore -srckeystore /usr/lib/jvm/default-java/lib/security/cacerts -destkeystore client.truststore -srcstorepass changeit -deststorepass password
-keytool -importkeystore -srckeystore /usr/lib/jvm/default-java/lib/security/cacerts -destkeystore server.truststore -srcstorepass changeit -deststorepass password
-
-rm diagclientCA.key diagserverCA.pem clientkeystore.p12 diagclientCA.pem serverkeystore.p12 diagserverCA.key || true
diff --git a/api-test/integration-tests/scripts/recordMappings.sh b/api-test/integration-tests/scripts/recordMappings.sh
deleted file mode 100755
index 4616e6931..000000000
--- a/api-test/integration-tests/scripts/recordMappings.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-
-# Run this from the api-test/integration-tests directory
-
-SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-ROOT_DIR="$SCRIPT_DIR/.."
-
-cd $SCRIPT_DIR
-
-if [ ! -f client.truststore ]; then
- ./generateCert.sh
-fi
-
-rm -r "$ROOT_DIR/target/wiremock/mappings" || true
-rm -r "$ROOT_DIR/src/test/resources/wiremock/mappings/" || true
-mkdir -p "$ROOT_DIR/target/wiremock/mappings"
-mkdir -p "$ROOT_DIR/src/test/resources/wiremock/mappings/"
-
-cd $ROOT_DIR
-source "$ROOT_DIR/../../.env.record"
-export AZURE_CLIENT_KEY
-export CLIENT_ENDPOINT
-export PLUGIN_DIR="$ROOT_DIR/../../../"
-
-../../mvnw clean package -DskipTests -Pcompile-jdk17
-
-MAVEN_OPTS="-Djavax.net.ssl.trustStore=scripts/client.truststore -Djavax.net.ssl.trustStorePassword=password" \
-../../mvnw exec:java@recordmappings -Pcompile-jdk17
-
-for f in $ROOT_DIR/target/wiremock/mappings/*.json; do
- cat $f | jq 'del(.response.headers[])' > /tmp/mapping.json
- cat /tmp/mapping.json | json_pp > $f
- mv $f src/test/resources/wiremock/mappings/
-done
diff --git a/api-test/integration-tests/scripts/runWiremocks.sh b/api-test/integration-tests/scripts/runWiremocks.sh
deleted file mode 100755
index 577edf89e..000000000
--- a/api-test/integration-tests/scripts/runWiremocks.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
-ROOT_DIR="$SCRIPT_DIR/.."
-
-cd $SCRIPT_DIR
-
-if [ ! -f client.truststore ]; then
- ./generateCert.sh
-fi
-
-
-cd $ROOT_DIR
-
-../../mvnw clean test -Dtest=WiremockExamplesIT -Prun-wiremocks
\ No newline at end of file
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example01NativeFunctionsTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example01NativeFunctionsTest.java
deleted file mode 100644
index 74448cde9..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example01NativeFunctionsTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.tests;
-
-import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.annotations.KernelFunctionParameter;
-
-import java.util.Locale;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-public class Example01NativeFunctionsTest {
-
- private static class TextPlugin {
-
- @DefineKernelFunction(description = "Change all string chars to uppercase.", name = "Uppercase")
- public String uppercase(
- @KernelFunctionParameter(description = "Text to uppercase", name = "input") String text) {
- return text.toUpperCase(Locale.ROOT);
- }
-
- }
-
- @Test
- public void run() {
- // Load native plugin
- TextPlugin text = new TextPlugin();
-
- // Use function without kernel
- String result = text.uppercase("ciao!");
-
- Assertions.assertEquals("CIAO!", result);
- }
-}
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example03_ArgumentsTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example03_ArgumentsTest.java
deleted file mode 100644
index dd283952d..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example03_ArgumentsTest.java
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.tests;
-
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.orchestration.FunctionResult;
-import com.microsoft.semantickernel.plugin.KernelPlugin;
-import com.microsoft.semantickernel.plugin.KernelPluginFactory;
-import com.microsoft.semantickernel.samples.syntaxexamples.functions.Example03_Arguments.StaticTextPlugin;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-/**
- * Demonstrates running a pipeline (a sequence of functions) on a
- * {@code com.microsoft.semantickernel.orchestration.SKContext}
- */
-public class Example03_ArgumentsTest {
-
- @Test
- public void main() {
- Kernel kernel = Kernel.builder().build();
-
- // Load native plugin
- KernelPlugin functionCollection = KernelPluginFactory
- .createFromObject(new StaticTextPlugin(), "text");
-
- KernelArguments arguments = KernelArguments.builder()
- .withInput("Today is: ")
- .withVariable("day", "Monday")
- .build();
-
- FunctionResult resultValue = kernel.invokeAsync(
- functionCollection.get("AppendDay"))
- .withArguments(arguments)
- .block();
-
- Assertions.assertEquals("Today is: Monday", resultValue.getResult());
- }
-}
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example05_InlineFunctionDefinitionTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example05_InlineFunctionDefinitionTest.java
deleted file mode 100644
index d791ec9c2..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/Example05_InlineFunctionDefinitionTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.tests;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.OpenAIClientBuilder;
-import com.github.tomakehurst.wiremock.junit5.WireMockRuntimeInfo;
-import com.github.tomakehurst.wiremock.junit5.WireMockTest;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
-import com.microsoft.semantickernel.orchestration.FunctionResult;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunctionFromPrompt;
-
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-@WireMockTest
-public class Example05_InlineFunctionDefinitionTest {
-
- @Test
- public void main(WireMockRuntimeInfo wmRuntimeInfo) {
- final OpenAIAsyncClient client = new OpenAIClientBuilder()
- .endpoint("http://localhost:" + wmRuntimeInfo.getHttpPort())
- .buildAsyncClient();
-
- OpenAIChatCompletion chatCompletion = OpenAIChatCompletion.builder()
- .withOpenAIAsyncClient(client)
- .withModelId("gpt-35-turbo")
- .build();
-
- Kernel kernel = Kernel.builder()
- .withAIService(OpenAIChatCompletion.class, chatCompletion)
- .build();
-
- System.out.println("======== Inline Function Definition ========");
-
- // Function defined using few-shot design pattern
- String promptTemplate = """
- Generate a creative reason or excuse for the given event.
- Be creative and be funny. Let your imagination run wild.
-
- Event: I am running late.
- Excuse: I was being held ransom by giraffe gangsters.
-
- Event: I haven't been to the gym for a year
- Excuse: I've been too busy training my pet dragon.
-
- Event: {{$input}}
- """.stripIndent();
-
- var excuseFunction = new KernelFunctionFromPrompt.Builder()
- .withName("Excuse")
- .withTemplate(promptTemplate)
- .withDefaultExecutionSettings(
- new PromptExecutionSettings.Builder()
- .withTemperature(0.4)
- .withTopP(1)
- .withMaxTokens(100)
- .build())
- .build();
-
- WireMockUtil.mockChatCompletionResponse("I missed the F1 final race", "a-response");
-
- var result = kernel.invokeAsync(excuseFunction)
- .withArguments(
- KernelArguments.builder()
- .withInput("I missed the F1 final race")
- .build())
- .block();
-
- Assertions.assertEquals("a-response", result.getResult());
-
- WireMockUtil.mockChatCompletionResponse("sorry I forgot your birthday", "a-response-2");
-
- result = kernel.invokeAsync(excuseFunction)
- .withArguments(
- KernelArguments.builder()
- .withInput("sorry I forgot your birthday")
- .build())
- .block();
-
- Assertions.assertEquals("a-response-2", result.getResult());
-
- WireMockUtil.mockChatCompletionResponse("Translate this date ", "a-response-3");
-
- var date = DateTimeFormatter.ISO_LOCAL_DATE.withZone(ZoneOffset.UTC)
- .format(Instant.ofEpochSecond(1));
- var message = "Translate this date " + date + " to French format";
- var fixedFunction = KernelFunction.createFromPrompt(message)
- .withDefaultExecutionSettings(
- PromptExecutionSettings.builder()
- .withMaxTokens(100)
- .build())
- .build();
-
- FunctionResult fixedFunctionResult = kernel
- .invokeAsync(fixedFunction)
- .block();
-
- Assertions.assertEquals("a-response-3", fixedFunctionResult.getResult());
-
- }
-}
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/ImportingMultiplePluginsTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/ImportingMultiplePluginsTest.java
deleted file mode 100644
index a555937ed..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/ImportingMultiplePluginsTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.tests;
-
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.plugin.KernelPlugin;
-import com.microsoft.semantickernel.plugin.KernelPluginFactory;
-import org.junit.Ignore;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-
-public class ImportingMultiplePluginsTest {
- @Disabled
- @Test
- public void canImportMultiplePlugins() {
- KernelPlugin summarize = KernelPluginFactory.importPluginFromResourcesDirectory(
- "Plugins",
- "SummarizePlugin",
- "Summarize",
- null,
- String.class);
-
- KernelPlugin topics = KernelPluginFactory.importPluginFromResourcesDirectory(
- "Plugins",
- "SummarizePlugin",
- "Topics",
- null,
- String.class);
-
- KernelPlugin notegen = KernelPluginFactory.importPluginFromResourcesDirectory(
- "Plugins",
- "SummarizePlugin",
- "Notegen",
- null,
- String.class);
-
- Kernel kernel = Kernel.builder()
- .withPlugin(summarize)
- .withPlugin(topics)
- .withPlugin(notegen)
- .build();
-
- Assertions.assertEquals(3,
- kernel.getPlugin("SummarizePlugin").getFunctions().size());
- Assertions.assertEquals("Summarize",
- kernel.getPlugin("SummarizePlugin").get("Summarize").getName());
- Assertions.assertEquals("Topics",
- kernel.getPlugin("SummarizePlugin").get("Topics").getName());
- Assertions.assertEquals("Notegen",
- kernel.getPlugin("SummarizePlugin").get("Notegen").getName());
-
- }
-}
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/KernelHooksTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/KernelHooksTest.java
deleted file mode 100644
index 4a128fb16..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/KernelHooksTest.java
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.tests;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.OpenAIClientBuilder;
-import com.github.tomakehurst.wiremock.junit5.WireMockRuntimeInfo;
-import com.github.tomakehurst.wiremock.junit5.WireMockTest;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.Kernel.Builder;
-import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
-import com.microsoft.semantickernel.hooks.KernelHook.FunctionInvokedHook;
-import com.microsoft.semantickernel.hooks.KernelHook.FunctionInvokingHook;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunctionFromPrompt;
-import com.microsoft.semantickernel.semanticfunctions.OutputVariable;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import java.util.concurrent.atomic.AtomicBoolean;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-@WireMockTest
-public class KernelHooksTest {
-
- private static Builder getKernelBuilder(WireMockRuntimeInfo wmRuntimeInfo) {
- final OpenAIAsyncClient client = new OpenAIClientBuilder()
- .endpoint("http://localhost:" + wmRuntimeInfo.getHttpPort())
- .buildAsyncClient();
-
- ChatCompletionService openAIChatCompletion = OpenAIChatCompletion.builder()
- .withModelId("gpt-35-turbo")
- .withOpenAIAsyncClient(client)
- .build();
-
- return Kernel.builder()
- .withAIService(ChatCompletionService.class, openAIChatCompletion);
- }
-
- @Test
- public void getUsageAsync(WireMockRuntimeInfo wmRuntimeInfo) {
- WireMockUtil.mockChatCompletionResponse("Write a random paragraph about", "a-response");
- Kernel kernel = getKernelBuilder(wmRuntimeInfo).build();
-
- System.out.println("\n======== Get Usage Data ========\n");
-
- // Initialize prompt
- String functionPrompt = "Write a random paragraph about: {{$input}}.";
-
- KernelFunction excuseFunction = KernelFunctionFromPrompt.builder()
- .withTemplate(functionPrompt)
- .withName("Excuse")
- .withDefaultExecutionSettings(PromptExecutionSettings
- .builder()
- .withMaxTokens(100)
- .withTemperature(0.4)
- .withTopP(1)
- .build())
- .withOutputVariable(new OutputVariable<>("result", String.class))
- .build();
-
- AtomicBoolean preHookTriggered = new AtomicBoolean(false);
-
- FunctionInvokingHook preHook = event -> {
- preHookTriggered.set(true);
- return event;
- };
-
- AtomicBoolean removedPreExecutionHandlerTriggered = new AtomicBoolean(false);
-
- FunctionInvokingHook removedPreExecutionHandler = event -> {
- removedPreExecutionHandlerTriggered.set(true);
- return event;
- };
-
- AtomicBoolean postExecutionHandlerTriggered = new AtomicBoolean(false);
-
- FunctionInvokedHook postExecutionHandler = event -> {
- postExecutionHandlerTriggered.set(true);
- return event;
- };
-
- kernel.getGlobalKernelHooks().addHook(preHook);
-
- // Demonstrate pattern for removing a handler.
- kernel.getGlobalKernelHooks().addHook("pre-invoke-removed", removedPreExecutionHandler);
- kernel.getGlobalKernelHooks().removeHook("pre-invoke-removed");
- kernel.getGlobalKernelHooks().addHook(postExecutionHandler);
-
- kernel.invokeAsync(
- excuseFunction)
- .withArguments(
- KernelArguments
- .builder()
- .withVariable("input", "I missed the F1 final race")
- .build())
- .block();
-
- Assertions.assertTrue(preHookTriggered.get());
- Assertions.assertFalse(removedPreExecutionHandlerTriggered.get());
- Assertions.assertTrue(postExecutionHandlerTriggered.get());
- }
-}
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/RenderingTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/RenderingTest.java
deleted file mode 100644
index ff4655d26..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/RenderingTest.java
+++ /dev/null
@@ -1,308 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-package com.microsoft.semantickernel.tests;
-
-import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.OpenAIClientBuilder;
-import com.github.tomakehurst.wiremock.WireMockServer;
-import com.github.tomakehurst.wiremock.junit5.WireMockTest;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
-import com.microsoft.semantickernel.aiservices.openai.textcompletion.OpenAITextGenerationService;
-import com.microsoft.semantickernel.plugin.KernelPluginFactory;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunction;
-import com.microsoft.semantickernel.semanticfunctions.KernelArguments;
-import com.microsoft.semantickernel.semanticfunctions.PromptTemplateConfig;
-import com.microsoft.semantickernel.semanticfunctions.annotations.DefineKernelFunction;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import com.microsoft.semantickernel.services.textcompletion.TextGenerationService;
-import java.util.List;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import reactor.core.publisher.Mono;
-
-@WireMockTest
-public class RenderingTest {
-
- private WireMockServer wm;
-
- @BeforeEach
- public void before() {
- wm = new WireMockServer();
- }
-
- @AfterEach
- public void after() {
- wm.stop();
- }
-
- @Test
- public void textSemanticKernelTemplateXml() {
- buildTextKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{$value}}
- """)
- .withTemplateFormat(PromptTemplateConfig.SEMANTIC_KERNEL_TEMPLATE_FORMAT)
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "\"hello world\"")
- .build())
- .block();
-
- // The actual body will be escaped as its json
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString()
- .contains("\\\"hello world\\\""));
- }
-
- @Test
- public void textSemanticKernelTemplate() {
- buildTextKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{$value}}
- """)
- .withTemplateFormat(PromptTemplateConfig.SEMANTIC_KERNEL_TEMPLATE_FORMAT)
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "{{$ignore}}")
- .withVariable("ignore", "dont show")
- .build())
- .block();
-
- Assertions.assertFalse(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("dont show"));
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("{{$ignore}}"));
- }
-
- @Test
- public void textHandleBarsTemplate() {
- buildTextKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{value}}
- """)
- .withTemplateFormat("handlebars")
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "{{ignore}}")
- .withVariable("ignore", "dont show")
- .build())
- .block();
-
- Assertions.assertFalse(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("dont show"));
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("{{ignore}}"));
- }
-
- @Test
- public void chatSemanticKernelTemplateXml() {
- buildChatKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{$value}}
- """)
- .withTemplateFormat(PromptTemplateConfig.SEMANTIC_KERNEL_TEMPLATE_FORMAT)
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "\"hello world\"")
- .build())
- .block();
-
- // The actual body will be escaped as its json
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString()
- .contains("\\\"hello world\\\""));
- }
-
- @Test
- public void chatSemanticKernelTemplate() {
- buildChatKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{$value}}
- """)
- .withTemplateFormat(PromptTemplateConfig.SEMANTIC_KERNEL_TEMPLATE_FORMAT)
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "{{$ignore}}")
- .withVariable("ignore", "dont show")
- .build())
- .block();
-
- Assertions.assertFalse(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("dont show"));
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("{{$ignore}}"));
- }
-
- @Test
- public void chatHandleBarsTemplate() {
- buildChatKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{value}}
- """)
- .withTemplateFormat("handlebars")
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "{{ignore}}")
- .withVariable("ignore", "dont show")
- .build())
- .block();
-
- Assertions.assertFalse(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("dont show"));
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("{{ignore}}"));
- }
-
- @Test
- public void chatSemanticKernelTemplate2() {
- buildChatKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- Value: {{$value}}
- """)
- .withTemplateFormat(PromptTemplateConfig.SEMANTIC_KERNEL_TEMPLATE_FORMAT)
- .build())
- .withArguments(KernelArguments
- .builder()
- .withVariable("value", "{{$ignore}}")
- .withVariable("ignore", "dont show")
- .build())
- .block();
-
- Assertions.assertFalse(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("dont show"));
- Assertions.assertTrue(
- wm.getAllServeEvents().get(0).getRequest().getBodyAsString().contains("{{$ignore}}"));
- }
-
- @DefineKernelFunction(name = "WithEmptyListReturn")
- public List WithEmptyListReturn() {
- return List.of();
- }
-
- @DefineKernelFunction(name = "WithListReturn")
- public List WithListReturn() {
- return List.of(1, 2, 3);
- }
-
- @DefineKernelFunction(name = "WithListReturn2", returnType = "java.util.List")
- public List WithListReturn2() {
- return List.of(4, 5, 6);
- }
-
- @DefineKernelFunction(name = "WithListReturn3", returnType = "java.util.List")
- public Mono> WithListReturn3() {
- return Mono.just(List.of(7, 8, 9));
- }
-
- @DefineKernelFunction(name = "WithListReturn4", returnType = "java.util.List")
- public Mono> WithListReturn4() {
- return Mono.just(List.of());
- }
-
- @Test
- public void canHandleIterableReturnFromFunction() {
- buildChatKernel()
- .invokeAsync(
- KernelFunction
- .createFromPrompt("""
- {{RenderingTest-WithEmptyListReturn}}
- {{RenderingTest-WithListReturn}}
- {{RenderingTest-WithListReturn2}}
- {{RenderingTest-WithListReturn3}}
- {{RenderingTest-WithListReturn4}}
- """)
- .withTemplateFormat("handlebars")
- .build())
- .block();
-
- String requestBody = wm.getAllServeEvents().get(0).getRequest().getBodyAsString();
- Assertions.assertTrue(requestBody.contains("\"content\":\"[]"));
- Assertions.assertTrue(requestBody.contains("\"content\":\"[1, 2, 3]"));
- Assertions.assertTrue(requestBody.contains("\"content\":\"[4, 5, 6]"));
- Assertions.assertTrue(requestBody.contains("\"content\":\"[7, 8, 9]"));
- }
-
- private Kernel buildTextKernel() {
- wm.addStubMapping(
- stubFor(
- ToolCallBehaviourTest.buildTextResponse(" ", """
- "choices": [
- {
- "text": "Value: bar"
- }
- ]
- """)));
- wm.start();
-
- final OpenAIAsyncClient client = new OpenAIClientBuilder()
- .endpoint("http://localhost:" + wm.port() + "/")
- .buildAsyncClient();
-
- TextGenerationService textGenerationService = OpenAITextGenerationService.builder()
- .withOpenAIAsyncClient(client)
- .withModelId("gpt-35-turbo-2")
- .build();
-
- Kernel kernel = Kernel.builder()
- .withAIService(TextGenerationService.class, textGenerationService)
- .build();
- return kernel;
- }
-
- private Kernel buildChatKernel() {
- wm.addStubMapping(
- stubFor(
- ToolCallBehaviourTest.buildResponse(" ", """
- "choices" : [
- {
- "finish_reason" : "stop",
- "index" : 0,
- "message" : {
- "content" : "done",
- "role" : "assistant"
- }
- }
- ]
- """)));
- wm.start();
-
- final OpenAIAsyncClient client = new OpenAIClientBuilder()
- .endpoint("http://localhost:" + wm.port() + "/")
- .buildAsyncClient();
-
- ChatCompletionService textGenerationService = OpenAIChatCompletion.builder()
- .withOpenAIAsyncClient(client)
- .withModelId("gpt-35-turbo-2")
- .build();
-
- Kernel kernel = Kernel.builder()
- .withAIService(ChatCompletionService.class, textGenerationService)
- .withPlugin(KernelPluginFactory.createFromObject(this, "RenderingTest"))
- .build();
- return kernel;
- }
-}
diff --git a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/ResponseSchemaTest.java b/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/ResponseSchemaTest.java
deleted file mode 100644
index 4ec677742..000000000
--- a/api-test/integration-tests/src/test/java/com/microsoft/semantickernel/tests/ResponseSchemaTest.java
+++ /dev/null
@@ -1,311 +0,0 @@
-package com.microsoft.semantickernel.tests;
-
-import com.azure.ai.openai.OpenAIAsyncClient;
-import com.azure.ai.openai.models.ChatCompletions;
-import com.azure.ai.openai.models.ChatCompletionsOptions;
-import com.azure.core.http.HttpHeaders;
-import com.azure.core.http.HttpRequest;
-import com.azure.core.http.rest.RequestOptions;
-import com.azure.core.http.rest.Response;
-import com.azure.json.JsonOptions;
-import com.azure.json.JsonWriter;
-import com.azure.json.implementation.DefaultJsonReader;
-import com.azure.json.implementation.DefaultJsonWriter;
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.microsoft.semantickernel.Kernel;
-import com.microsoft.semantickernel.aiservices.openai.chatcompletion.OpenAIChatCompletion;
-import com.microsoft.semantickernel.contextvariables.converters.ContextVariableJacksonConverter;
-import com.microsoft.semantickernel.implementation.EmbeddedResourceLoader;
-import com.microsoft.semantickernel.implementation.EmbeddedResourceLoader.ResourceLocation;
-import com.microsoft.semantickernel.orchestration.FunctionResult;
-import com.microsoft.semantickernel.orchestration.PromptExecutionSettings;
-import com.microsoft.semantickernel.orchestration.responseformat.JsonSchemaResponseFormat;
-import com.microsoft.semantickernel.semanticfunctions.HandlebarsPromptTemplateFactory;
-import com.microsoft.semantickernel.semanticfunctions.KernelFunctionYaml;
-import com.microsoft.semantickernel.services.chatcompletion.ChatCompletionService;
-import java.io.IOException;
-import java.io.StringWriter;
-import org.apache.commons.text.StringEscapeUtils;
-import org.jetbrains.annotations.NotNull;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-import reactor.core.publisher.Mono;
-
-public class ResponseSchemaTest {
-
- public static class TestClass {
-
- private final String name;
-
- @JsonCreator
- public TestClass(
- @JsonProperty("name") String name) {
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
- }
-
- @Test
- public void sendsResponseSchemaFromTemplate() throws IOException {
- OpenAIAsyncClient client = getOpenAIAsyncClient(
- """
- {
- "name": "Test name"
- }
- """
- .stripIndent());
- Kernel kernel = buildKernel(client);
-
- var getIntent = KernelFunctionYaml.fromPromptYaml(
- EmbeddedResourceLoader
- .readFile("responseSchema.prompt.yaml", ResponseSchemaTest.class,
- ResourceLocation.CLASSPATH_ROOT),
- new HandlebarsPromptTemplateFactory());
-
- FunctionResult response = getIntent.invokeAsync(kernel)
- .withResultTypeAutoConversion(TestClass.class)
- .block();
-
- verifyCalled(client,
- """
- {
- "type":"json_schema",
- "json_schema":{
- "strict":true,
- "name":"Test",
- "schema":{
- "type" : "object",
- "properties" : {
- "name" : {
- "type" : "string"
- }
- },
- "required" : [
- "name"
- ],
- "additionalProperties" : false
- }
- }
- }
- """
- );
- }
-
- @Disabled
- @Test
- public void sendsResponseSchema() {
- OpenAIAsyncClient client = getOpenAIAsyncClient(
- """
- {
- "name": "Test name"
- }
- """
- .stripIndent());
-
- Kernel kernel = buildKernel(client);
-
- PromptExecutionSettings promptExecutionSettings = PromptExecutionSettings.builder()
- .withResponseFormat(
- JsonSchemaResponseFormat.builder()
- .setResponseFormat(TestClass.class)
- .setName("Test")
- .build()
- )
- .build();
-
- FunctionResult response = kernel.invokePromptAsync(
- "Generate TestClass")
- .withTypeConverter(ContextVariableJacksonConverter.create(TestClass.class))
- .withResultType(TestClass.class)
- .withPromptExecutionSettings(promptExecutionSettings)
- .block();
-
- verifyCalled(client,
- """
- {
- "type":"json_schema",
- "json_schema":{
- "strict":true,
- "name":"Test",
- "schema":{
- "type" : "object",
- "properties" : {
- "name" : {
- "type" : "string"
- }
- },
- "required" : [ "name" ],
- "additionalProperties" : false
- }
- }
- }
- """
- );
- }
-
- private static void verifyCalled(OpenAIAsyncClient client, String expected) {
- Mockito.verify(client, Mockito.atLeastOnce())
- .getChatCompletionsWithResponse(
- Mockito.any(),
- Mockito.