From 9e2e9b518a479b9ead07c88ead0fb511d3a1980a Mon Sep 17 00:00:00 2001
From: Marco Prado <31007209+pradorodriguez@users.noreply.github.com>
Date: Tue, 29 Apr 2025 17:40:10 -0400
Subject: [PATCH] Update index.md

File updated: semantic-kernel/concepts/plugins/index.md
Description: The posted code does not run, most likely because of Semantic Kernel library updates.
Changes: I updated the code so that it runs correctly.
---
 semantic-kernel/concepts/plugins/index.md | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/semantic-kernel/concepts/plugins/index.md b/semantic-kernel/concepts/plugins/index.md
index d3425adb..5b9f1133 100644
--- a/semantic-kernel/concepts/plugins/index.md
+++ b/semantic-kernel/concepts/plugins/index.md
@@ -156,7 +156,7 @@ public class LightModel
 ::: zone pivot="programming-language-python"
 
 ```python
-from typing import TypedDict, Annotated
+from typing import TypedDict, Annotated, List, Optional
 
 class LightModel(TypedDict):
    id: int
@@ -302,8 +302,8 @@ from semantic_kernel.functions import kernel_function
 from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
 from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior
 from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase
-from semantic_kernel.contents.chat_history import ChatHistory
-from semantic_kernel.functions.kernel_arguments import KernelArguments
+from semantic_kernel.contents.chat_history import ChatHistory, ChatMessageContent
+from semantic_kernel.contents.utils.author_role import AuthorRole
 from semantic_kernel.connectors.ai.open_ai.prompt_execution_settings.azure_chat_prompt_execution_settings import (
     AzureChatPromptExecutionSettings,
 )
@@ -317,7 +317,7 @@ async def main():
 
     chat_completion = AzureChatCompletion(
         deployment_name="your_models_deployment_name",
         api_key="your_api_key",
-        base_url="your_base_url",
+        endpoint="your_base_url",
     )
     kernel.add_service(chat_completion)
@@ -333,7 +333,12 @@ async def main():
 
     # Create a history of the conversation
     history = ChatHistory()
-    history.add_message("Please turn on the lamp")
+    history.add_message(
+        ChatMessageContent(
+            role=AuthorRole.USER,
+            content="Please turn on the lamp"
+        )
+    )
 
     # Get the response from the AI
     result = await chat_completion.get_chat_message_content(
@@ -437,4 +442,4 @@ By storing data locally, you can keep the information private and secure while a
 
 Use one of the techniques described in the [Providing functions return type schema to LLM](./adding-native-plugins.md#provide-function-return-type-information-in-function-description) section to provide the function's return type schema to the AI model. By utilizing a well-defined return type schema, the AI model can accurately identify the intended properties, eliminating potential inaccuracies that may arise when the model makes assumptions based on incomplete or ambiguous information in the absence of the schema.
 
-Consequently, this enhances the accuracy of function calls, leading to more reliable and precise outcomes.
\ No newline at end of file
+Consequently, this enhances the accuracy of function calls, leading to more reliable and precise outcomes.