Skip to content

Commit a2720a6

Browse files
authored Jun 11, 2024
Python: Fix broken concept samples. Run concept examples as tests. (microsoft#6660)
### Motivation and Context We had many broken concept examples. These samples are also not covered by tests so it's hard to know when things are out of sync. <!-- Thank you for your contribution to the semantic-kernel repo! Please help reviewers and future users, providing the following information: 1. Why is this change required? 2. What problem does it solve? 3. What scenario does it contribute to? 4. If it fixes an open issue, please link to the issue here. --> ### Description Fix the Python concept samples, and also cover them with tests so we aren't checking in code that is going to break them. <!-- Describe your changes, the overall approach, the underlying design. These notes will help understanding how your code works. Thanks! --> ### Contribution Checklist <!-- Before submitting this PR, please make sure: --> - [X] The code builds clean without any errors or warnings - [X] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [X] All unit tests pass, and I have added new tests where possible - [X] I didn't break anyone 😄
1 parent 424df70 commit a2720a6

30 files changed

+423
-121
lines changed
 

‎.github/workflows/python-integration-tests.yml

+8
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,10 @@ jobs:
9292
AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}}
9393
AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}}
9494
MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}}
95+
AZURE_KEY_VAULT_ENDPOINT: ${{secrets.AZURE_KEY_VAULT_ENDPOINT}}
96+
AZURE_KEY_VAULT_CLIENT_ID: ${{secrets.AZURE_KEY_VAULT_CLIENT_ID}}
97+
AZURE_KEY_VAULT_CLIENT_SECRET: ${{secrets.AZURE_KEY_VAULT_CLIENT_SECRET}}
98+
ACA_POOL_MANAGEMENT_ENDPOINT: ${{secrets.ACA_POOL_MANAGEMENT_ENDPOINT}}
9599
run: |
96100
if ${{ matrix.os == 'ubuntu-latest' }}; then
97101
docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest
@@ -154,6 +158,10 @@ jobs:
154158
AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}}
155159
AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}}
156160
MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}}
161+
AZURE_KEY_VAULT_ENDPOINT: ${{secrets.AZURE_KEY_VAULT_ENDPOINT}}
162+
AZURE_KEY_VAULT_CLIENT_ID: ${{secrets.AZURE_KEY_VAULT_CLIENT_ID}}
163+
AZURE_KEY_VAULT_CLIENT_SECRET: ${{secrets.AZURE_KEY_VAULT_CLIENT_SECRET}}
164+
ACA_POOL_MANAGEMENT_ENDPOINT: ${{secrets.ACA_POOL_MANAGEMENT_ENDPOINT}}
157165
run: |
158166
if ${{ matrix.os == 'ubuntu-latest' }}; then
159167
docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest

‎python/.env.example

+7-3
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,14 @@
11
OPENAI_API_KEY=""
2+
OPEN_AI_CHAT_MODEL_ID=""
3+
OPEN_AI_TEXT_MODEL_ID=""
4+
OPEN_AI_EMBEDDING_MODEL_ID=""
25
OPENAI_ORG_ID=""
3-
AZURE_OPENAI_SYSTEM_MESSAGE="You are an AI assistant that helps people find information"
4-
AZURE_OPENAI_API_VERSION="2024-02-15-preview"
5-
AZURE_OPENAI_DEPLOYMENT_NAME=""
6+
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=""
7+
AZURE_OPENAI_TEXT_DEPLOYMENT_NAME=""
8+
AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=""
69
AZURE_OPENAI_ENDPOINT=""
710
AZURE_OPENAI_API_KEY=""
11+
AZURE_OPENAI_API_VERSION="2024-02-15-preview"
812
AZURE_OPENAI_TEMPERATURE=0
913
AZURE_OPENAI_MAX_TOKENS=1000
1014
AZURE_OPENAI_TOP_P=1.0

‎python/samples/concepts/chat_completion/openai_logit_bias.py

+2
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@
1919

2020

2121
def _config_ban_tokens(settings: PromptExecutionSettings, keys: dict[Any, Any]):
22+
if settings.logit_bias is None:
23+
settings.logit_bias = {}
2224
# Map each token in the keys list to a bias value from -100 (a potential ban) to 100 (exclusive selection)
2325
for k in keys:
2426
# -100 to potentially ban all tokens in the list

‎python/samples/concepts/filtering/prompt_filters.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@
5151
# this type of filter allows you to manipulate the final message being sent
5252
# as is shown below, or the inputs used to generate the message by making a change to the
5353
# arguments before calling next.
54-
@kernel.filter(FilterTypes.PROMPT_RENDERING_FILTER)
54+
@kernel.filter(FilterTypes.PROMPT_RENDERING)
5555
async def prompt_rendering_filter(context: PromptRenderContext, next):
5656
await next(context)
5757
context.rendered_prompt = f"You pretend to be Mosscap, but you are Papssom who is the opposite of Moscapp in every way {context.rendered_prompt or ''}" # noqa: E501

‎python/samples/concepts/grounding/grounded.py

+8-3
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,9 @@
22

33
import asyncio
44
import logging
5+
import os
56

6-
from samples.utils import Colors
7+
from samples.concepts.resources.utils import Colors
78
from semantic_kernel import Kernel
89
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion
910
from semantic_kernel.functions import KernelArguments
@@ -70,7 +71,7 @@ def setup(use_azure: bool = False, plugin_name: str = "GroundingPlugin"):
7071
)
7172

7273
# note: using plugins from the samples folder
73-
plugins_directory = "../samples/plugins/"
74+
plugins_directory = os.path.join(__file__, "../../../../../prompt_template_samples/")
7475

7576
kernel.add_plugin(parent_directory=plugins_directory, plugin_name=plugin_name)
7677

@@ -173,5 +174,9 @@ async def run_grounding(use_azure: bool = False):
173174
print(f"{Colors.CBOLD.value}Finished!{Colors.CEND.value}")
174175

175176

177+
async def main() -> None:
178+
await run_grounding(use_azure=False)
179+
180+
176181
if __name__ == "__main__":
177-
asyncio.run(run_grounding(use_azure=True))
182+
asyncio.run(main())

‎python/samples/concepts/on_your_data/azure_chat_gpt_with_data_api.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
# }
3636

3737
# Create the data source settings
38-
azure_ai_search_settings = AzureAISearchSettings.create(env_file_path=".env")
38+
azure_ai_search_settings = AzureAISearchSettings.create()
3939

4040
az_source = AzureAISearchDataSource.from_azure_ai_search_settings(azure_ai_search_settings=azure_ai_search_settings)
4141
extra = ExtraBody(data_sources=[az_source])

‎python/samples/concepts/on_your_data/azure_chat_gpt_with_data_api_function_calling.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
kernel = sk.Kernel()
2828

2929
# Create the data source settings
30-
azure_ai_search_settings = AzureAISearchSettings()
30+
azure_ai_search_settings = AzureAISearchSettings.create()
3131
az_source = AzureAISearchDataSource(parameters=azure_ai_search_settings.model_dump())
3232
extra = ExtraBody(data_sources=[az_source])
3333
req_settings = AzureChatPromptExecutionSettings(service_id="chat-gpt", extra_body=extra, tool_choice="auto")
@@ -103,7 +103,7 @@ async def chat() -> bool:
103103
arguments["chat_history"] = history
104104
arguments["user_input"] = user_input
105105
answer = await kernel.invoke(
106-
functions=chat_function,
106+
function=chat_function,
107107
arguments=arguments,
108108
)
109109
print(f"Mosscap:> {answer}")

‎python/samples/concepts/on_your_data/azure_chat_gpt_with_data_api_vector_search.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,8 @@
2424
# Bonded by their love for the natural world and shared curiosity, they uncovered a
2525
# groundbreaking phenomenon in glaciology that could potentially reshape our understanding of climate change.
2626

27-
azure_ai_search_settings = AzureAISearchSettings()
27+
azure_ai_search_settings = AzureAISearchSettings.create()
28+
azure_ai_search_settings = azure_ai_search_settings.model_dump()
2829

2930
# This example index has fields "title", "chunk", and "vector".
3031
# Add fields mapping to the settings.
@@ -42,14 +43,15 @@
4243
azure_ai_search_settings["query_type"] = "vector"
4344

4445
# Create the data source settings
45-
az_source = AzureAISearchDataSource(parameters=azure_ai_search_settings.model_dump())
46+
az_source = AzureAISearchDataSource(parameters=azure_ai_search_settings)
4647
extra = ExtraBody(data_sources=[az_source])
4748
service_id = "chat-gpt"
4849
req_settings = AzureChatPromptExecutionSettings(service_id=service_id, extra_body=extra)
4950

5051
# When using data, use the 2024-02-15-preview API version.
5152
chat_service = AzureChatCompletion(
5253
service_id="chat-gpt",
54+
api_version="2024-02-15-preview",
5355
)
5456
kernel.add_service(chat_service)
5557

‎python/samples/concepts/planners/azure_openai_function_calling_stepwise_planner.py

+5-2
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,11 @@ async def main():
1919
),
2020
)
2121

22-
cur_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources")
23-
kernel.add_plugin(parent_directory=cur_dir, plugin_name="email_plugin")
22+
plugin_path = os.path.join(
23+
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
24+
"resources",
25+
)
26+
kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin")
2427

2528
kernel.add_plugin(MathPlugin(), "MathPlugin")
2629
kernel.add_plugin(TimePlugin(), "TimePlugin")

‎python/samples/concepts/planners/openai_function_calling_stepwise_planner.py

+5-2
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,11 @@ async def main():
2020
),
2121
)
2222

23-
cur_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources")
24-
kernel.add_plugin(parent_directory=cur_dir, plugin_name="email_plugin")
23+
plugin_path = os.path.join(
24+
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
25+
"resources",
26+
)
27+
kernel.add_plugin(parent_directory=plugin_path, plugin_name="email_plugin")
2528
kernel.add_plugins({"MathPlugin": MathPlugin(), "TimePlugin": TimePlugin()})
2629

2730
questions = [

‎python/samples/concepts/planners/sequential_planner.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ async def main():
2525
plan = await planner.create_plan(goal=ask)
2626

2727
# ask the sequential planner to execute the identified function.
28-
result = await plan.invoke()
28+
result = await plan.invoke(kernel=kernel)
2929

3030
for step in plan._steps:
3131
print(step.description, ":", step._state.__dict__)
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
# Copyright (c) Microsoft. All rights reserved.
22

3+
from typing import ClassVar
4+
35
from pydantic import SecretStr
46

5-
from semantic_kernel.connectors.memory.memory_settings_base import BaseModelSettings
6-
from semantic_kernel.kernel_pydantic import HttpsUrl
7+
from semantic_kernel.kernel_pydantic import HttpsUrl, KernelBaseSettings
78

89

9-
class AzureKeyVaultSettings(BaseModelSettings):
10+
class AzureKeyVaultSettings(KernelBaseSettings):
1011
"""Azure Key Vault model settings
1112
1213
Optional:
@@ -18,9 +19,8 @@ class AzureKeyVaultSettings(BaseModelSettings):
1819
(Env var AZURE_KEY_VAULT_CLIENT_SECRET)
1920
"""
2021

22+
env_prefix: ClassVar[str] = "AZURE_KEY_VAULT_"
23+
2124
endpoint: HttpsUrl
2225
client_id: str
2326
client_secret: SecretStr
24-
25-
class Config(BaseModelSettings.Config):
26-
env_prefix = "AZURE_KEY_VAULT_"

‎python/samples/concepts/plugins/openai_plugin_azure_key_vault.py

+184-55
Original file line numberDiff line numberDiff line change
@@ -1,44 +1,80 @@
11
# Copyright (c) Microsoft. All rights reserved.
22

3-
3+
import json
44
import os
5+
import platform
6+
from functools import reduce
57

68
import httpx
79
from aiohttp import ClientSession
8-
from azure_key_vault_settings import AzureKeyVaultSettings
910

11+
from samples.concepts.plugins.azure_key_vault_settings import AzureKeyVaultSettings
1012
from semantic_kernel import Kernel
13+
from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior
14+
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings
1115
from semantic_kernel.connectors.openai_plugin import OpenAIAuthenticationType, OpenAIFunctionExecutionParameters
12-
from semantic_kernel.functions import KernelPlugin
13-
from semantic_kernel.functions.kernel_arguments import KernelArguments
16+
from semantic_kernel.contents import ChatHistory
17+
from semantic_kernel.contents.chat_message_content import ChatMessageContent
18+
from semantic_kernel.contents.function_call_content import FunctionCallContent
19+
from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent
20+
from semantic_kernel.functions import KernelArguments, KernelFunction, KernelPlugin
1421

22+
# region Helper functions
1523

16-
async def add_secret_to_key_vault(kernel: Kernel, plugin: KernelPlugin):
17-
"""Adds a secret to the Azure Key Vault."""
18-
arguments = KernelArguments()
19-
arguments["secret_name"] = "Foo" # nosec
20-
arguments["api_version"] = "7.0"
21-
arguments["value"] = "Bar"
22-
arguments["enabled"] = True
23-
result = await kernel.invoke(
24-
function=plugin["SetSecret"],
25-
arguments=arguments,
26-
)
2724

28-
print(f"Secret added to Key Vault: {result}")
25+
def get_file_url(relative_path):
26+
absolute_path = os.path.abspath(relative_path)
27+
if platform.system() == "Windows":
28+
return f"file:///{absolute_path.replace('\\', '/')}"
29+
return f"file://{absolute_path}"
2930

3031

31-
async def get_secret_from_key_vault(kernel: Kernel, plugin: KernelPlugin):
32-
"""Gets a secret from the Azure Key Vault."""
33-
arguments = KernelArguments()
34-
arguments["secret_name"] = "Foo" # nosec
35-
arguments["api_version"] = "7.0"
36-
result = await kernel.invoke(
37-
function=plugin["GetSecret"],
38-
arguments=arguments,
32+
def load_and_update_openai_spec():
33+
# Construct the path to the OpenAI spec file
34+
openai_spec_file = os.path.join(
35+
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
36+
"resources",
37+
"open_ai_plugins",
38+
"akv-openai.json"
3939
)
4040

41-
print(f"Secret retrieved from Key Vault: {result}")
41+
# Read the OpenAI spec file
42+
with open(openai_spec_file) as file:
43+
openai_spec = json.load(file)
44+
45+
# Adjust the OpenAI spec file to use the correct file URL based on platform
46+
openapi_yaml_path = os.path.join(
47+
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
48+
"resources",
49+
"open_ai_plugins",
50+
"akv-openapi.yaml"
51+
)
52+
openai_spec["api"]["url"] = get_file_url(openapi_yaml_path)
53+
54+
return json.dumps(openai_spec, indent=4)
55+
56+
57+
def print_tool_calls(message: ChatMessageContent) -> None:
58+
# A helper method to pretty print the tool calls from the message.
59+
# This is only triggered if auto invoke tool calls is disabled.
60+
items = message.items
61+
formatted_tool_calls = []
62+
for i, item in enumerate(items, start=1):
63+
if isinstance(item, FunctionCallContent):
64+
tool_call_id = item.id
65+
function_name = item.name
66+
function_arguments = item.arguments
67+
formatted_str = (
68+
f"tool_call {i} id: {tool_call_id}\n"
69+
f"tool_call {i} function name: {function_name}\n"
70+
f"tool_call {i} arguments: {function_arguments}"
71+
)
72+
formatted_tool_calls.append(formatted_str)
73+
print("Tool calls:\n" + "\n\n".join(formatted_tool_calls))
74+
75+
# endregion
76+
77+
# region Sample Authentication Provider
4278

4379

4480
class OpenAIAuthenticationProvider:
@@ -101,28 +137,99 @@ async def authenticate_request(
101137
auth_header = f"{scheme} {credential}"
102138
return {"Authorization": auth_header}
103139

140+
# endregion
141+
142+
# region AKV Plugin Functions
143+
144+
145+
async def add_secret_to_key_vault(kernel: Kernel, plugin: KernelPlugin):
146+
"""Adds a secret to the Azure Key Vault."""
147+
arguments = KernelArguments()
148+
arguments["secret_name"] = "Foo" # nosec
149+
arguments["api_version"] = "7.0"
150+
arguments["value"] = "Bar"
151+
arguments["enabled"] = True
152+
result = await kernel.invoke(
153+
function=plugin["SetSecret"],
154+
arguments=arguments,
155+
)
156+
157+
print(f"Secret added to Key Vault: {result}")
158+
159+
160+
async def get_secret_from_key_vault(kernel: Kernel, plugin: KernelPlugin):
161+
"""Gets a secret from the Azure Key Vault."""
162+
arguments = KernelArguments()
163+
arguments["secret_name"] = "Foo" # nosec
164+
arguments["api_version"] = "7.0"
165+
result = await kernel.invoke(
166+
function=plugin["GetSecret"],
167+
arguments=arguments,
168+
)
169+
170+
print(f"Secret retrieved from Key Vault: {result}")
171+
172+
# endregion
104173

105-
async def main():
106-
# This example demonstrates how to connect an Azure Key Vault plugin to the Semantic Kernel.
107-
# To use this example, there are a few requirements:
108-
# 1. Register a client application with the Microsoft identity platform.
109-
# https://learn.microsoft.com/en-us/azure/active-directory/develop/quickstart-register-app
110-
#
111-
# 2. Create an Azure Key Vault
112-
# https://learn.microsoft.com/en-us/azure/key-vault/general/quick-create-portal
113-
# Please make sure to configure the AKV with a Vault Policy, instead of the default RBAC policy
114-
# This is because you will need to assign the Key Vault access policy to the client application you
115-
# registered in step 1. You should give the client application the "Get," "List," and "Set"
116-
# permissions for secrets.
117-
#
118-
# 3. Set your Key Vault endpoint, client ID, and client secret as user secrets using in your .env file:
119-
# AZURE_KEY_VAULT_ENDPOINT = ""
120-
# AZURE_KEY_VAULT_CLIENT_ID = ""
121-
# AZURE_KEY_VAULT_CLIENT_SECRET = ""
122-
#
123-
# 4. Replace your tenant ID with the "TENANT_ID" placeholder in
124-
# python/samples/kernel-syntax-examples/resources/akv-openai.json
125174

175+
kernel = Kernel()
176+
177+
kernel.add_service(OpenAIChatCompletion(service_id="chat"))
178+
179+
chat_function = kernel.add_function(
180+
prompt="{{$chat_history}}{{$user_input}}",
181+
plugin_name="ChatBot",
182+
function_name="Chat",
183+
)
184+
185+
execution_settings = OpenAIChatPromptExecutionSettings(
186+
service_id="chat",
187+
max_tokens=2000,
188+
temperature=0.7,
189+
top_p=0.8,
190+
function_call_behavior=FunctionCallBehavior.EnableFunctions(
191+
auto_invoke=True, filters={"included_plugins": ["AzureKeyVaultPlugin"]}
192+
),
193+
)
194+
195+
history = ChatHistory()
196+
history.add_system_message("Use Api-version 7.0, if needed.")
197+
198+
arguments = KernelArguments(settings=execution_settings)
199+
200+
201+
async def handle_streaming(
202+
kernel: Kernel,
203+
chat_function: "KernelFunction",
204+
arguments: KernelArguments,
205+
) -> None:
206+
"""Handle streaming chat messages."""
207+
response = kernel.invoke_stream(
208+
chat_function,
209+
return_function_results=False,
210+
arguments=arguments,
211+
)
212+
213+
print("Security Agent:> ", end="")
214+
streamed_chunks: list[StreamingChatMessageContent] = []
215+
async for message in response:
216+
if not execution_settings.function_call_behavior.auto_invoke_kernel_functions and isinstance(
217+
message[0], StreamingChatMessageContent
218+
):
219+
streamed_chunks.append(message[0])
220+
else:
221+
print(str(message[0]), end="")
222+
223+
if streamed_chunks:
224+
streaming_chat_message = reduce(lambda first, second: first + second, streamed_chunks)
225+
print("Auto tool calls is disabled, printing returned tool calls...")
226+
print_tool_calls(streaming_chat_message)
227+
228+
print("\n")
229+
230+
231+
async def main() -> None:
232+
"""Main function to run the chat bot."""
126233
azure_keyvault_settings = AzureKeyVaultSettings.create()
127234
client_id = azure_keyvault_settings.client_id
128235
client_secret = azure_keyvault_settings.client_secret.get_secret_value()
@@ -138,17 +245,11 @@ async def main():
138245
}
139246
)
140247

141-
kernel = Kernel()
142-
143-
openai_spec_file = os.path.join(
144-
os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "open_ai_plugins", "akv-openai.json"
145-
)
146-
with open(openai_spec_file) as file:
147-
openai_spec = file.read()
248+
openai_spec = load_and_update_openai_spec()
148249

149250
http_client = httpx.AsyncClient()
150251

151-
plugin = await kernel.add_plugin_from_openai(
252+
await kernel.add_plugin_from_openai(
152253
plugin_name="AzureKeyVaultPlugin",
153254
plugin_str=openai_spec,
154255
execution_parameters=OpenAIFunctionExecutionParameters(
@@ -159,8 +260,36 @@ async def main():
159260
),
160261
)
161262

162-
await add_secret_to_key_vault(kernel, plugin)
163-
await get_secret_from_key_vault(kernel, plugin)
263+
chatting = True
264+
print(
265+
"Welcome to the chat bot!\
266+
\n Type 'exit' to exit.\
267+
\n Try chatting about Azure Key Vault!"
268+
)
269+
while chatting:
270+
chatting = await chat()
271+
272+
273+
async def chat() -> bool:
274+
"""Chat with the bot."""
275+
try:
276+
user_input = input("User:> ")
277+
except KeyboardInterrupt:
278+
print("\n\nExiting chat...")
279+
return False
280+
except EOFError:
281+
print("\n\nExiting chat...")
282+
return False
283+
284+
if user_input == "exit":
285+
print("\n\nExiting chat...")
286+
return False
287+
arguments["user_input"] = user_input
288+
arguments["chat_history"] = history
289+
290+
await handle_streaming(kernel, chat_function, arguments=arguments)
291+
292+
return True
164293

165294

166295
if __name__ == "__main__":

‎python/samples/concepts/prompt_templates/azure_chat_gpt_api_handlebars.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import logging
55

66
from semantic_kernel import Kernel
7+
from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior
78
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
89
from semantic_kernel.contents import ChatHistory
910
from semantic_kernel.functions import KernelArguments
@@ -31,7 +32,7 @@
3132
req_settings.max_tokens = 2000
3233
req_settings.temperature = 0.7
3334
req_settings.top_p = 0.8
34-
req_settings.auto_invoke_kernel_functions = False
35+
req_settings.function_call_behavior = FunctionCallBehavior.AutoInvokeKernelFunctions()
3536

3637

3738
chat_function = kernel.add_function(

‎python/samples/concepts/prompt_templates/azure_chat_gpt_api_jinja2.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
import logging
55

66
from semantic_kernel import Kernel
7+
from semantic_kernel.connectors.ai.function_call_behavior import FunctionCallBehavior
78
from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion
89
from semantic_kernel.contents import ChatHistory
910
from semantic_kernel.functions import KernelArguments
@@ -31,7 +32,7 @@
3132
req_settings.max_tokens = 2000
3233
req_settings.temperature = 0.7
3334
req_settings.top_p = 0.8
34-
req_settings.auto_invoke_kernel_functions = False
35+
req_settings.function_call_behavior = FunctionCallBehavior.AutoInvokeKernelFunctions()
3536

3637

3738
chat_function = kernel.add_function(

‎python/samples/concepts/prompt_templates/load_yaml_prompt.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -13,15 +13,18 @@ async def main():
1313

1414
service_id = "default"
1515
chat_service = OpenAIChatCompletion(
16-
ai_model_id="gpt-4-0613",
16+
ai_model_id="gpt-3.5-turbo-1106",
1717
service_id=service_id,
1818
)
1919
kernel.add_service(chat_service)
2020

2121
chat_history = ChatHistory(system_message="Assistant is a large language model")
2222

23-
cur_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resources")
24-
plugin = kernel.add_plugin(plugin_name="sample_plugins", parent_directory=cur_dir)
23+
plugin_path = os.path.join(
24+
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
25+
"resources",
26+
)
27+
plugin = kernel.add_plugin(plugin_name="sample_plugins", parent_directory=plugin_path)
2528

2629
result = await kernel.invoke(plugin["Parrot"], count=2, user_message="I love parrots.", chat_history=chat_history)
2730
print(result)

‎python/samples/concepts/prompt_templates/template_language.py

+7-9
Original file line numberDiff line numberDiff line change
@@ -11,19 +11,16 @@
1111
async def main():
1212
kernel = Kernel()
1313

14-
useAzureOpenAI = False
15-
model = "gpt-35-turbo" if useAzureOpenAI else "gpt-3.5-turbo-1106"
16-
service_id = model
17-
14+
service_id = "template_language"
1815
kernel.add_service(
19-
OpenAIChatCompletion(service_id=service_id, ai_model_id=model),
16+
OpenAIChatCompletion(service_id=service_id),
2017
)
2118

2219
kernel.add_plugin(TimePlugin(), "time")
2320

2421
function_definition = """
25-
Today is: {{time.Date}}
26-
Current time is: {{time.Time}}
22+
Today is: {{time.date}}
23+
Current time is: {{time.time}}
2724
2825
Answer to the following questions using JSON syntax, including the data used.
2926
Is it morning, afternoon, evening, or night (morning/afternoon/evening/night)?
@@ -32,7 +29,7 @@ async def main():
3229

3330
print("--- Rendered Prompt ---")
3431
prompt_template_config = PromptTemplateConfig(template=function_definition)
35-
prompt_template = KernelPromptTemplate(prompt_template_config)
32+
prompt_template = KernelPromptTemplate(prompt_template_config=prompt_template_config)
3633
rendered_prompt = await prompt_template.render(kernel, arguments=None)
3734
print(rendered_prompt)
3835

@@ -41,10 +38,11 @@ async def main():
4138
template=function_definition,
4239
execution_settings=OpenAIChatPromptExecutionSettings(service_id=service_id, max_tokens=100),
4340
function_name="kind_of_day",
41+
prompt_template=prompt_template,
4442
)
4543

4644
print("--- Prompt Function Result ---")
47-
result = await kernel.invoke(kind_of_day)
45+
result = await kernel.invoke(function=kind_of_day)
4846
print(result)
4947

5048

‎python/samples/concepts/rag/self-critique_rag.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ async def populate_memory(memory: SemanticTextMemory) -> None:
2929
async def main() -> None:
3030
kernel = Kernel()
3131

32-
azure_ai_search_settings = AzureAISearchSettings()
32+
azure_ai_search_settings = AzureAISearchSettings.create()
3333
vector_size = 1536
3434

3535
# Setting up OpenAI services for text completion and text embedding
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
11
# Copyright (c) Microsoft. All rights reserved.
22

3-
# intentionally left empty
3+
from samples.concepts.resources.utils import Colors
4+
5+
__all__ = ["Colors"]

‎python/samples/concepts/search/bing_plugin_examples.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ async def example2(kernel: Kernel, service_id: str):
6565
oracle = kernel.add_function(
6666
function_name="oracle",
6767
plugin_name="OraclePlugin",
68-
template=prompt,
68+
prompt=prompt,
6969
execution_settings=OpenAIChatPromptExecutionSettings(
7070
service_id=service_id, max_tokens=150, temperature=0, top_p=1
7171
),

‎python/samples/getting_started/.env.example

+6-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,11 @@
11
OPENAI_API_KEY=""
2+
OPEN_AI_CHAT_MODEL_ID=""
3+
OPEN_AI_TEXT_MODEL_ID=""
4+
OPEN_AI_EMBEDDING_MODEL_ID=""
25
OPENAI_ORG_ID=""
3-
AZURE_OPENAI_DEPLOYMENT_NAME=""
6+
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=""
7+
AZURE_OPENAI_TEXT_DEPLOYMENT_NAME=""
8+
AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=""
49
AZURE_OPENAI_ENDPOINT=""
510
AZURE_OPENAI_API_KEY=""
611
AZURE_AISEARCH_API_KEY=""
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,14 @@
11
OPENAI_API_KEY=""
2+
OPEN_AI_CHAT_MODEL_ID=""
3+
OPEN_AI_TEXT_MODEL_ID=""
4+
OPEN_AI_EMBEDDING_MODEL_ID=""
25
OPENAI_ORG_ID=""
3-
AZURE_OPENAI_DEPLOYMENT_NAME=""
6+
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=""
7+
AZURE_OPENAI_TEXT_DEPLOYMENT_NAME=""
8+
AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=""
49
AZURE_OPENAI_ENDPOINT=""
510
AZURE_OPENAI_API_KEY=""
11+
AZURE_AISEARCH_API_KEY=""
12+
AZURE_AISEARCH_URL=""
613
WEAVIATE_URL="http://localhost:8080"
714
# WEAVIATE_API_KEY=""
+8-7
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
1-
GLOBAL_LLM_SERVICE="OpenAI" # Toggle between "OpenAI" or "AzureOpenAI"
2-
OPEN_AI_CHAT_COMPLETION_MODEL_ID="gpt-3.5-turbo-0125"
3-
OPEN_AI_TEXT_COMPLETION_MODEL_ID="gpt-3.5-turbo-instruct"
41
OPENAI_API_KEY=""
2+
OPEN_AI_CHAT_MODEL_ID=""
3+
OPEN_AI_TEXT_MODEL_ID=""
4+
OPEN_AI_EMBEDDING_MODEL_ID=""
55
OPENAI_ORG_ID=""
6-
AZURE_OPEN_AI_DEPLOYMENT_TYPE="chat-completion" # chat-completion or text-completion
7-
AZURE_OPEN_AI_CHAT_COMPLETION_DEPLOYMENT_NAME="gpt-35-turbo"
8-
AZURE_OPEN_AI_TEXT_COMPLETION_DEPLOYMENT_NAME="gpt-35-turbo-instruct"
6+
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME=""
7+
AZURE_OPENAI_TEXT_DEPLOYMENT_NAME=""
8+
AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME=""
99
AZURE_OPENAI_ENDPOINT=""
1010
AZURE_OPENAI_API_KEY=""
11-
AZURE_OPENAI_API_VERSION=""
11+
AZURE_AISEARCH_API_KEY=""
12+
AZURE_AISEARCH_URL=""

‎python/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion_base.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -436,7 +436,7 @@ async def _process_function_call(
436436
if parsed_args:
437437
args_cloned.update(parsed_args)
438438
except (FunctionCallInvalidArgumentsException, TypeError) as exc:
439-
logger.exception(
439+
logger.info(
440440
f"Received invalid arguments for function {function_call.name}: {exc}. Trying tool call again."
441441
)
442442
frc = FunctionResultContent.from_function_call_content_and_result(
@@ -485,7 +485,7 @@ async def _process_function_call(
485485
f"{[param.name for param in function_to_call.parameters if param.is_required]}. "
486486
"Please provide the required arguments and try again."
487487
)
488-
logger.exception(msg)
488+
logger.info(msg)
489489
frc = FunctionResultContent.from_function_call_content_and_result(
490490
function_call_content=function_call,
491491
result=msg,

‎python/semantic_kernel/connectors/memory/azure_cognitive_search/azure_ai_search_settings.py

+9
Original file line numberDiff line numberDiff line change
@@ -23,3 +23,12 @@ class AzureAISearchSettings(KernelBaseSettings):
2323
api_key: SecretStr
2424
endpoint: HttpsUrl
2525
index_name: str | None = None
26+
27+
def model_dump(self) -> dict[str, str]:
28+
"""Dump the model to a dictionary."""
29+
data = super().model_dump()
30+
data.update({
31+
"api_key": self.api_key.get_secret_value(),
32+
"endpoint": str(self.endpoint),
33+
})
34+
return data

‎python/semantic_kernel/connectors/search_engine/bing_connector.py

+19-11
Original file line numberDiff line numberDiff line change
@@ -78,14 +78,22 @@ async def search(self, query: str, num_results: int = 1, offset: int = 0) -> lis
7878

7979
headers = {"Ocp-Apim-Subscription-Key": self._settings.api_key.get_secret_value()}
8080

81-
async with (
82-
aiohttp.ClientSession() as session,
83-
session.get(_request_url, headers=headers, raise_for_status=True) as response,
84-
):
85-
if response.status == 200:
86-
data = await response.json()
87-
pages = data.get("webPages", {}).get("value")
88-
if pages:
89-
return list(map(lambda x: x["snippet"], pages)) or []
90-
return None
91-
return []
81+
try:
82+
async with aiohttp.ClientSession() as session, session.get(_request_url, headers=headers) as response:
83+
response.raise_for_status()
84+
if response.status == 200:
85+
data = await response.json()
86+
pages = data.get("webPages", {}).get("value")
87+
if pages:
88+
return list(map(lambda x: x["snippet"], pages)) or []
89+
return None
90+
return []
91+
except aiohttp.ClientResponseError as ex:
92+
logger.error(f"Failed to get search results: {ex}")
93+
raise ServiceInvalidRequestError("Failed to get search results.") from ex
94+
except aiohttp.ClientError as ex:
95+
logger.error(f"Client error occurred: {ex}")
96+
raise ServiceInvalidRequestError("A client error occurred while getting search results.") from ex
97+
except Exception as ex:
98+
logger.error(f"An unexpected error occurred: {ex}")
99+
raise ServiceInvalidRequestError("An unexpected error occurred while getting search results.") from ex

‎python/semantic_kernel/core_plugins/web_search_engine_plugin.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ def __init__(self, connector: "ConnectorBase") -> None:
2727
"""Initializes a new instance of the WebSearchEnginePlugin class."""
2828
self._connector = connector
2929

30-
@kernel_function(description="Performs a web search for a given query")
30+
@kernel_function(name="search", description="Performs a web search for a given query")
3131
async def search(
3232
self,
3333
query: Annotated[str, "The search query"],

‎python/semantic_kernel/functions/kernel_function_decorator.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@ def kernel_function(
4545
if not supplied, the function docstring will be used, can be None.
4646
4747
"""
48-
4948
def decorator(func: Callable[..., object]) -> Callable[..., object]:
5049
"""The actual decorator function."""
5150
setattr(func, "__kernel_function__", True)
@@ -115,7 +114,7 @@ def _parse_parameter(name: str, param: Any, default: Any) -> dict[str, Any]:
115114
logger.debug(f"Parsing param: {name}")
116115
logger.debug(f"Parsing annotation: {param}")
117116
ret: dict[str, Any] = {"name": name}
118-
if default:
117+
if default is not None:
119118
ret["default_value"] = default
120119
ret["is_required"] = False
121120
else:

‎python/tests/samples/test_concepts.py

+112
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
from pytest import mark
4+
5+
from samples.concepts.auto_function_calling.azure_python_code_interpreter_function_calling import (
6+
main as azure_python_code_interpreter_function_calling,
7+
)
8+
from samples.concepts.auto_function_calling.chat_gpt_api_function_calling import main as chat_gpt_api_function_calling
9+
from samples.concepts.chat_completion.azure_chat_gpt_api import main as azure_chat_gpt_api
10+
from samples.concepts.chat_completion.chat_gpt_api import main as chat_gpt_api
11+
from samples.concepts.chat_completion.chat_streaming import main as chat_streaming
12+
from samples.concepts.chat_completion.openai_logit_bias import main as openai_logit_bias
13+
from samples.concepts.filtering.auto_function_invoke_filters import main as auto_function_invoke_filters
14+
from samples.concepts.filtering.function_invocation_filters import main as function_invocation_filters
15+
from samples.concepts.filtering.function_invocation_filters_stream import main as function_invocation_filters_stream
16+
from samples.concepts.filtering.prompt_filters import main as prompt_filters
17+
from samples.concepts.functions.kernel_arguments import main as kernel_arguments
18+
from samples.concepts.grounding.grounded import main as grounded
19+
from samples.concepts.memory.azure_cognitive_search_memory import main as azure_cognitive_search_memory
20+
from samples.concepts.memory.memory import main as memory
21+
from samples.concepts.planners.azure_openai_function_calling_stepwise_planner import (
22+
main as azure_openai_function_calling_stepwise_planner,
23+
)
24+
from samples.concepts.planners.openai_function_calling_stepwise_planner import (
25+
main as openai_function_calling_stepwise_planner,
26+
)
27+
from samples.concepts.planners.sequential_planner import main as sequential_planner
28+
from samples.concepts.plugins.azure_python_code_interpreter import main as azure_python_code_interpreter
29+
from samples.concepts.plugins.openai_function_calling_with_custom_plugin import (
30+
main as openai_function_calling_with_custom_plugin,
31+
)
32+
from samples.concepts.plugins.openai_plugin_azure_key_vault import main as openai_plugin_azure_key_vault
33+
from samples.concepts.plugins.openai_plugin_klarna import main as openai_plugin_klarna
34+
from samples.concepts.plugins.plugins_from_dir import main as plugins_from_dir
35+
from samples.concepts.prompt_templates.azure_chat_gpt_api_handlebars import main as azure_chat_gpt_api_handlebars
36+
from samples.concepts.prompt_templates.azure_chat_gpt_api_jinja2 import main as azure_chat_gpt_api_jinja2
37+
from samples.concepts.prompt_templates.configuring_prompts import main as configuring_prompts
38+
from samples.concepts.prompt_templates.load_yaml_prompt import main as load_yaml_prompt
39+
from samples.concepts.prompt_templates.template_language import main as template_language
40+
from samples.concepts.rag.rag_with_text_memory_plugin import main as rag_with_text_memory_plugin
41+
from samples.concepts.search.bing_search_plugin import main as bing_search_plugin
42+
43+
44+
@mark.asyncio
45+
@mark.parametrize(
46+
"func,responses",
47+
[
48+
(azure_python_code_interpreter_function_calling, ["print('Hello, World!')", "exit"]),
49+
(chat_gpt_api_function_calling, ["What is 3+3?", "exit"]),
50+
(azure_chat_gpt_api, ["Why is the sky blue?", "exit"]),
51+
(chat_gpt_api, ["What is life?", "exit"]),
52+
(chat_streaming, ["Why is the sun hot?", "exit"]),
53+
(openai_logit_bias, []),
54+
(auto_function_invoke_filters, ["What is 3+3?", "exit"]),
55+
(function_invocation_filters, ["What is 3+3?", "exit"]),
56+
(function_invocation_filters_stream, ["What is 3+3?", "exit"]),
57+
(prompt_filters, ["What is the fastest animal?", "exit"]),
58+
(kernel_arguments, []),
59+
(grounded, []),
60+
(azure_cognitive_search_memory, []),
61+
(memory, ["What are my investments?", "exit"]),
62+
(azure_openai_function_calling_stepwise_planner, []),
63+
(openai_function_calling_stepwise_planner, []),
64+
(sequential_planner, []),
65+
(azure_python_code_interpreter, []),
66+
(openai_function_calling_with_custom_plugin, []),
67+
(openai_plugin_azure_key_vault, ["Create a secret with the name 'Foo' and value 'Bar'", "exit"]),
68+
(openai_plugin_klarna, []),
69+
(plugins_from_dir, []),
70+
(azure_chat_gpt_api_handlebars, ["What is 3+3?", "exit"]),
71+
(azure_chat_gpt_api_jinja2, ["What is 3+3?", "exit"]),
72+
(configuring_prompts, ["What is my name?", "exit"]),
73+
(load_yaml_prompt, []),
74+
(template_language, []),
75+
(rag_with_text_memory_plugin, []),
76+
(bing_search_plugin, []),
77+
],
78+
ids=[
79+
"azure_python_code_interpreter_function_calling",
80+
"chat_gpt_api_function_calling",
81+
"azure_chat_gpt_api",
82+
"chat_gpt_api",
83+
"chat_streaming",
84+
"openai_logit_bias",
85+
"auto_function_invoke_filters",
86+
"function_invocation_filters",
87+
"function_invocation_filters_stream",
88+
"prompt_filters",
89+
"kernel_arguments",
90+
"grounded",
91+
"azure_cognitive_search_memory",
92+
"memory",
93+
"azure_openai_function_calling_stepwise_planner",
94+
"openai_function_calling_stepwise_planner",
95+
"sequential_planner",
96+
"azure_python_code_interpreter",
97+
"openai_function_calling_with_custom_plugin",
98+
"openai_plugin_azure_key_vault",
99+
"openai_plugin_klarna",
100+
"plugins_from_dir",
101+
"azure_chat_gpt_api_handlebars",
102+
"azure_chat_gpt_api_jinja2",
103+
"configuring_prompts",
104+
"load_yaml_prompt",
105+
"template_language",
106+
"rag_with_text_memory_plugin",
107+
"bing_search_plugin",
108+
],
109+
)
110+
async def test_concepts(func, responses, monkeypatch):
111+
monkeypatch.setattr("builtins.input", lambda _: responses.pop(0))
112+
await func()

‎python/tests/unit/connectors/open_ai/services/test_open_ai_chat_completion_base.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -192,7 +192,7 @@ async def test_process_tool_calls_with_continuation_on_malformed_arguments():
192192
FunctionCallBehavior.AutoInvokeKernelFunctions(),
193193
)
194194

195-
logger_mock.exception.assert_any_call(
195+
logger_mock.info.assert_any_call(
196196
"Received invalid arguments for function test_function: Malformed arguments. Trying tool call again."
197197
)
198198

0 commit comments

Comments
 (0)
Please sign in to comment.