|
| 1 | +# Copyright (c) Microsoft. All rights reserved. |
| 2 | + |
| 3 | +import asyncio |
| 4 | +from dataclasses import dataclass |
| 5 | +from typing import Annotated |
| 6 | + |
| 7 | +from samples.concepts.setup.chat_completion_services import Services, get_chat_completion_service_and_request_settings |
| 8 | +from semantic_kernel import Kernel |
| 9 | +from semantic_kernel.connectors.ai.function_choice_behavior import FunctionChoiceBehavior |
| 10 | +from semantic_kernel.connectors.memory.azure_cosmos_db.azure_cosmos_db_no_sql_store import AzureCosmosDBNoSQLStore |
| 11 | +from semantic_kernel.contents import ChatHistory |
| 12 | +from semantic_kernel.contents.chat_message_content import ChatMessageContent |
| 13 | +from semantic_kernel.core_plugins.math_plugin import MathPlugin |
| 14 | +from semantic_kernel.core_plugins.time_plugin import TimePlugin |
| 15 | +from semantic_kernel.data.record_definition.vector_store_model_decorator import vectorstoremodel |
| 16 | +from semantic_kernel.data.record_definition.vector_store_record_fields import ( |
| 17 | + VectorStoreRecordDataField, |
| 18 | + VectorStoreRecordKeyField, |
| 19 | +) |
| 20 | +from semantic_kernel.data.vector_storage.vector_store import VectorStore |
| 21 | +from semantic_kernel.data.vector_storage.vector_store_record_collection import VectorStoreRecordCollection |
| 22 | + |
"""
This sample demonstrates how to build a conversational chatbot
using Semantic Kernel. It features auto function calling,
but uses Azure CosmosDB as storage for the chat history.
This sample stores and reads the chat history at every turn.
This is not the best way to do it, but it clearly demonstrates the mechanics.

A further refinement would be to write only once, when a conversation is done.
There is also no logic to check whether there is anything to write.
You could also enhance the ChatHistoryModel with a summary and a vector for it,
in order to search for similar conversations.
"""
| 35 | + |
| 36 | + |
# 1. We first create simple datamodel for the chat history.
# Note that this model does not contain any vectors,
# those can be added, for instance to store a summary of the conversation.
@vectorstoremodel
@dataclass
class ChatHistoryModel:
    """Record persisted per chat session: its key, owner, and serialized messages."""

    # Unique key for the conversation; used as the record id when upserting/getting.
    session_id: Annotated[str, VectorStoreRecordKeyField]
    # Owner of the session; marked filterable so sessions can be queried per user.
    user_id: Annotated[str, VectorStoreRecordDataField(is_filterable=True)]
    # Messages as plain dicts (produced by ChatMessageContent.model_dump),
    # so the record stays JSON-serializable for Cosmos DB.
    messages: Annotated[list[dict[str, str]], VectorStoreRecordDataField(is_filterable=True)]
| 46 | + |
| 47 | + |
| 48 | +# 2. We then create a class that extends the ChatHistory class |
| 49 | +# and implements the methods to store and read the chat history. |
| 50 | +# This could also use one of the history reducers to make |
| 51 | +# sure the database doesn't grow too large. |
| 52 | +# It adds a `store` attribute and a couple of methods. |
class ChatHistoryInCosmosDB(ChatHistory):
    """This class extends the ChatHistory class to store the chat history in a Cosmos DB."""

    # Key of the conversation record in the collection.
    session_id: str
    # Owner of the conversation, stored on the record.
    user_id: str
    # Vector store used to obtain the backing collection.
    store: VectorStore
    # Lazily created by `create_collection`; while None, store/read are no-ops.
    collection: VectorStoreRecordCollection[str, ChatHistoryModel] | None = None

    async def create_collection(self, collection_name: str) -> None:
        """Create a collection with the built-in data model using the vector store.

        First create the store, then call this method to create the collection itself.
        """
        self.collection = self.store.get_collection(
            collection_name=collection_name,
            data_model_type=ChatHistoryModel,
        )
        await self.collection.create_collection_if_not_exists()

    async def store_messages(self) -> None:
        """Store the chat history in the Cosmos DB.

        Note that we use model_dump to convert the chat message content into a serializable format.
        """
        if self.collection:
            # Upsert overwrites the record for this session, so the full
            # message list is re-written on every call.
            await self.collection.upsert(
                ChatHistoryModel(
                    session_id=self.session_id,
                    user_id=self.user_id,
                    messages=[msg.model_dump() for msg in self.messages],
                )
            )

    async def read_messages(self) -> None:
        """Read the chat history from the Cosmos DB.

        Note that we use the model_validate method to convert the serializable format back into a ChatMessageContent.
        """
        if self.collection:
            record = await self.collection.get(self.session_id)
            if record:
                # Appends to (does not replace) any messages already in memory.
                for message in record.messages:
                    self.messages.append(ChatMessageContent.model_validate(message))
| 96 | + |
| 97 | + |
# 3. We now create a fairly standard kernel, with functions and a chat service.
# Create and configure the kernel.
kernel = Kernel()

# Load some sample plugins (for demonstration of function calling).
kernel.add_plugin(MathPlugin(), plugin_name="math")
kernel.add_plugin(TimePlugin(), plugin_name="time")

# You can select from the following chat completion services that support function calling:
# - Services.OPENAI
# - Services.AZURE_OPENAI
# - Services.AZURE_AI_INFERENCE
# - Services.ANTHROPIC
# - Services.BEDROCK
# - Services.GOOGLE_AI
# - Services.MISTRAL_AI
# - Services.OLLAMA
# - Services.ONNX
# - Services.VERTEX_AI
# - Services.DEEPSEEK
# Please make sure you have configured your environment correctly for the selected chat completion service.
chat_completion_service, request_settings = get_chat_completion_service_and_request_settings(Services.AZURE_OPENAI)

# Configure the function choice behavior. Here, we set it to Auto, where auto_invoke=True by default.
# With `auto_invoke=True`, the model will automatically choose and call functions as needed.
# NOTE(review): no "ChatBot" plugin is registered in this sample; the exclusion
# filter appears to be carried over from related samples — confirm it is needed.
request_settings.function_choice_behavior = FunctionChoiceBehavior.Auto(filters={"excluded_plugins": ["ChatBot"]})

kernel.add_service(chat_completion_service)
| 126 | + |
| 127 | + |
# 4. The main chat loop, which takes a history object and prompts the user for input.
# It then adds the user input to the history and gets a response from the chat completion service.
# Finally, it prints the response and saves the chat history to the Cosmos DB.
async def chat(history: ChatHistoryInCosmosDB) -> bool:
    """Run a single conversation turn against the stored history.

    Loads the persisted messages, prompts the user once, fetches the
    assistant's reply, and writes the updated history back to Cosmos DB.
    Returns False when the user wants to stop ('exit', Ctrl-C, or EOF),
    True otherwise.
    """
    await history.read_messages()
    print(f"Chat history successfully loaded {len(history.messages)} messages.")

    # A brand-new conversation gets a system prompt and a short seed exchange.
    if not history.messages:
        history.add_system_message(
            "You are a chat bot. Your name is Mosscap and you have one goal: figure out what people need."
        )
        history.add_user_message("Hi there, who are you?")
        history.add_assistant_message("I am Mosscap, a chat bot. I'm trying to figure out what people need.")

    try:
        user_text = input("User:> ")
    except (KeyboardInterrupt, EOFError):
        print("\n\nExiting chat...")
        return False

    if user_text.strip().lower() == "exit":
        print("\n\nExiting chat...")
        return False

    # Record the user's turn before asking the model.
    history.add_user_message(user_text)

    reply = await chat_completion_service.get_chat_message_content(history, request_settings, kernel=kernel)
    if reply:
        print(f"Mosscap:> {reply}")
        history.add_message(reply)

    # Persist the full history for this session at the end of every turn.
    print(f"Saving {len(history.messages)} messages to AzureCosmosDB.")
    await history.store_messages()
    return True
| 169 | + |
| 170 | + |
async def main() -> None:
    """Entry point: connect to Cosmos DB, run the chat loop, then clean up.

    Creates the store (and database, if needed), wraps it in a
    ChatHistoryInCosmosDB, creates the backing collection, and loops over
    `chat` until the user exits or an error occurs.
    """
    delete_when_done = True
    session_id = "session1"
    chatting = True
    # 5. We now create the store, ChatHistory and collection and start the chat loop.

    # First we enter the store context manager to connect.
    # The create_database flag will create the database if it does not exist.
    async with AzureCosmosDBNoSQLStore(create_database=True) as store:
        # Then we create the chat history in CosmosDB.
        history = ChatHistoryInCosmosDB(store=store, session_id=session_id, user_id="user")
        # Finally we create the collection.
        await history.create_collection(collection_name="chat_history")
        print(
            "Welcome to the chat bot!\n"
            " Type 'exit' to exit.\n"
            " Try a math question to see function calling in action (e.g. 'what is 3+3?')."
        )
        try:
            while chatting:
                chatting = await chat(history)
        except Exception as exc:
            # Surface the failure instead of silently swallowing it; this is a
            # sample, so we report and fall through to cleanup rather than re-raise.
            print(f"Closing chat... Unexpected error: {exc}")
        finally:
            # Clean up the sample collection even if the loop failed or was
            # interrupted, so reruns start from a fresh state.
            if delete_when_done and history.collection:
                await history.collection.delete_collection()
| 196 | + |
| 197 | + |
# Run the async entry point only when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
0 commit comments