
Commit fb8edbe

authored Jun 3, 2024
Python: updates to cicd and notebooks (microsoft#6485)
### Motivation and Context

Small update to the test coverage workflow, so it is retriggered based on a workflow run. Small updates to the samples notebooks to use GLOBAL_LLM_SERVICE, so that the service they run against can be controlled through the environment.

### Description

### Contribution Checklist

- [x] The code builds clean without any errors or warnings
- [x] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations
- [x] All unit tests pass, and I have added new tests where possible
- [x] I didn't break anyone 😄
1 parent b0dd67a · commit fb8edbe

23 files changed: +166 -73 lines
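
All of the getting_started notebooks below switch to the same service-selection pattern driven by the GLOBAL_LLM_SERVICE environment variable. As a rough, self-contained sketch of that logic (not the repo code: it reads the environment directly instead of going through the pydantic-based ServiceSettings in samples/service_settings.py, and the Service enum values are assumed to mirror samples/getting_started/services.py):

```python
import os
from enum import Enum


class Service(Enum):
    # Assumed to mirror samples/getting_started/services.py
    OpenAI = "openai"
    AzureOpenAI = "azureopenai"
    HuggingFace = "huggingface"


# The notebooks read this via ServiceSettings; here we read the env var directly.
global_llm_service = os.environ.get("GLOBAL_LLM_SERVICE")  # e.g. "AzureOpenAI"

selectedService = (
    Service.AzureOpenAI  # default when GLOBAL_LLM_SERVICE is not set
    if global_llm_service is None
    else Service(global_llm_service.lower())
)
print(f"Using service type: {selectedService}")
```

With that pattern in place, exporting GLOBAL_LLM_SERVICE=OpenAI (or, via ServiceSettings, typically placing it in a .env file) switches every notebook to another backend without editing any cells.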
 

.github/workflows/python-lint.yml

+1 -1

@@ -1,7 +1,7 @@
 name: Python Code Quality Checks
 on:
   workflow_dispatch:
-  pull_request:
+  pull_request_target:
     branches: [ "main", "feature*" ]
     paths:
       - 'python/**'

.github/workflows/python-samples-tests.yml

+2

@@ -18,6 +18,7 @@ jobs:
       matrix:
         python-version: ["3.10", "3.11", "3.12"]
         os: [ubuntu-latest, windows-latest, macos-latest]
+        service: ['AzureOpenAI']
     steps:
       - uses: actions/checkout@v4
       - name: Install poetry
@@ -31,6 +32,7 @@ jobs:
         id: run_tests
         shell: bash
        env: # Set Azure credentials secret as an input
+          GLOBAL_LLM_SERVICE: ${{ matrix.service }}
           AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME }}
           AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
           AZURE_OPENAI_TEXT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_TEXT_DEPLOYMENT_NAME }}
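
For illustration only, a hypothetical pytest-style guard (not taken from the repo's actual sample tests) showing how the GLOBAL_LLM_SERVICE value injected by the matrix above could gate which backend a sample test exercises:

```python
import os

import pytest

# Injected by the workflow matrix above; the default mirrors the notebooks' fallback.
REQUESTED_SERVICE = os.environ.get("GLOBAL_LLM_SERVICE", "AzureOpenAI")


@pytest.mark.skipif(
    REQUESTED_SERVICE != "AzureOpenAI",
    reason="this hypothetical sample test is wired up for Azure OpenAI only",
)
def test_sample_runs_against_requested_service():
    # Placeholder body; a real sample test would drive the notebook or sample code.
    assert REQUESTED_SERVICE == "AzureOpenAI"
```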

.github/workflows/python-test-coverage.yml

+4

@@ -5,6 +5,10 @@ on:
     branches: ["main", "feature*"]
     paths:
       - "python/**"
+  workflow_run:
+    workflows: ["Python Unit Tests"]
+    types:
+      - in_progress
 
 jobs:
   python-tests-coverage:

python/samples/getting_started/00-getting-started.ipynb

+11 -2

@@ -45,8 +45,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.AzureOpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {
@@ -170,7 +179,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.11.9"
+"version": "3.10.14"
 }
 },
 "nbformat": 4,

python/samples/getting_started/01-basic-loading-the-kernel.ipynb

+14 -5

@@ -30,7 +30,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 1,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -97,19 +97,28 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 3,
 "metadata": {},
 "outputs": [],
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 4,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -155,7 +164,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.11.9"
+"version": "3.10.14"
 },
 "polyglot_notebook": {
 "kernelInfo": {

python/samples/getting_started/02-running-prompts-from-file.ipynb

+10 -1

@@ -117,8 +117,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {

python/samples/getting_started/03-prompt-function-inline.ipynb

+10 -1

@@ -60,8 +60,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {

python/samples/getting_started/04-kernel-arguments-chat.ipynb

+10 -1

@@ -38,8 +38,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {

python/samples/getting_started/05-using-the-planner.ipynb

+10 -1

@@ -35,8 +35,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {

python/samples/getting_started/06-memory-and-embeddings.ipynb

+11 -2

@@ -42,8 +42,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {
@@ -82,7 +91,7 @@
 "if selectedService == Service.AzureOpenAI:\n",
 "    azure_chat_service = AzureChatCompletion(service_id=chat_service_id)\n",
 "    # next line assumes embeddings deployment name is \"text-embedding\", adjust the deployment name to the value of your chat model if needed\n",
-"    embedding_gen = AzureTextEmbedding(deployment_name=\"text-embedding\")\n",
+"    embedding_gen = AzureTextEmbedding(service_id=\"embedding\")\n",
 "    kernel.add_service(azure_chat_service)\n",
 "    kernel.add_service(embedding_gen)\n",
 "elif selectedService == Service.OpenAI:\n",

python/samples/getting_started/07-hugging-face-for-plugins.ipynb

+2 -1

@@ -33,7 +33,8 @@
 "from services import Service\n",
 "\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.HuggingFace"
+"selectedService = Service.HuggingFace\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {

python/samples/getting_started/08-native-function-inline.ipynb

+10 -2

@@ -58,8 +58,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {
@@ -491,7 +500,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"\n",
 "from semantic_kernel.functions import kernel_function\n",
 "\n",
 "\n",

python/samples/getting_started/09-groundedness-checking.ipynb

+13 -3

@@ -92,15 +92,25 @@
 "metadata": {},
 "outputs": [],
 "source": [
+"from services import Service\n",
+"\n",
+"from samples.service_settings import ServiceSettings\n",
 "from semantic_kernel import Kernel\n",
 "from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion, OpenAIChatCompletion\n",
 "\n",
-"kernel = Kernel()\n",
+"service_settings = ServiceSettings()\n",
 "\n",
-"useAzureOpenAI = False\n",
+"# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")\n",
 "\n",
+"kernel = Kernel()\n",
 "# Configure AI service used by the kernel\n",
-"if useAzureOpenAI:\n",
+"if selectedService == Service.AzureOpenAI:\n",
 "    service_id = \"default\"\n",
 "    azure_chat_service = AzureChatCompletion(\n",
 "        service_id=service_id\n",

python/samples/getting_started/10-multiple-results-per-prompt.ipynb

+26 -21

@@ -37,8 +37,17 @@
 "source": [
 "from services import Service\n",
 "\n",
+"from samples.service_settings import ServiceSettings\n",
+"\n",
+"service_settings = ServiceSettings()\n",
+"\n",
 "# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)\n",
-"selectedService = Service.OpenAI"
+"selectedService = (\n",
+"    Service.AzureOpenAI\n",
+"    if service_settings.global_llm_service is None\n",
+"    else Service(service_settings.global_llm_service.lower())\n",
+")\n",
+"print(f\"Using service type: {selectedService}\")"
 ]
 },
 {
@@ -47,22 +56,7 @@
 "id": "508ad44f",
 "metadata": {},
 "outputs": [],
-"source": [
-"from semantic_kernel.contents import ChatHistory\n",
-"\n",
-"if selectedService == Service.OpenAI or selectedService == Service.AzureOpenAI:\n",
-"    from semantic_kernel.connectors.ai.open_ai import (\n",
-"        AzureChatCompletion,\n",
-"        AzureChatPromptExecutionSettings,\n",
-"        AzureTextCompletion,\n",
-"        OpenAIChatCompletion,\n",
-"        OpenAIChatPromptExecutionSettings,\n",
-"        OpenAITextCompletion,\n",
-"        OpenAITextPromptExecutionSettings,\n",
-"    )\n",
-"if selectedService == Service.HuggingFace:\n",
-"    from semantic_kernel.connectors.ai.hugging_face import HuggingFaceTextCompletion"
-]
+"source": []
 },
 {
 "attachments": {},
@@ -81,6 +75,19 @@
 "outputs": [],
 "source": [
 "from semantic_kernel import Kernel\n",
+"from semantic_kernel.connectors.ai.hugging_face import (  # noqa: F401\n",
+"    HuggingFacePromptExecutionSettings,\n",
+"    HuggingFaceTextCompletion,\n",
+")\n",
+"from semantic_kernel.connectors.ai.open_ai import AzureChatPromptExecutionSettings  # noqa: F401\n",
+"from semantic_kernel.connectors.ai.open_ai import OpenAIChatPromptExecutionSettings  # noqa: F401\n",
+"from semantic_kernel.connectors.ai.open_ai import OpenAITextPromptExecutionSettings  # noqa: F401\n",
+"from semantic_kernel.connectors.ai.open_ai import (\n",
+"    AzureChatCompletion,\n",
+"    AzureTextCompletion,\n",
+"    OpenAIChatCompletion,\n",
+"    OpenAITextCompletion,\n",
+")\n",
 "\n",
 "kernel = Kernel()\n",
 "\n",
@@ -201,10 +208,6 @@
 "outputs": [],
 "source": [
 "if selectedService == Service.HuggingFace:\n",
-"    from semantic_kernel.connectors.ai.hugging_face.hf_prompt_execution_settings import (\n",
-"        HuggingFacePromptExecutionSettings,\n",
-"    )\n",
-"\n",
 "    hf_prompt_execution_settings = HuggingFacePromptExecutionSettings(\n",
 "        service_id=\"hf_text\", extension_data={\"max_new_tokens\": 80, \"temperature\": 0.7, \"top_p\": 1}\n",
 "    )"
@@ -269,6 +272,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
+"from semantic_kernel.contents import ChatHistory\n",
+"\n",
 "if selectedService == Service.OpenAI:\n",
 "    chat = ChatHistory()\n",
 "    chat.add_user_message(\n",
0 commit comments