#!/usr/bin/env python
# coding: utf-8

# # Basic Loading of the Kernel

# To run the notebooks we recommend using Poetry and starting a shell with a virtual environment
# prepared to use SK.
#
# See [DEV_SETUP.md](../../python/DEV_SETUP.md) for more information.

# In[ ]:

get_ipython().system('python -m pip install semantic-kernel==1.0.3')

# In[ ]:

from semantic_kernel import Kernel

kernel = Kernel()

# ### Configuring API Keys and Endpoints
#
# #### Option 1: using OpenAI
#
# Add your [OpenAI API key](https://openai.com/product/) to either your environment variables or to the `.env` file in the same folder (the org ID is only needed if you belong to multiple orgs):
#
# ```
# OPENAI_API_KEY="sk-..."
# OPENAI_ORG_ID=""
# ```
#
# The environment variable names should match the names used in the `.env` file, as shown above.
#
# If using the `.env` file, configure the `env_file_path` parameter with a valid path when creating the chat completion service:
#
# ```
# chat_completion = OpenAIChatCompletion(service_id="test", env_file_path=<path_to_env_file>)
# ```
#
# Use keyword arguments to instantiate an OpenAI chat completion service and add it to the kernel, as shown in the service-selection cell below (an optional sketch with explicit credential arguments appears at the end of this notebook).
#
# #### Option 2: using Azure OpenAI
#
# Add your [Azure OpenAI Service key](https://learn.microsoft.com/azure/cognitive-services/openai/quickstart?pivots=programming-language-studio) settings to either your system's environment variables or to the `.env` file in the same folder:
#
# ```
# AZURE_OPENAI_API_KEY="..."
# AZURE_OPENAI_ENDPOINT="https://..."
# AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="..."
# AZURE_OPENAI_TEXT_DEPLOYMENT_NAME="..."
# AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME="..."
# ```
#
# The environment variable names should match the names used in the `.env` file, as shown above.
#
# If using the `.env` file, configure the `env_file_path` parameter with a valid path when creating the chat completion service:
#
# ```
# chat_completion = AzureChatCompletion(service_id="test", env_file_path=<path_to_env_file>)
# ```
#
# When the kernel makes AI requests, it needs settings such as the endpoint URL and credentials for the AI models.
#
# The SDK currently supports OpenAI and Azure OpenAI, among other connectors.
#
# If you need an Azure OpenAI key, go [here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart?pivots=rest-api).

# In[ ]:

from services import Service

# Select a service to use for this notebook (available services: OpenAI, AzureOpenAI, HuggingFace)
selectedService = Service.OpenAI

# In[ ]:

service_id = None
if selectedService == Service.OpenAI:
    from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion

    service_id = "oai_chat_gpt"
    kernel.add_service(
        OpenAIChatCompletion(service_id=service_id, ai_model_id="gpt-3.5-turbo-1106"),
    )
elif selectedService == Service.AzureOpenAI:
    from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion

    service_id = "aoai_chat_completion"
    kernel.add_service(
        AzureChatCompletion(service_id=service_id),
    )

# Great, now that you're familiar with setting up the Semantic Kernel, let's see [how we can use it to run prompts](02-running-prompts-from-file.ipynb).
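# Optional: if you prefer to pass credentials explicitly rather than relying on environment variables or a `.env` file, the cell below is a minimal sketch of keyword-argument instantiation. All key, org, endpoint, and deployment values are placeholders you must replace with your own, and the keyword names used here (`api_key`, `org_id`, `deployment_name`, `endpoint`) are assumed to match the connector signatures in semantic-kernel 1.0.x; check the connector docstrings if they differ in your version.

# In[ ]:

# Sketch only: register a second service configured with explicit keyword arguments
# instead of environment variables. Replace the placeholder values before running.
if selectedService == Service.OpenAI:
    from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion

    kernel.add_service(
        OpenAIChatCompletion(
            service_id="oai_chat_explicit",
            ai_model_id="gpt-3.5-turbo-1106",
            api_key="sk-...",  # placeholder: your OpenAI API key
            org_id=None,  # placeholder: only needed if you belong to multiple orgs
        ),
    )
elif selectedService == Service.AzureOpenAI:
    from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion

    kernel.add_service(
        AzureChatCompletion(
            service_id="aoai_chat_explicit",
            deployment_name="...",  # placeholder: your chat deployment name
            endpoint="https://<your-resource>.openai.azure.com/",  # placeholder: your Azure OpenAI endpoint
            api_key="...",  # placeholder: your Azure OpenAI key
        ),
    )

# Either way, calling `kernel.get_service(service_id)` with the ID you registered should return the configured service if you want to confirm the setup before moving on.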