#!/usr/bin/env python
# coding: utf-8

# [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/aurelio-labs/semantic-router/blob/main/docs/00-introduction.ipynb) [![Open nbviewer](https://raw.githubusercontent.com/pinecone-io/examples/master/assets/nbviewer-shield.svg)](https://nbviewer.org/github/aurelio-labs/semantic-router/blob/main/docs/00-introduction.ipynb)

# # Semantic Router Intro

# The Semantic Router library can be used as a super-fast decision-making layer on top of LLMs. Rather than waiting on a slow agent to decide what to do, we use semantic vector space to choose routes, cutting decision time from seconds down to milliseconds.

# ## Getting Started

# We start by installing the library:

# In[ ]:


get_ipython().system('pip install -qU "semantic-router==0.1.0.dev3"')


# Next we define a `Route` for each decision category. Each route maps a name to a list of example phrases (utterances) that should trigger it.

# In[ ]:


from semantic_router import Route

politics = Route(
    name="politics",
    utterances=[
        "isn't politics the best thing ever",
        "why don't you tell me about your political opinions",
        "don't you just love the president",
        "don't you just hate the president",
        "they're going to destroy this country!",
        "they will save the country!",
    ],
)


# Let's define another for good measure:

# In[ ]:


chitchat = Route(
    name="chitchat",
    utterances=[
        "how's the weather today?",
        "how are things going?",
        "lovely weather today",
        "the weather is horrendous",
        "let's go to the chippy",
    ],
)

routes = [politics, chitchat]


# Now we initialize our embedding model:

# In[ ]:


import os
from getpass import getpass

from semantic_router.encoders import CohereEncoder, OpenAIEncoder

# os.environ["COHERE_API_KEY"] = os.getenv("COHERE_API_KEY") or getpass(
#     "Enter Cohere API Key: "
# )
os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY") or getpass(
    "Enter OpenAI API Key: "
)

# encoder = CohereEncoder()
encoder = OpenAIEncoder()


# Now we define the `Router`. When called, the router consumes text (a query) and outputs the category (`Route`) it belongs to. To initialize a `Router` we need our `encoder` model and a list of `routes`.

# In[ ]:


from semantic_router.routers import SemanticRouter

sr = SemanticRouter(encoder=encoder, routes=routes, auto_sync="local")


# Now we can test it:

# In[ ]:


sr("don't you love politics?")


# In[ ]:


sr("how's the weather today?")


# Both are classified accurately. What if we send a query that is unrelated to our existing `Route` objects?

# In[ ]:


sr("I'm interested in learning about llama 2")


# ---
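
# For an unrelated query like the one above, the router should not match any route. A minimal sketch of branching on the result follows; it assumes the object returned by calling the router exposes a `name` attribute that is `None` when no route passes the similarity threshold (check the library docs for your installed version).

# In[ ]:


# Branch on the router's decision; fall back when no route matches.
result = sr("I'm interested in learning about llama 2")

if result.name == "politics":
    print("politics route matched: steer the conversation elsewhere")
elif result.name == "chitchat":
    print("chitchat route matched: respond casually")
else:
    # result.name is assumed to be None when no route is triggered
    print("no route matched: fall back to default LLM behaviour")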