#!/usr/bin/env python
# coding: utf-8

# # DataBricks
#
# Integration with the Databricks LLMs API.
#
# ## Prerequisites
#
# - A [Databricks personal access token](https://docs.databricks.com/en/dev-tools/auth/pat.html)
#   to query and access Databricks model serving endpoints.
#
# - A [Databricks workspace](https://docs.databricks.com/en/workspace/index.html)
#   in a [supported region](https://docs.databricks.com/en/machine-learning/model-serving/model-serving-limits.html#regions)
#   to use the Foundation Model APIs with pay-per-token billing.
#
# ## Setup
#
# If you're opening this notebook on Colab, you will probably need to install
# LlamaIndex 🦙.

# In[ ]:

# FIX: the original export called run_line_magic('', 'pip install ...'),
# which passes an empty magic name and fails; the magic name is 'pip'.
get_ipython().run_line_magic('pip', 'install llama-index-llms-databricks')

# In[ ]:

get_ipython().system('pip install llama-index')

# In[ ]:

from llama_index.llms.databricks import DataBricks

# ```bash
# export DATABRICKS_API_KEY=<your api key>
# export DATABRICKS_API_BASE=<your api serving endpoint>
# ```
#
# Alternatively, you can pass your API key and serving endpoint to the LLM
# when you initialize it:

# In[ ]:

llm = DataBricks(
    model="databricks-dbrx-instruct",
    api_key="your_api_key",
    api_base="https://[your-work-space].cloud.databricks.com/serving-endpoints/[your-serving-endpoint]",
)

# A list of available LLM models can be found
# [here](https://docs.databricks.com/en/machine-learning/model-serving/score-foundation-models.html).
# (FIX: the original linked to the Groq model list at console.groq.com, a
# copy-paste leftover unrelated to Databricks.)

# In[ ]:

response = llm.complete("Explain the importance of open source LLMs")

# In[ ]:

print(response)

# #### Call `chat` with a list of messages

# In[ ]:

from llama_index.core.llms import ChatMessage

messages = [
    ChatMessage(
        role="system", content="You are a pirate with a colorful personality"
    ),
    ChatMessage(role="user", content="What is your name"),
]
resp = llm.chat(messages)

# In[ ]:

print(resp)

# ### Streaming
#
# Using the `stream_complete` endpoint

# In[ ]:

response = llm.stream_complete("Explain the importance of open source LLMs")

# In[ ]:

# Stream tokens as they arrive; each chunk carries the new text in `.delta`.
for r in response:
    print(r.delta, end="")

# Using the `stream_chat` endpoint

# In[ ]:

from llama_index.core.llms import ChatMessage

messages = [
    ChatMessage(
        role="system", content="You are a pirate with a colorful personality"
    ),
    ChatMessage(role="user", content="What is your name"),
]
resp = llm.stream_chat(messages)

# In[ ]:

for r in resp:
    print(r.delta, end="")