-
Notifications
You must be signed in to change notification settings - Fork 213
/
Copy pathllm_integrations.py
83 lines (62 loc) · 2.44 KB
/
llm_integrations.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
import os
from langchain_aws import ChatBedrock
from langchain_cohere import ChatCohere
from langchain_google_vertexai import ChatVertexAI
from langchain_mistralai import ChatMistralAI
from langchain_openai import AzureChatOpenAI, ChatOpenAI
# Selected LLM provider; must match a key of MAP_LLM_TYPE_TO_CHAT_MODEL,
# defaulting to "openai" when the LLM_TYPE env var is unset.
LLM_TYPE = os.getenv("LLM_TYPE", "openai")
def init_openai_chat(temperature):
    """Build a streaming ChatOpenAI client for the model named by CHAT_MODEL."""
    # stream_options.include_usage makes the stream report token usage,
    # which allows recording of LLM metrics.
    client_kwargs = {
        "model": os.getenv("CHAT_MODEL"),
        "streaming": True,
        "temperature": temperature,
        "model_kwargs": {"stream_options": {"include_usage": True}},
    }
    return ChatOpenAI(**client_kwargs)
def init_vertex_chat(temperature):
    """Build a streaming ChatVertexAI client for the model named by CHAT_MODEL."""
    # opentelemetry-instrumentation-vertexai is included by EDOT, but does not
    # yet support streaming, so instrument via the Langtrace Python SDK instead.
    from langtrace_python_sdk.instrumentation import VertexAIInstrumentation

    VertexAIInstrumentation().instrument()
    chat_model = os.getenv("CHAT_MODEL")
    return ChatVertexAI(model_name=chat_model, streaming=True, temperature=temperature)
def init_azure_chat(temperature):
    """Build a streaming AzureChatOpenAI client for the CHAT_DEPLOYMENT deployment."""
    # stream_options.include_usage makes the stream report token usage,
    # which allows recording of LLM metrics.
    client_kwargs = {
        "model": os.getenv("CHAT_DEPLOYMENT"),
        "streaming": True,
        "temperature": temperature,
        "model_kwargs": {"stream_options": {"include_usage": True}},
    }
    return AzureChatOpenAI(**client_kwargs)
def init_bedrock(temperature):
    """Build a streaming ChatBedrock client for the model id in CHAT_MODEL."""
    chat_model_id = os.getenv("CHAT_MODEL")
    # Temperature is forwarded through model_kwargs rather than as a
    # top-level argument, matching the original call shape.
    return ChatBedrock(
        model_id=chat_model_id,
        streaming=True,
        model_kwargs={"temperature": temperature},
    )
def init_mistral_chat(temperature):
    """Build a streaming ChatMistralAI client for the model named by CHAT_MODEL."""
    chat_model = os.getenv("CHAT_MODEL")
    return ChatMistralAI(model=chat_model, streaming=True, temperature=temperature)
def init_cohere_chat(temperature):
    """Build a ChatCohere client for the model named by CHAT_MODEL."""
    # Cohere is not yet in EDOT, so instrument via the Langtrace Python SDK.
    from langtrace_python_sdk.instrumentation import CohereInstrumentation

    CohereInstrumentation().instrument()
    chat_model = os.getenv("CHAT_MODEL")
    return ChatCohere(model=chat_model, temperature=temperature)
# Dispatch table mapping each supported LLM_TYPE value to its factory
# function; get_llm() looks up LLM_TYPE here and joins the keys (in this
# order) into its error message.
MAP_LLM_TYPE_TO_CHAT_MODEL = {
    "azure": init_azure_chat,
    "bedrock": init_bedrock,
    "openai": init_openai_chat,
    "vertex": init_vertex_chat,
    "mistral": init_mistral_chat,
    "cohere": init_cohere_chat,
}
def get_llm(temperature=0):
    """Return a chat model for the provider selected by the LLM_TYPE env var.

    Args:
        temperature: Sampling temperature forwarded to the provider factory.

    Raises:
        ValueError: If LLM_TYPE is not one of the supported providers.
            (ValueError subclasses Exception, so existing callers that
            caught the old bare Exception still work.)
    """
    # Single dict lookup instead of `in` check followed by indexing.
    init_chat = MAP_LLM_TYPE_TO_CHAT_MODEL.get(LLM_TYPE)
    if init_chat is None:
        supported = ", ".join(MAP_LLM_TYPE_TO_CHAT_MODEL)
        # Message text kept byte-identical to the original.
        raise ValueError(
            f"LLM type not found. Please set LLM_TYPE to one of: {supported}."
        )
    return init_chat(temperature=temperature)