parent db48f13e3e
commit 3d7eab04aa
@ -0,0 +1,19 @@
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI
import os

llm = ChatOpenAI(
    api_key=os.environ["GITHUB_TOKEN"],
    base_url="https://models.github.ai/inference",
    model="openai/gpt-4o-mini",
)

messages = [
    SystemMessage(content="Translate the following from English into Italian"),
    HumanMessage(content="hi!"),
]


# works
response = llm.invoke(messages)
print(response.content)
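A quick variation, not part of the diff: the same setup also streams the reply token by token. This is a sketch only, reusing the llm and messages objects defined in the snippet above.

# sketch only, reusing llm and messages from the snippet above
for chunk in llm.stream(messages):
    print(chunk.content, end="", flush=True)
print()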
@ -0,0 +1,33 @@
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_openai import ChatOpenAI
import os
from typing_extensions import Annotated, TypedDict

class add(TypedDict):
    """Add two integers."""

    # Annotations must have the type and can optionally include a default value and description (in that order).
    a: Annotated[int, ..., "First integer"]
    b: Annotated[int, ..., "Second integer"]

tools = [add]

functions = {
    "add": lambda a, b: a + b
}

llm = ChatOpenAI(
    api_key=os.environ["GITHUB_TOKEN"],
    base_url="https://models.github.ai/inference",
    model="openai/gpt-4o-mini",
)

llm_with_tools = llm.bind_tools(tools)

query = "What is 3 + 12?"

res = llm_with_tools.invoke(query)
if res.tool_calls:
    for tool in res.tool_calls:
        print("TOOL CALL: ", functions[tool["name"]](**tool["args"]))
print("CONTENT: ", res.content)
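For completeness (not in the diff): a sketch of returning the tool result to the model as a ToolMessage so it can produce a final natural-language answer. It assumes the llm_with_tools, functions, query, and res objects from the snippet above.

from langchain_core.messages import ToolMessage

# sketch only: execute each requested tool and hand the result back to the model
history = [HumanMessage(content=query), res]
for tool_call in res.tool_calls:
    result = functions[tool_call["name"]](**tool_call["args"])
    history.append(ToolMessage(content=str(result), tool_call_id=tool_call["id"]))
final = llm_with_tools.invoke(history)
print("FINAL: ", final.content)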
@ -0,0 +1,14 @@
# pip install -qU "langchain[openai]"

from langchain_openai import ChatOpenAI
import os

llm = ChatOpenAI(
    api_key=os.environ["GITHUB_TOKEN"],
    base_url="https://models.github.ai/inference",
    model="openai/gpt-4o-mini",
)

# works
response = llm.invoke("What is 13 raised to the .3432 power?")
print(response.content)
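As an aside (not in the diff), the "langchain[openai]" extra installed above is usually paired with init_chat_model; here is a sketch of the equivalent client, assuming extra keyword arguments are forwarded to ChatOpenAI.

from langchain.chat_models import init_chat_model
import os

# sketch only: same endpoint and model, built via init_chat_model
llm = init_chat_model(
    "openai/gpt-4o-mini",
    model_provider="openai",
    api_key=os.environ["GITHUB_TOKEN"],
    base_url="https://models.github.ai/inference",
)
print(llm.invoke("What is 13 raised to the .3432 power?").content)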