Dockerized Langchain / PY example (#175)
parent
e59bad89e7
commit
07c3aa1869
@ -0,0 +1,5 @@ |
||||
# Container image for the LangChain + LocalAI Python example.
FROM python:3.10-bullseye

# Copy the example sources into the image.
COPY ./langchainpy-localai-example /app

WORKDIR /app

# Install pinned dependencies; --no-cache-dir keeps the image small.
RUN pip install --no-cache-dir -r requirements.txt

# Exec-form ENTRYPOINT. NOTE: the original had a trailing ';' after the
# JSON array, which breaks exec-form parsing (Docker falls back to shell
# form and runs the literal string via /bin/sh -c, failing at runtime).
ENTRYPOINT [ "python", "./simple_demo.py" ]
@ -0,0 +1,24 @@ |
||||
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Current File",
            "type": "python",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "redirectOutput": true,
            "justMyCode": false
        },
        {
            "name": "Python: Attach to Port 5678",
            "type": "python",
            "request": "attach",
            "connect": {
                "host": "localhost",
                "port": 5678
            },
            "justMyCode": false
        }
    ]
}
@ -0,0 +1,3 @@ |
||||
{
    "python.defaultInterpreterPath": "${workspaceFolder}/.venv/Scripts/python"
}
@ -0,0 +1,39 @@ |
||||
"""LangChain + LocalAI chat-completion demo.

Builds a two-message chat prompt (system + human) and sends it to an
OpenAI-compatible endpoint (LocalAI by default) via langchain's ChatOpenAI.
"""

import os

from langchain.chat_models import ChatOpenAI
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)

print('Langchain + LocalAI PYTHON Tests')

# Connection settings, overridable via environment variables.
# Defaults target the `api` service of the docker-compose setup.
base_path = os.environ.get('OPENAI_API_BASE', 'http://api:8080/v1')
key = os.environ.get('OPENAI_API_KEY', '-')
model_name = os.environ.get('MODEL_NAME', 'gpt-3.5-turbo')

# temperature=0 for deterministic output; max_tokens caps the reply length.
chat = ChatOpenAI(temperature=0, openai_api_base=base_path, openai_api_key=key, model_name=model_name, max_tokens=100)

print("Created ChatOpenAI for ", chat.model_name)

# System message parameterized on the translation direction; the human
# message carries the text to translate.
template = "You are a helpful assistant that translates {input_language} to {output_language}."
system_message_prompt = SystemMessagePromptTemplate.from_template(template)
human_template = "{text}"
human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)

chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])

print("ABOUT to execute")

# get a chat completion from the formatted messages
# (original discarded the result, so the demo never showed the reply)
result = chat(chat_prompt.format_prompt(input_language="English", output_language="French", text="I love programming.").to_messages())
print(result)

print(".")
@ -0,0 +1,32 @@ |
||||
aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2022.12.7
charset-normalizer==3.1.0
colorama==0.4.6
dataclasses-json==0.5.7
debugpy==1.6.7
frozenlist==1.3.3
greenlet==2.0.2
idna==3.4
langchain==0.0.157
marshmallow==3.19.0
marshmallow-enum==1.5.1
multidict==6.0.4
mypy-extensions==1.0.0
numexpr==2.8.4
numpy==1.24.3
openai==0.27.6
openapi-schema-pydantic==1.2.4
packaging==23.1
pydantic==1.10.7
PyYAML==6.0
requests==2.29.0
SQLAlchemy==2.0.12
tenacity==8.2.2
tqdm==4.65.0
typing-inspect==0.8.0
typing_extensions==4.5.0
urllib3==1.26.15
yarl==1.9.2
@ -0,0 +1,6 @@ |
||||
|
||||
"""Minimal LangChain completion smoke test against a LocalAI endpoint."""

import os

from langchain.llms import OpenAI

# Consistent with simple_demo.py: endpoint/key/model are overridable via
# environment variables and default to the docker-compose `api` service.
# (The original omitted these, so it only reached LocalAI if the env vars
# happened to be exported.)
base_path = os.environ.get('OPENAI_API_BASE', 'http://api:8080/v1')
key = os.environ.get('OPENAI_API_KEY', '-')
model_name = os.environ.get('MODEL_NAME', 'gpt-3.5-turbo')

llm = OpenAI(temperature=0.9, openai_api_base=base_path, openai_api_key=key, model_name=model_name)

text = "What would be a good company name for a company that makes colorful socks?"
print(llm(text))
Loading…
Reference in new issue