# Import the ChatOllama chat model class
from langchain_ollama import ChatOllama
# Use qwen2.5:14b with temperature 0.1; 0 is the most conservative, higher values allow more creativity. Store the model as llm_model.
llm_model = ChatOllama(model="qwen2.5:14b", temperature=0.1)
# Define the prompt template string string_template
string_template = """
You are a highly experienced translator who can translate any language.
Please translate the user's text: {language} according to this style: {style}.
"""
# Import the ChatPromptTemplate, HumanMessagePromptTemplate, and SystemMessagePromptTemplate classes
from langchain.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)
# Create a chat prompt template object prompt_template via the from_template method
prompt_template = ChatPromptTemplate.from_template(string_template)
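# Optional check: from_template infers the input variables from the {style} and {language} placeholders
print(prompt_template.input_variables)  # expected: ['language', 'style'] (order may vary)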
# Define input values for the {style} and {language} placeholders in string_template
customer_style = """Translate in a very professional style, from English into Chinese."""
customer_language = """Hi, I'm a coder, and I want to study AI."""
# Fill in the style and language values and format the template into messages via format_messages
customer_messages = prompt_template.format_messages(
    style=customer_style,
    language=customer_language,
)
# Send the messages to the chat model
customer_response = llm_model.invoke(customer_messages)
# Print the model's reply
print(customer_response.content)
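# The reply should be the Chinese translation of customer_language, for example something
# like "你好，我是一名程序员，我想学习人工智能。" (exact wording varies by model and run).
# The HumanMessagePromptTemplate and SystemMessagePromptTemplate classes imported above can
# build an equivalent prompt explicitly; a minimal sketch (variable names are illustrative):
system_prompt = SystemMessagePromptTemplate.from_template(
    "You are a highly experienced translator. Translate the user's text according to this style: {style}."
)
human_prompt = HumanMessagePromptTemplate.from_template("{language}")
chat_prompt = ChatPromptTemplate.from_messages([system_prompt, human_prompt])
# chat_prompt.format_messages(style=customer_style, language=customer_language) then yields a
# system message plus a human message, instead of the single human message that from_template
# produces above.

# Import ResponseSchema and StructuredOutputParser to parse the model's reply into a structured result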
from langchain.output_parsers import ResponseSchema,StructuredOutputParser
# Build the customer_review text to analyze
customer_review = """
This leaf blower is pretty amazing. It has four settings:
candle blower, gentle breeze, windy city, and tornado.
It arrived in two days, just in time for my wife's
anniversary present.
I think my wife liked it so much she was speechless.
So far I've been the only one using it, and I've been
using it every other morning to clear the leaves on our lawn.
It's slightly more expensive than the other leaf blowers
out there, but I think it's worth it for the extra features.
"""
# The review_template prompt asks the model to return the result as JSON; the placeholder variable is {text}
review_template = """
For the following text, extract the following information:
gift: Was the item purchased as a gift for someone else?
Answer True if yes, False if not or unknown.
delivery_days: How many days did it take for the product
to arrive? If this information is not found, output -1.
price_value: Extract any sentences about the value or price,
and output them as a comma separated Python list.
Format the output as JSON with the following keys:
gift
delivery_days
price_value
text: {text}
"""
# Define the response schemas; each ResponseSchema describes one expected output field via name and description
gift_schema = ResponseSchema(name="gift",
description="Was the item purchased
as a gift for someone else?
Answer True if yes,
False if not or unknown.")
delivery_days_schema = ResponseSchema(name="delivery_days",
description="How many days
did it take for the product
to arrive? If this
information is not found,
output -1.")
price_value_schema = ResponseSchema(name="price_value",
description="Extract any
sentences about the value or
price, and output them as a
comma separated Python list.")
response_schemas = [gift_schema,
delivery_days_schema,
price_value_schema]
# Build the review_template_2 prompt; the placeholder variables are {text} and {format_instructions}
review_template_2 = """
For the following text, extract the following information:
gift: Was the item purchased as a gift for someone else?
Answer True if yes, False if not or unknown.
delivery_days: How many days did it take for the product
to arrive? If this information is not found, output -1.
price_value: Extract any sentences about the value or price,
and output them as a comma separated Python list.
text: {text}
{format_instructions}
"""
# Create the parser from the response_schemas list via StructuredOutputParser.from_response_schemas
output_parser = StructuredOutputParser.from_response_schemas(response_schemas)
# get_format_instructions returns the formatting instructions that tell the model how to structure its output
format_instructions = output_parser.get_format_instructions()
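# Optional check: the instructions ask the model to reply with a markdown ```json code block
# containing the gift, delivery_days and price_value keys (roughly; the exact wording comes from the parser)
print(format_instructions)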
# Create the prompt instance from the review_template_2 string via from_template
prompt = ChatPromptTemplate.from_template(template=review_template_2)
# Format the template into messages, filling in text and format_instructions
messages = prompt.format_messages(
    text=customer_review,
    format_instructions=format_instructions,
)
# Send the messages to the model
response = llm_model.invoke(messages)
# parse converts the model's JSON reply into a Python dict
output_dict = output_parser.parse(response.content)
print(output_dict)
# Get the delivery_days value
print(output_dict.get("delivery_days"))
Import the model:
from langchain_ollama import ChatOllama
llm_model = ChatOllama(model="qwen2.5:14b", temperature=0.1)
Import the prompt template:
ChatPromptTemplate methods: from_template, format_messages
Formatted output:
ResponseSchema: defines one expected output field (name, description)
StructuredOutputParser methods: from_response_schemas, get_format_instructions, parse