LangChain demo: a simple translation chain (prompt | model | parser) run against several chat models

from langchain_community.chat_models import ChatOllama
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

if __name__ == '__main__':
    # 1. Create prompt template
    system_template = "Translate the following into {language}:"
    prompt_template = ChatPromptTemplate.from_messages([
        ('system', system_template),
        ('user', '{text}')
    ])
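
    # A quick sanity check (illustrative values, not part of the original demo):
    # prompt_template.invoke({'language': 'Korean', 'text': 'hello'}) returns a
    # ChatPromptValue whose .to_messages() is roughly
    #   [SystemMessage(content='Translate the following into Korean:'),
    #    HumanMessage(content='hello')]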

    # 2. Create the chat models to compare (the same chain is run against each)
    for model in [
        ChatOpenAI(),
        ChatOllama(model="llama2"),
        ChatOllama(model="llama3"),
        # ChatOllama(model="llama2:13b-text"),
        ChatOllama(model="mistral"),
        ChatOllama(model="mixtral"),
        ChatOllama(model="qwen2:7b")
    ]:
        # 3. Create parser
        parser = StrOutputParser()

        # 4. Create chain
        chain = prompt_template | model | parser
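        # The `|` operator composes Runnables (LCEL): the input dict is formatted
        # into chat messages by prompt_template, those messages go to the chat
        # model, and the model's AIMessage is reduced to a plain string by parser.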

        # Ask for "Where is the capital of China?" to be translated into Korean
        result = chain.invoke({'text': '中国的首都在哪里?', 'language': '韩语'})
        print('----')
        print(f"{model} {result}")
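
Running this end to end assumes an OPENAI_API_KEY environment variable for ChatOpenAI and a local Ollama server with each listed model already pulled (for example, ollama pull llama3); a model that has not been pulled will fail when its turn in the loop comes. For reference, a minimal sketch of what the chain does on invoke, written out step by step (variable names here are illustrative):

    prompt_value = prompt_template.invoke({'text': '中国的首都在哪里?', 'language': '韩语'})
    ai_message = model.invoke(prompt_value)   # AIMessage from the chat model
    text = parser.invoke(ai_message)          # plain string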