Tags: python, openai-api, langchain

AttributeError: 'str' object has no attribute 'model_dump' in langchain_core


I've encountered an error while working with the langchain_core and langchain-openai libraries, and I'm hoping someone can help me resolve this issue.

Error Message:

AttributeError: 'str' object has no attribute 'model_dump'

Example code:

import pandas as pd
from data_api import *
from langchain_openai import ChatOpenAI
# from langchain.chat_models import ChatOpenAI
from dotenv import load_dotenv, find_dotenv
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from perplexity.perplexity import search_chat_completion
from langchain.prompts import PromptTemplate
from langchain_core.runnables import RunnableLambda
from operator import itemgetter
import json
import os
from typing import List, Dict, Any
from tqdm import tqdm

# ... inside a class; the models are configured in __init__ (excerpt):
        self.llm = ChatOpenAI(model='gpt-4o', temperature=0)
        self.mini = ChatOpenAI(model='gpt-4o-mini', temperature=0)
        self.pplx = ChatOpenAI(base_url="https://api.perplexity.ai",
                               model='llama-3.1-sonar-huge-128k-online')
        self.o1 = ChatOpenAI(model='o1-preview', temperature=1)

# ... inside a method that builds and runs the chain (excerpt):
chain = (
    {
        "chain1": chain1, 
        "chain2": chain2, 
        "chain3": chain3, 
        "chain4": chain4, 
        "chain5": chain5, 
        "chain6": chain6, 
        "chain7": chain7, 
        "company": itemgetter("company")
    }
    | PromptTemplate.from_template(
"""
<text>

{chain1}

{chain2}

{chain3}

{chain4}

{chain5}

{chain6}

{chain7}

</text>
"""
    )
    | self.o1
    | StrOutputParser()
)

return chain.invoke({"company": symbol})

Traceback:

AttributeError("'str' object has no attribute 'model_dump'")Traceback (most recent call last):


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3022, in invoke
    input = context.run(step.invoke, input, config, **kwargs)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in invoke
    output = {key: future.result() for key, future in zip(steps, futures)}


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in <dictcomp>
    output = {key: future.result() for key, future in zip(steps, futures)}


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 439, in result
    return self.__get_result()


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result
    raise self._exception


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/thread.py", line 58, in run
    result = self.fn(*self.args, **self.kwargs)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3711, in _invoke_step
    return context.run(


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3022, in invoke
    input = context.run(step.invoke, input, config, **kwargs)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in invoke
    output = {key: future.result() for key, future in zip(steps, futures)}


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3727, in <dictcomp>
    output = {key: future.result() for key, future in zip(steps, futures)}


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 446, in result
    return self.__get_result()


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result
    raise self._exception


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/concurrent/futures/thread.py", line 58, in run
    result = self.fn(*self.args, **self.kwargs)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3711, in _invoke_step
    return context.run(


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/runnables/base.py", line 3024, in invoke
    input = context.run(step.invoke, input, config)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 286, in invoke
    self.generate_prompt(


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 786, in generate_prompt
    return self.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 643, in generate
    raise e


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 633, in generate
    self._generate_with_cache(


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_core/language_models/chat_models.py", line 851, in _generate_with_cache
    result = self._generate(


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_openai/chat_models/base.py", line 718, in _generate
    return self._create_chat_result(response, generation_info)


  File "/home/azureuser/miniconda3/envs/llm39/lib/python3.9/site-packages/langchain_openai/chat_models/base.py", line 745, in _create_chat_result
    response if isinstance(response, dict) else response.model_dump()


AttributeError: 'str' object has no attribute 'model_dump'
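
The last frame shows langchain_openai calling model_dump() on the raw API response, so any step that hands it a bare string reproduces the error in isolation (with a hypothetical response value):

# Reproduction of the failing line from _create_chat_result:
response = "some text"  # hypothetical raw response that is a str, not a ChatCompletion
payload = response if isinstance(response, dict) else response.model_dump()
# AttributeError: 'str' object has no attribute 'model_dump'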

Environment Information:

  • Python 3.9 (conda env "llm39", per the traceback paths)
  • Package versions:
aiohappyeyeballs 2.4.4
aiohttp 3.11.10
aiosignal 1.3.1
annotated-types 0.7.0
anyio 4.7.0
asttokens 3.0.0
async-timeout 4.0.3
attrs 24.2.0
beautifulsoup4 4.12.3
certifi 2024.8.30
charset-normalizer 3.4.0
comm 0.2.2
dataclasses-json 0.6.7
debugpy 1.8.10
decorator 5.1.1
distro 1.9.0
et_xmlfile 2.0.0
exceptiongroup 1.2.2
executing 2.1.0
fastjsonschema 2.21.1
frozenlist 1.5.0
greenlet 3.1.1
h11 0.14.0
httpcore 1.0.7
httpx 0.28.1
httpx-sse 0.4.0
idna 3.10
import-ipynb 0.2
importlib_metadata 8.5.0
ipykernel 6.29.5
ipython 8.18.1
jedi 0.19.2
jiter 0.8.2
jsonpatch 1.33
jsonpointer 3.0.0
jsonschema 4.23.0
jsonschema-specifications 2024.10.1
jupyter_client 8.6.3
jupyter_core 5.7.2
langchain 0.3.13
langchain-community 0.3.13
langchain-core 0.3.28
langchain-openai 0.2.14
langchain-text-splitters 0.3.4
langgraph 0.2.59
langgraph-checkpoint 2.0.9
langgraph-sdk 0.1.44
langsmith 0.2.3
marshmallow 3.23.2
matplotlib-inline 0.1.7
msgpack 1.1.0
multidict 6.1.0
mypy-extensions 1.0.0
nbformat 5.10.4
nest_asyncio 1.6.0
numpy 1.26.4
openai 1.58.1
openpyxl 3.1.5
orjson 3.10.12
packaging 24.2
pandas 2.2.3
parso 0.8.4
pexpect 4.9.0
pickleshare 0.7.5
pip 24.2
platformdirs 4.3.6
prompt_toolkit 3.0.48
propcache 0.2.1
psutil 6.1.0
ptyprocess 0.7.0
pure_eval 0.2.3
pydantic 2.10.4
pydantic_core 2.27.2
pydantic-settings 2.7.0
Pygments 2.18.0
python-dateutil 2.9.0.post0
python-dotenv 1.0.1
pytz 2024.2
PyYAML 6.0.2
pyzmq 26.2.0
referencing 0.35.1
regex 2024.11.6
requests 2.32.3
requests-toolbelt 1.0.0
rpds-py 0.22.3
setuptools 75.1.0
six 1.17.0
sniffio 1.3.1
soupsieve 2.6
SQLAlchemy 2.0.36
stack_data 0.6.3
tenacity 9.0.0
tiktoken 0.8.0
tornado 6.4.2
tqdm 4.67.1
traitlets 5.14.3
typing_extensions 4.12.2
typing-inspect 0.9.0
tzdata 2024.2
urllib3 2.2.3
wcwidth 0.2.13
wheel 0.44.0
yarl 1.18.3
zipp 3.21.0

Solution

  • This error occurs because one of your chains returns a string instead of a proper LangChain message type. Here's how to fix it:

    from langchain_core.messages import HumanMessage
    from langchain_core.output_parsers import StrOutputParser
    from langchain_core.prompts import PromptTemplate
    from operator import itemgetter

    def wrap_chain_output(chain_output):
        # Coerce bare strings into a proper message type; pass other
        # outputs (e.g. AIMessage) through unchanged.
        if isinstance(chain_output, str):
            return HumanMessage(content=chain_output)
        return chain_output

    chain = (
        {
            "chain1": chain1 | wrap_chain_output,
            "chain2": chain2 | wrap_chain_output,
            "chain3": chain3 | wrap_chain_output,
            "chain4": chain4 | wrap_chain_output,
            "chain5": chain5 | wrap_chain_output,
            "chain6": chain6 | wrap_chain_output,
            "chain7": chain7 | wrap_chain_output,
            "company": itemgetter("company")
        }
        | PromptTemplate.from_template(
            """
            <text>
            {chain1}
            {chain2}
            {chain3}
            {chain4}
            {chain5}
            {chain6}
            {chain7}
            </text>
            """
        )
        | self.o1
        | StrOutputParser()
    )

    result = chain.invoke({"company": symbol})
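
    Piping into a plain function works here because LCEL coerces plain functions into runnables automatically. The explicit equivalent, shown as a sketch using the same names, is:

    from langchain_core.runnables import RunnableLambda

    # Same behavior, with the coercion spelled out:
    wrapped_chain1 = chain1 | RunnableLambda(wrap_chain_output)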
    

    The error occurs because the OpenAI chat model expects either a dictionary or an object with a model_dump() method, but it is receiving a plain string. The modifications above resolve the issue by ensuring consistent types throughout the chain. If that doesn't work, you can instead normalize every sub-chain's output to plain text before it reaches the prompt template. Note that PromptTemplate variables are simple placeholders and cannot contain Python expressions, so this coercion has to happen in the chain itself, like this:

    from langchain_core.runnables import RunnableLambda

    def to_text(x):
        # Flatten message objects to their text content; leave strings as-is.
        return x.content if hasattr(x, "content") else str(x)

    chain = (
        {
            "chain1": chain1 | RunnableLambda(to_text),
            "chain2": chain2 | RunnableLambda(to_text),
            "chain3": chain3 | RunnableLambda(to_text),
            "chain4": chain4 | RunnableLambda(to_text),
            "chain5": chain5 | RunnableLambda(to_text),
            "chain6": chain6 | RunnableLambda(to_text),
            "chain7": chain7 | RunnableLambda(to_text),
            "company": itemgetter("company")
        }
        | PromptTemplate.from_template(
            """
            <text>
            {chain1}
            {chain2}
            {chain3}
            {chain4}
            {chain5}
            {chain6}
            {chain7}
            </text>
            """
        )
        | self.o1
        | StrOutputParser()
    )
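
    If neither change resolves it, find which sub-chain is producing the bare string by invoking each one on its own. A quick diagnostic sketch, using the chain names from your code (the Perplexity-backed model is worth checking first, since the failure happens inside a nested sub-chain):

    # Invoke each sub-chain separately and inspect its output type.
    for name, sub in [("chain1", chain1), ("chain2", chain2),
                      ("chain3", chain3), ("chain4", chain4),
                      ("chain5", chain5), ("chain6", chain6),
                      ("chain7", chain7)]:
        out = sub.invoke({"company": symbol})
        print(name, type(out).__name__)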