Search code examples
pythonnltkwordnet

AttributeError: 'list' object has no attribute 'hypernyms'


I am trying to get distractors for a list of words and return them in arrays, but i keep getting this error.

Traceback (most recent call last):
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/uvicorn/protocols/http/httptools_impl.py", line 401, in run_asgi
    result = await app(self.scope, self.receive, self.send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/uvicorn/middleware/proxy_headers.py", line 78, in __call__
    return await self.app(scope, receive, send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/fastapi/applications.py", line 199, in __call__
    await super().__call__(scope, receive, send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/applications.py", line 111, in __call__
    await self.middleware_stack(scope, receive, send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/middleware/errors.py", line 181, in __call__
    raise exc from None
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/middleware/errors.py", line 159, in __call__
    await self.app(scope, receive, _send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/exceptions.py", line 82, in __call__
    raise exc from None
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/exceptions.py", line 71, in __call__
    await self.app(scope, receive, sender)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/routing.py", line 566, in __call__
    await route.handle(scope, receive, send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/routing.py", line 227, in handle
    await self.app(scope, receive, send)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/routing.py", line 41, in app
    response = await func(request)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/fastapi/routing.py", line 201, in app
    raw_response = await run_endpoint_function(
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/fastapi/routing.py", line 150, in run_endpoint_function
    return await run_in_threadpool(dependant.call, **values)
  File "/home/maro/Documents/codefiles/nlp/venv/lib/python3.8/site-packages/starlette/concurrency.py", line 34, in run_in_threadpool
    return await loop.run_in_executor(None, func, *args)
  File "/usr/lib/python3.8/concurrent/futures/thread.py", line 57, in run
    result = self.fn(*self.args, **self.kwargs)
  File "/home/maro/Documents/codefiles/nlx/./main.py", line 104, in getquestion
    distractors.append(get_distractors_wordnet(wn.synsets(word),word))
  File "/home/maro/Documents/codefiles/nlx/./main.py", line 76, in get_distractors_wordnet
    hypernym = syn.hypernyms()
AttributeError: 'list' object has no attribute 'hypernyms'

This is my entire code

from typing import List
from fastT5 import get_onnx_model,get_onnx_runtime_sessions,OnnxT5
from transformers import AutoTokenizer
from pathlib import Path
import os
from fastapi import FastAPI
from pydantic import BaseModel
from textblob import TextBlob

import nltk
from nltk.corpus import wordnet as wn

app = FastAPI()

class QuestionRequest(BaseModel):
    context: str
    

class QuestionResponse(BaseModel):
    question: List[str] = []
    answer: List[str] = []
    distractors_sublist: List[List[str]] = [ [] ]



trained_model_path = './t5_squad_v1/'

pretrained_model_name = Path(trained_model_path).stem


encoder_path = os.path.join(trained_model_path,f"{pretrained_model_name}-encoder-quantized.onnx")
decoder_path = os.path.join(trained_model_path,f"{pretrained_model_name}-decoder-quantized.onnx")
init_decoder_path = os.path.join(trained_model_path,f"{pretrained_model_name}-init-decoder-quantized.onnx")

model_paths = encoder_path, decoder_path, init_decoder_path
model_sessions = get_onnx_runtime_sessions(model_paths)
model = OnnxT5(trained_model_path, model_sessions)

tokenizer = AutoTokenizer.from_pretrained(trained_model_path)


def get_question(sentence,mdl,tknizer):
    gfg = TextBlob(sentence)
    gfg = gfg.noun_phrases
    array=[]
    for i in gfg:
        text = "context: {} answer: {}".format(sentence,i)
        array.append(text)
        
    max_len = 256
    question_array =[]
    for text in array:
        encoding = tknizer.encode_plus(text,max_length=max_len, pad_to_max_length=False,truncation=True, return_tensors="pt")
        input_ids, attention_mask = encoding["input_ids"], encoding["attention_mask"]
        outs = mdl.generate(input_ids=input_ids,
                                        attention_mask=attention_mask,
                                        early_stopping=True,
                                        num_beams=5,
                                        num_return_sequences=1,
                                        no_repeat_ngram_size=2,
                                        max_length=128)
        dec = [tknizer.decode(ids,skip_special_tokens=True) for ids in outs]
        Question = dec[0].replace("question:","")
        Question= Question.strip()
        question_array.append(Question)
        print (question_array)
    return question_array, gfg

# Distractors from Wordnet
def get_distractors_wordnet(syn,word):
    distractors=[]
    word= word.lower()
    orig_word = word
    if len(word.split())>0:
        word = word.replace(" ","_")
    hypernym = syn.hypernyms()
    if len(hypernym) == 0: 
        return distractors
    for item in hypernym[0].hyponyms():
        name = item.lemmas()[0].name()
        #print ("name ",name, " word",orig_word)
        if name == orig_word:
            continue
        name = name.replace("_"," ")
        name = " ".join(w.capitalize() for w in name.split())
        if name is not None and name not in distractors:
            distractors.append(name)
    return distractors


@app.get('/')
def index():
    return {'message':'hello world'}


@app.post("/getquestion", response_model= QuestionResponse)
def getquestion(question: QuestionRequest):
    context = question.context
    question_array, gfg = get_question(context,model,tokenizer)
    answer = gfg[0]

    distractors = []
    for word in gfg:
        distractors.append(get_distractors_wordnet(wn.synsets(word),word))
    distractors_sublist = []
    for i in range(len(distractors)):
        distractors_sublist.append(distractors[i])
    return QuestionResponse(question=question_array, answer=answer, distractors_sublist=distractors_sublist)

    distractors = get_distractors_wordnet(wn.synsets(answer),answer)
    distractors_sublist = [distractors[i:i+3] for i in range(0, len(distractors), 3)]
    return QuestionResponse(question=question_array, answer=answer, distractors_sublist=distractors_sublist)

I have tried so many iterations, none seem to work. I am using Fastapi and some personal models. The response should return an array of distractors that contain several sub arrays each representing the distractor for each answer or gfg


Solution

  • I took a look at the documentation for NLTK at https://www.nltk.org/howto/wordnet.html. It looks like the culprit is wn.synsets(word), in your invocation of get_distractors_wordnet. An easy typo - wn.synset returns a single synonym set, whereas wn.synsets returns a list of synonym sets. You're getting that error because you're trying to use a member function of a synset on a list of synsets (hence, "'list' object has no attribute 'hypernyms'").

    All you have to do is either iterate over the list, or refactor to use wn.synset instead. Hope this helps!