Search code examples
pythonarabicstemminglsa

Python 2 : AttributeError: 'list' object has no attribute 'split'


this is my program of LSA, in this fonction i want to tokenize all my text and then transform it to stem. i'm trying to integrate them program of stemming and then i get this: for word in titles.split(" "): AttributeError: 'list' object has no attribute 'split'

this code lsa:

# -*- coding: utf-8 -*-

from numpy import zeros
from scipy.linalg import svd
from math import log
from numpy import asarray, sum
#from nltk.corpus import stopwords
from sklearn.metrics.pairwise import cosine_similarity
#from nltk.stem import PorterStemmer
#from nltk.stem.isri import ISRIStemmer
import nltk
#from matplotlib import pyplot as plt
from snowballstemmer import stemmer 


titles = [" ذهبت الاخت الى المدرسة","تقع المدرسة في الجبال",
    "ذهب الام لزيارة ابنتها في المدرسة ","تحضر الام الكعكة" ]

ar_stemmer = stemmer("arabic")

stopwords = ['ثم','و','حتى','الى','على','في']

ignorechars = ''',:'!'''



class LSA(object):
def __init__(self, stopwords, ignorechars):
    self.stopwords = stopwords
    self.ignorechars = ignorechars
    self.wdict = {}
    self.dcount = 0    


def parse(self, doc):

    for word in titles.split(" "):
             stem = ar_stemmer.stemWord(word)

    if stem in self.stopwords:
       pass
    elif stem in self.wdict:
            self.wdict[stem].append(self.dcount)
    else:
            self.wdict[stem] = [self.dcount]
            self.dcount += 1

and this is what i want integrate:

from snowballstemmer import stemmer
ar_stemmer = stemmer("arabic")
sentence = u" ذهبت الاخت الى المدرسة, تقع المدرسة في الجبال"

for word in sentence.split(" "):
stem = ar_stemmer.stemWord(word)
print stem

Solution

  • titles is already a list; do this instead:

    for sentence in titles:
        for word in sentence.split(" "):
            ...