python · keras · hyperas

Can't use intermediate function in hyperas


I am trying to use hyperas to optimize my Keras model, but I keep getting NameError: processing is not defined, where processing is an intermediate function I define below. I have already looked at this and this example from hyperas and done exactly that, but it still doesn't seem to work for me.

This is my code:

import os

import numpy as np
import pandas as pd

from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dropout, Dense
from sklearn.model_selection import train_test_split

from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform


def processing():
    df = pd.read_json('balanced_all.json')

    def label(df):
        if df['rating'] < 3:
            return 0
        if df['rating'] > 3:
            return 1

    df['label'] = df.apply(label, axis=1)

    df = df[['review_text', 'label']]

    maxlen = 100
    max_words = 2000

    tokenizer = Tokenizer(num_words=max_words)
    tokenizer.fit_on_texts(df['review_text'].values)
    sequences = tokenizer.texts_to_sequences(df['review_text'].values)
    word_index = tokenizer.word_index


    sequences = pad_sequences(sequences, maxlen=maxlen)
    labels = pd.get_dummies(df['label']).values

    glove_dir = '/home/uttam/Documents/Thesis/Glove'
    embeddings_index = {}
    f = open(os.path.join(glove_dir, 'glove.6B.100d.txt'), 'r', encoding='utf-8')
    for line in f:
        values = line.split()
        word = values[0]
        coefs = np.asarray(values[1:], dtype='float32')
        embeddings_index[word] = coefs
    f.close()

    embedding_dim = 100


    embedding_matrix = np.zeros((max_words, embedding_dim))
    for word, i in word_index.items():
        if i < max_words:
            embedding_vector = embeddings_index.get(word)
            if embedding_vector is not None:
                embedding_matrix[i] = embedding_vector

    return sequences, labels, embedding_matrix



def data():
    sequences = processing()[0]
    labels = processing()[1]
    x_train, x_test, y_train, y_test = train_test_split(sequences,labels, test_size = 0.33, random_state = 42)
    return x_train, y_train, x_test, y_test



def create_model(x_train, y_train, x_test, y_test):
    embedding_dim = 100
    max_words = 2000
    embedding_matrix = processing()[2]


    model = Sequential()
    model.add(Embedding(max_words, embedding_dim, input_length=100))
    model.add(LSTM(128))
    model.add(Dropout({{uniform(0, 1)}}))
    model.add(Dense(2, activation='sigmoid'))

    model.layers[0].set_weights([embedding_matrix])
    model.layers[0].trainable = False


    model.compile(optimizer={{choice(['rmsprop', 'adam', 'sgd'])}}, loss='binary_crossentropy',metrics=['acc'])
    result = model.fit(x_train, y_train, epochs=20, batch_size={{choice([64, 128])}}, validation_split=0.2)
    model.save('pre_trained_glove_model.h5')


    validation_acc = np.amax(result.history['val_acc'])
    print('Best validation acc of epoch:', validation_acc)
    return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}


if __name__ == '__main__':
    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          algo=tpe.suggest,
                                          max_evals=5,
                                          trials=Trials())
    x_train, y_train, x_test, y_test = data()
    print("Evalutation of best performing model:")
    print(best_model.evaluate(x_test, y_test))
    print("Best performing model chosen hyper-parameters:")
    print(best_run)

I don't even need the intermediate function; I had to create it because hyperas doesn't find global variables. For example, if I had a variable x defined outside the hyperas functions, say outside create_model(), it would fail with NameError: x is not defined.
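For illustration, here is a minimal toy sketch (made-up names, nothing from my real script) of the pattern that triggers the error: a module-level variable referenced inside create_model().

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice

hidden_units = 64  # the "variable x": it lives at module level

def data():
    # tiny random dataset, just to make the example self-contained
    x_train = np.random.rand(100, 10)
    y_train = np.random.randint(0, 2, (100, 1))
    return x_train, y_train, x_train, y_train

def create_model(x_train, y_train, x_test, y_test):
    model = Sequential()
    # hyperas copies only the source of data() and create_model() into a
    # temporary script, so hidden_units does not exist there and this line
    # fails with: NameError: name 'hidden_units' is not defined
    model.add(Dense(hidden_units, activation='relu', input_shape=(10,)))
    model.add(Dense(1, activation='sigmoid'))
    model.compile(optimizer={{choice(['adam', 'rmsprop'])}},
                  loss='binary_crossentropy', metrics=['acc'])
    model.fit(x_train, y_train, epochs=1, verbose=0)
    score, acc = model.evaluate(x_test, y_test, verbose=0)
    return {'loss': -acc, 'status': STATUS_OK, 'model': model}

if __name__ == '__main__':
    best_run, best_model = optim.minimize(model=create_model, data=data,
                                          algo=tpe.suggest, max_evals=1,
                                          trials=Trials())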

I need this because, as you can see, I am using pre-trained GloVe embeddings. I can't put everything in either data() or create_model(). For example, data() needs the variables sequences and labels, and create_model() needs the variable embedding_matrix, so there is no way (as far as I know) to split everything across just those two functions.

The only way this worked for me was putting everything in both data() and create_model(), which is definitely not efficient and not the way it should be done.


Solution

  • A little bit late, but for future reference: you are right, hyperas doesn't recognize global variables. You can pass the function in the functions list given to minimize:

    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          functions=[processing], # <<
                                          algo=tpe.suggest,
                                          max_evals=5,
                                          trials=Trials())
    

    As you mentioned, if you need to pass a global variable into hyperas, you can choose one of these options:

    Using data():

    def data():
        # ... my code ...
        return x_train, y_train, x_test, y_test, foo

    def create_model(x_train, y_train, x_test, y_test, foo):
        # ... build and train the model, using foo ...

    or define a new function and pass it in the list of functions:

    def my_funct():
        return foo

    def data():
        # ... my code ...
        return x_train, y_train, x_test, y_test

    def create_model(x_train, y_train, x_test, y_test):
        foo = my_funct()
        # ... build and train the model, using foo ...

    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          functions=[my_funct], # << foo
                                          algo=tpe.suggest,
                                          max_evals=5,
                                          trials=Trials())
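
    Applied to the code in the question, the two ideas can be combined. A rough, untested sketch: processing() stays in the functions list because data() calls it, and embedding_matrix is passed through data()'s return value so create_model() never calls processing() itself (this assumes, as in the first option above, that extra values returned by data() can be consumed as extra parameters of create_model()):

    def data():
        sequences, labels, embedding_matrix = processing()  # runs only once
        x_train, x_test, y_train, y_test = train_test_split(
            sequences, labels, test_size=0.33, random_state=42)
        return x_train, y_train, x_test, y_test, embedding_matrix

    def create_model(x_train, y_train, x_test, y_test, embedding_matrix):
        model = Sequential()
        model.add(Embedding(2000, 100, input_length=100))
        model.add(LSTM(128))
        model.add(Dropout({{uniform(0, 1)}}))
        model.add(Dense(2, activation='sigmoid'))
        model.layers[0].set_weights([embedding_matrix])
        model.layers[0].trainable = False
        model.compile(optimizer={{choice(['rmsprop', 'adam', 'sgd'])}},
                      loss='binary_crossentropy', metrics=['acc'])
        result = model.fit(x_train, y_train, epochs=20,
                           batch_size={{choice([64, 128])}},
                           validation_split=0.2)
        validation_acc = np.amax(result.history['val_acc'])
        return {'loss': -validation_acc, 'status': STATUS_OK, 'model': model}

    best_run, best_model = optim.minimize(model=create_model,
                                          data=data,
                                          functions=[processing],  # data() calls processing()
                                          algo=tpe.suggest,
                                          max_evals=5,
                                          trials=Trials())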