Tags: python, tensorflow, tensorflow-estimator

TensorFlow estimator.DNNClassifier: export_savedmodel caused "ValueError: Invalid feature"


I read a lot of topics, but none of the answers helped me...

I have a DNNClassifier:

import tensorflow as tf

# one numeric feature column for every column of the training DataFrame
feature_columns = []
for key in X_train.keys():
    feature_columns.append(tf.feature_column.numeric_column(key=key))

classifier = tf.estimator.DNNClassifier(
    feature_columns=feature_columns,
    hidden_units=[10, 20, 10],
    n_classes=2
    )

def train_input_fn(features, labels, batch_size):
    """An input function for training"""
    dataset = tf.data.Dataset.from_tensor_slices((dict(features), labels))
    dataset = dataset.shuffle(10).repeat().batch(batch_size)
    return dataset

# train the model
batch_size = 100
train_steps = 400

for i in range(0,100):
    classifier.train(
        input_fn=lambda:train_input_fn(X_train, y_train, batch_size),
        steps=train_steps
        )

DataFrame X_train contains 452 numeric columns (most of them one-hot encoded dummy columns); its shape is (84692, 452), and len(feature_columns) is likewise 452.
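
For context, a minimal sketch (with hypothetical column names) of how such a dummy-encoded frame can be produced with pandas:

import pandas as pd

# hypothetical raw frame with one categorical and one continuous column
raw = pd.DataFrame({
    "feature_N": ["value_M", "value_K", "value_M"],
    "amount": [1.2, 3.4, 5.6],
})

# get_dummies expands each categorical column into 0/1 indicator columns,
# producing names like "dummy_feature_N_value_M"
X_train_sketch = pd.get_dummies(raw, columns=["feature_N"], prefix="dummy_feature_N")
print(X_train_sketch.columns.tolist())
# ['amount', 'dummy_feature_N_value_K', 'dummy_feature_N_value_M']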

But when I try to save the model using this script:

def serving_input_receiver_fn():
    feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
    return tf.estimator.export.build_parsing_serving_input_receiver_fn(feature_spec)()

classifier.export_savedmodel(export_dir_base="export_model/", serving_input_receiver_fn=serving_input_receiver_fn)

I am getting an error:

ValueError: Invalid feature dummy_feature_N_value_M:0.

I also tried to save using a slightly different script (although I don't fully understand every parameter value here):

def serving_input_receiver_fn():
    serialized_tf_example = tf.placeholder(dtype=tf.string, shape=[None], name='input_tensors')
    receiver_tensors      = {"predictor_inputs": serialized_tf_example}
    feature_spec          = {"words": tf.FixedLenFeature([452],tf.float32)}
    features              = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)

classifier.export_savedmodel(export_dir_base="export_model/", serving_input_receiver_fn=serving_input_receiver_fn)

But it returns a similar error:

ValueError: Feature dummy_feature_N_value_M is not in features dictionary.

When I check the feature_columns list, the column is there:

_NumericColumn(key='dummy_feature_N_value_M', shape=(1,), default_value=None, dtype=tf.float32, normalizer_fn=None),

What am I doing wrong?


Solution

  • I don't know what it was... but now everything works.

    First, instead of using the one-hot encoded dummy columns I had created myself, I used the initial dataframe "train_dummy_features" with its categorical columns:

    # split columns and indexes of categorical and continuous columns
    categorical_columns = list(train_dummy_features.select_dtypes(include=['category','object']))
    print(categorical_columns)
    numeric_columns = list(train_dummy_features.select_dtypes(include=['int','uint8']))
    print(numeric_columns)
    cat_features_indexes = [train_dummy_features.columns.get_loc(c) for c in train_dummy_features.columns if c in categorical_columns] 
    print(cat_features_indexes)
    continues_features_indexes = [train_dummy_features.columns.get_loc(c) for c in train_dummy_features.columns if c not in categorical_columns] 
    print(continues_features_indexes)
    

    And then created the list of feature_columns using TensorFlow's feature column functions:

    numeric_features = [tf.feature_column.numeric_column(key = column) for column in numeric_columns]
    print(numeric_features)
    categorical_features = [
        tf.feature_column.embedding_column(
            categorical_column=tf.feature_column.categorical_column_with_vocabulary_list(
                key=column,
                vocabulary_list=train_dummy_features[column].unique()
            ),
            dimension=len(train_dummy_features[column].unique())
        )
        for column in categorical_columns
    ]
    print(categorical_features[3])
    
    feature_columns = numeric_features + categorical_features
    feature_columns[2]
    

    and assigned the initial dataframe "train_dummy_features" with its categorical columns to X_train:

    X = train_dummy_features
    y = train_measure  # we already have a dataframe with the target measure
    
    X_train, y_train = X, y
    

    Declared "classifier" and "train_input_fn" as specified in the initial post, trained classifier.

    After that both

    def serving_input_receiver_fn():
        #feature_spec = {INPUT_TENSOR_NAME: tf.FixedLenFeature(dtype=tf.float32, shape=[452])}
        feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
        return tf.estimator.export.build_parsing_serving_input_receiver_fn(feature_spec)()
    
    classifier.export_savedmodel(export_dir_base="export_model2/", serving_input_receiver_fn=serving_input_receiver_fn)
    

    and

    def serving_input_receiver_fn():
        serialized_tf_example = tf.placeholder(dtype=tf.string, shape=[None], name='input_tensors')
        receiver_tensors      = {"predictor_inputs": serialized_tf_example}
        feature_spec          = tf.feature_column.make_parse_example_spec(feature_columns) #{"words": tf.FixedLenFeature([len(feature_columns)],tf.float32)}
        features              = tf.parse_example(serialized_tf_example, feature_spec)
        return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
    
    classifier.export_savedmodel(export_dir_base="export_model3/", serving_input_receiver_fn=serving_input_receiver_fn)
    

    successfully exported the model.

    I tried to repeat the first set of steps, which caused the error yesterday, but I cannot reproduce the error now.

    So, the described steps successfully train and export a tf.estimator.DNNClassifier model.
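
    As an optional sanity check (not part of the original answer), the exported SavedModel can be loaded back with tf.contrib.predictor in TF 1.x. This is only a sketch: the "predictor_inputs" key matches the second serving function above, while the feature names and values below are placeholders that must be replaced to match your own feature_columns:

    import tensorflow as tf

    # export_savedmodel returns the timestamped export directory (as bytes in TF 1.x)
    export_dir = classifier.export_savedmodel(
        export_dir_base="export_model3/",
        serving_input_receiver_fn=serving_input_receiver_fn)

    # the "predict" signature keeps the receiver key ("predictor_inputs")
    predict_fn = tf.contrib.predictor.from_saved_model(
        export_dir.decode("utf-8"), signature_def_key="predict")

    # one serialized tf.Example; every feature column must be present
    # ("some_numeric_column" / "some_categorical_column" are hypothetical names)
    example = tf.train.Example(features=tf.train.Features(feature={
        "some_numeric_column": tf.train.Feature(
            float_list=tf.train.FloatList(value=[0.5])),
        "some_categorical_column": tf.train.Feature(
            bytes_list=tf.train.BytesList(value=[b"some_value"])),
    }))

    predictions = predict_fn({"predictor_inputs": [example.SerializeToString()]})
    print(predictions["class_ids"])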