I am trying to train a model with Amazon SageMaker and serve it with TensorFlow Serving. To do that, I download the trained model into a TensorFlow Serving Docker container and try to serve it from there.
SageMaker's training and evaluation stages complete without errors, but when I load the model into the TensorFlow Serving server and try to invoke it, I get errors suggesting that my model has no defined inputs. The TensorFlow Serving server does report that the model is loaded and being served.
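For reference, I start the serving container roughly like this (the source path is a placeholder for wherever I extract the model artifact downloaded from SageMaker):

docker run -p 8501:8501 \
    --mount type=bind,source=/path/to/extracted/model,target=/models/test_model \
    -e MODEL_NAME=test_model -t tensorflow/serving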
For debugging purposes, I also tried serving the model through SageMaker itself, but all I got was a vague error message saying there was an error invoking the endpoint.
I think the problem is that I am either defining serving_input_fn incorrectly, invoking the model incorrectly, or both. Can anyone help?
The curl command I use to invoke the TensorFlow Serving server:
curl -d '{"instances": [{"col3": 1.0}]}' -X POST http://localhost:8501/v1/models/test_model:predict
The error I receive from TensorFlow Serving:
{ "error": "Failed to process element: 0 key: col3 of \'instances\' list. Error: Invalid argument: JSON object: does not have named input: col3" }%
SageMaker's training Python file:
import os

import tensorflow as tf
from tensorflow.python.ops import nn

TRAIN_FILENAME = 'test.csv'
TEST_FILENAME = 'train.csv'
NODES_IN_LAYER = 6
LAYERS_NUM = 10
NUM_LINES_TO_SKIP = 1
CSV_COLUMNS = ['col1', 'col2', 'col3', 'col4', 'col5', 'col6', 'col7', 'col8', 'label']
RECORDS_DEFAULTS = [[0], [0], [0.0], [0.0], [0], [0.0], [0.0], [0], [0.0]]
BATCH_SIZE = 32

# Feature spec for the single feature the model is trained on.
FEATURE_SPEC = {
    'col3': tf.FixedLenFeature(dtype=tf.float32, shape=[]),
}


def estimator_fn(run_config, params):
    feature_columns = [tf.feature_column.numeric_column('col3')]
    return tf.estimator.DNNRegressor(feature_columns=feature_columns,
                                     hidden_units=[NODES_IN_LAYER] * LAYERS_NUM,
                                     activation_fn=nn.tanh,
                                     config=run_config)


def serving_input_fn(params):
    # This is the part I suspect is wrong.
    return tf.estimator.export.build_raw_serving_input_receiver_fn(FEATURE_SPEC)


def train_input_fn(training_dir, params):
    """Returns input function that would feed the model during training"""
    return _generate_input_fn(training_dir, TRAIN_FILENAME)


def eval_input_fn(training_dir, params):
    """Returns input function that would feed the model during evaluation"""
    return _generate_input_fn(training_dir, TEST_FILENAME)


def parse_csv(line):
    columns = tf.decode_csv(line, record_defaults=RECORDS_DEFAULTS)
    line_features = dict(zip(CSV_COLUMNS, columns))
    line_label = line_features.pop('label')
    # Only 'col3' is used as a feature; 'label' is the regression target.
    return {'col3': line_features.pop('col3')}, line_label


def _generate_input_fn(training_dir, training_filename):
    filename = os.path.join(training_dir, training_filename)
    dataset = tf.data.TextLineDataset(filename)
    dataset = dataset.skip(NUM_LINES_TO_SKIP).map(parse_csv).batch(BATCH_SIZE)
    return dataset
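For what it's worth, my current guess is that build_raw_serving_input_receiver_fn expects a dict of feature tensors (placeholders) rather than a feature spec, and that the function it returns still has to be called, roughly like the sketch below (the placeholder shape is my assumption). Is something like this what SageMaker expects, or is my curl request the real problem?

import tensorflow as tf

def serving_input_fn(params):
    # Guess: pass a placeholder tensor instead of FEATURE_SPEC and call the
    # receiver fn returned by build_raw_serving_input_receiver_fn.
    col3 = tf.placeholder(dtype=tf.float32, shape=[None])  # shape is an assumption
    return tf.estimator.export.build_raw_serving_input_receiver_fn({'col3': col3})()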