【问题标题】:tensorflow serving produces wrong answer when I export keras model当我导出 keras 模型时,张量流服务会产生错误的答案
【发布时间】:2018-10-18 08:31:48
【问题描述】:

我尝试将我的 keras 模型导出到 tensorflow 服务,一切正常。我要做的是从客户端接受 b64 编码的输入图像字符串并输出 True/False 值。我的 keras 模型输出 3 个值,第一个值表示从模型预测的程度,我会将其与另一个固定值进行比较,然后将整个算法从获取图像字符串到输出 True/False 值输出到使用 RESTful API 的 tensorflow 服务。但是,我没有从我的客户端程序中得到正确的输出。长话短说,我给个代码

我的导出保存模型的程序:

import tensorflow as tf
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants, signature_constants, signature_def_utils_impl
from keras.models import load_model
from keras.layers import Input
import os

tf.app.flags.DEFINE_string('model_dir', './keras_models',
                           '''Directory which contains keras models''')
tf.app.flags.DEFINE_string('output_dir', './model_output',
                           '''Directory where to export the model''')
tf.app.flags.DEFINE_string('model_version', '1',
                           '''version number of the model''')
tf.app.flags.DEFINE_string('model_file', 'pointer_model.json',
                           '''json file which contains model architecture''')
tf.app.flags.DEFINE_string('weights_file', 'pointer_model.h5',
                           '''h5 file that contains model weights''')

FLAGS = tf.app.flags.FLAGS


def preprocess_image(image_buffer):
    '''
    Preprocess JPEG encoded bytes to 3D floate tensor

    :param image_buffer:
    :return: 4D image tensor (1, width, height, channels)
    '''

    image = tf.image.decode_jpeg(image_buffer, channels=3)
    image = tf.image.convert_image_dtype(image, dtype=tf.float32)

    return image


def main(_):
    with tf.Graph().as_default():
        serialized_tf_example = tf.placeholder(tf.string, name='input_image')
        feature_configs = {
            'image/encoded': tf.FixedLenFeature(
                shape=[], dtype=tf.string),
        }
        tf_example = tf.parse_example(serialized_tf_example, feature_configs)
        jpegs = tf_example['image/encoded']
        images = tf.map_fn(preprocess_image, jpegs, dtype=tf.float32)

        images = tf.squeeze(images, [0])
        images = tf.expand_dims(images, axis=0)
        # now the image shape is [1, ?, ?, 3]
        images = tf.image.resize_images(images, tf.constant([224, 224]))

        model = load_model('./keras_models/my_model.h5')

        x = Input(tensor=images)
        y = model(x)

        model.summary()
        compare_value = tf.Variable(100.0)
        bool_out = tf.math.greater(y, compare_value)

        bool_out = bool_out[:,0]

        bool_out = tf.cast(bool_out, tf.float32)
        bool_out = tf.expand_dims(bool_out, axis=0)
        final_out = tf.concat([tf.transpose(y), bool_out], axis=0)
        init = tf.global_variables_initializer()
        with tf.Session() as sess:
            sess.run(init)

            # predict_tensor_input_info = tf.saved_model.utils.build_tensor_info(jpegs)
            # predict_tensor_score_info = tf.saved_model.utils.build_tensor_info(bool_out)
            prediction_signature = \
                (tf.saved_model.signature_def_utils.predict_signature_def(
                    inputs={'images': jpegs},
                    outputs={'scores': final_out}
                )
            )

            export_path = os.path.join(
                tf.compat.as_bytes(FLAGS.output_dir),
                tf.compat.as_bytes(FLAGS.model_version)
            )

            builder = saved_model_builder.SavedModelBuilder(export_path)

            legacy_init_op = tf.group(tf.tables_initializer(),
                                      name = 'legacy_init_op')

            builder.add_meta_graph_and_variables(
                sess, [tag_constants.SERVING],
                signature_def_map={
                    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:prediction_signature,
                },
                legacy_init_op = legacy_init_op
            )

            builder.save()

if __name__ =="__main__":
    tf.app.run()

这是我的客户程序:

import base64
import requests
import json
import argparse
import time
from glob import glob

image_path = glob('./segmented_image/*.jpg')


for i in range(len(image_path)):

    input_image = open(image_path[i], 'rb').read()

    encoded_input_string = base64.b64encode(input_image)
    input_string = encoded_input_string.decode('utf-8')
    # input_image_recover = base64.b64decode(input_string)
    # with open('recovered_image.jpg', 'wb') as output_file:
    #     output_file.write(input_image_recover)
    #
    # print('Base64 encoded string: ' + input_string[:10] + '...' + input_string[-10:])

    instance = [{"b64": input_string}]
    data = json.dumps({"instances": instance})
    print(data[:30]+ '...' + data[-10:])

    json_response = requests.post('http://localhost:8501/v1/models/pointer_model:predict',
                                  data=data)

    print(json_response.text)
    end_time = time.time()

json_response.text 的输出如下:

{"instances": [{"b64": "/9j/4A...Y//9k="}]}
{
    "predictions": [[-0.00015692], [-0.000967527], [0.000567942], [0.0]
    ]
}

{"instances": [{"b64": "/9j/4A...if/9k="}]}
{
    "predictions": [[-0.000157582], [-0.000998327], [0.000598866], [0.0]
    ]
}

......

预测键中的前 3 个值应该是度数,图像中的 x,y 坐标应该是数百个值...最后一个值是与 100.0 相比转换为 float32 的 True/False 值

好的.. 最后,我还使用 model.predict 测试了我的模型,它给出了正确的答案...

现在我完全糊涂了。谁能告诉我我的代码哪里出了问题?

【问题讨论】:

    标签: python tensorflow keras tensorflow-serving


    【解决方案1】:

    使用我的脚本以 tensorflow 服务格式导出

    import sys
    from keras.models import load_model
    import tensorflow as tf
    from keras import backend as K
    from tensorflow.python.framework import graph_util
    from tensorflow.python.framework import graph_io
    from tensorflow.python.saved_model import signature_constants
    from tensorflow.python.saved_model import tag_constants
    
    
    K.set_learning_phase(0)
    K.set_image_data_format('channels_last')
    
    INPUT_MODEL = sys.argv[1]
    NUMBER_OF_OUTPUTS = 1
    OUTPUT_NODE_PREFIX = 'output_node'
    OUTPUT_FOLDER= 'frozen'
    OUTPUT_GRAPH = 'frozen_model.pb'
    OUTPUT_SERVABLE_FOLDER = sys.argv[2]
    INPUT_TENSOR = sys.argv[3]
    
    
    try:
        model = load_model(INPUT_MODEL)
    except ValueError as err:
        print('Please check the input saved model file')
        raise err
    
    output = [None]*NUMBER_OF_OUTPUTS
    output_node_names = [None]*NUMBER_OF_OUTPUTS
    for i in range(NUMBER_OF_OUTPUTS):
        output_node_names[i] = OUTPUT_NODE_PREFIX+str(i)
        output[i] = tf.identity(model.outputs[i], name=output_node_names[i])
    print('Output Tensor names: ', output_node_names)
    
    
    sess = K.get_session()
    try:
        frozen_graph = graph_util.convert_variables_to_constants(sess,             sess.graph.as_graph_def(), output_node_names)    
        graph_io.write_graph(frozen_graph, OUTPUT_FOLDER, OUTPUT_GRAPH, as_text=False)
        print(f'Frozen graph ready for inference/serving at     {OUTPUT_FOLDER}/{OUTPUT_GRAPH}')
    except:
        print('Error Occured')
    
    
    
    builder = tf.saved_model.builder.SavedModelBuilder(OUTPUT_SERVABLE_FOLDER)
    
    with tf.gfile.GFile(f'{OUTPUT_FOLDER}/{OUTPUT_GRAPH}', "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
    
    sigs = {}
    OUTPUT_TENSOR = output_node_names
    with tf.Session(graph=tf.Graph()) as sess:
        tf.import_graph_def(graph_def, name="")
        g = tf.get_default_graph()
        inp = g.get_tensor_by_name(INPUT_TENSOR)
        out = g.get_tensor_by_name(OUTPUT_TENSOR[0] + ':0')
    
        sigs[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \
            tf.saved_model.signature_def_utils.predict_signature_def(
                {"input": inp}, {"outout": out})
    
        builder.add_meta_graph_and_variables(sess,
                                             [tag_constants.SERVING],
                                             signature_def_map=sigs)
        try:
            builder.save()
            print(f'Model ready for deployment at     {OUTPUT_SERVABLE_FOLDER}/saved_model.pb')
            print('Prediction signature : ')
            print(sigs['serving_default'])
        except:
            print('Error Occured, please checked frozen graph')
    

    【讨论】:

    • 已经解决了我的问题,但仍然感谢。您的代码提供了一个很好的管道来构建和读取 tensorflow freeze_graph.pb,这将对我未来有很大帮助
    • @C.Wang 包括您如何解决问题,以便对其他人有所帮助。
    猜你喜欢
    • 2019-05-16
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    • 2017-11-28
    • 2020-07-11
    • 2021-08-07
    相关资源
    最近更新 更多