You must feed a value for placeholder tensor 'input_example_tensor' with dtype string and shape [1]

I am developing a TensorFlow Serving client/server application using the chatbot-retrieval project.

My code has two parts: the serving part and the client part.

Below is a code snippet for the serving part.

import tensorflow as tf
# Assumed import: the old Estimator.export() path uses the session_bundle
# exporter for the signature helpers (regression_signature, generic_signature).
from tensorflow.contrib.session_bundle import exporter


def get_features(context, utterance):
  context_len = 50
  utterance_len = 50

  features = {
    "context": context,
    "context_len": tf.constant(context_len, shape=[1,1], dtype=tf.int64),
    "utterance": utterance,
    "utterance_len": tf.constant(utterance_len, shape=[1,1], dtype=tf.int64),
  }

  return features


def my_input_fn(estimator, input_example_tensor):
  feature_configs = {
          'context': tf.FixedLenFeature(shape=[50], dtype=tf.int64),
          'utterance': tf.FixedLenFeature(shape=[50], dtype=tf.int64)
          }
  tf_example = tf.parse_example(input_example_tensor, feature_configs)
  context = tf.identity(tf_example['context'], name='context')
  utterance = tf.identity(tf_example['utterance'], name='utterance')
  features = get_features(context, utterance)
  return features

def my_signature_fn(input_example_tensor, features, predictions):
  feature_configs = {
          'context':tf.FixedLenFeature(shape=[50], dtype=tf.int64),
          'utterance':tf.FixedLenFeature(shape=[50], dtype=tf.int64)
          }

  tf_example = tf.parse_example(input_example_tensor, feature_configs)
  tf_context = tf.identity(tf_example['context'], name='tf_context')
  tf_utterance = tf.identity(tf_example['utterance'], name='tf_utterance')

  default_graph_signature = exporter.regression_signature(
              input_tensor=input_example_tensor,
              output_tensor=tf.identity(predictions)
              )

  named_graph_signatures = {
              'inputs':exporter.generic_signature(
                  {
                      'context':tf_context,
                      'utterance':tf_utterance
                  }
               ),
              'outputs':exporter.generic_signature(
                  {
                      'scores':predictions
                  }
               )
              }

  return default_graph_signature, named_graph_signatures

def main():
  ##preliminary codes here##

  estimator.fit(input_fn=input_fn_train, steps=100, monitors=[eval_monitor])

  estimator.export(
      export_dir = FLAGS.export_dir,
      input_fn = my_input_fn,
      use_deprecated_input_fn = True,
      signature_fn = my_signature_fn,
      exports_to_keep = 1
      )
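
For context, with use_deprecated_input_fn = True the export path creates the serialized-Example placeholder itself and passes it to the input_fn; judging from the error below, that placeholder is equivalent to something like the following (a sketch inferred from the error message, not the actual exporter code).

# Sketch only: name, dtype and shape are taken from the error message further
# down; the real placeholder is created internally by the deprecated export path.
input_example_tensor = tf.placeholder(tf.string, shape=[1], name='input_example_tensor')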

Below is a code snippet for the client part.

import numpy as np
import tensorflow as tf


def tokenizer_fn(iterator):
  return (x.split(" ") for x in iterator)


vp = tf.contrib.learn.preprocessing.VocabularyProcessor.restore(FLAGS.vocab_processor_file)

input_context = "biz banka kart farkli bir banka atmsinde para"
input_utterance = "farkli banka kart biz banka atmsinde para"

context_feature = np.array(list(vp.transform([input_context])))
utterance_feature = np.array(list(vp.transform([input_utterance])))

context_tensor = tf.contrib.util.make_tensor_proto(context_feature, shape=[1, context_feature.size])
utterance_tensor = tf.contrib.util.make_tensor_proto(utterance_feature, shape=[1, utterance_feature.size])

request.inputs['context'].CopyFrom(context_tensor)
request.inputs['utterance'].CopyFrom(utterance_tensor)

result_counter.throttle()
result_future = stub.Predict.future(request, 5.0)  # 5-second timeout
result_future.add_done_callback(
    _create_rpc_callback(label[0], result_counter))
return result_counter.get_error_rate()  # inside the enclosing inference function
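
The snippet above relies on request, stub, result_counter, label and _create_rpc_callback from the preliminary client code that is not shown. For completeness, here is a minimal sketch of how the gRPC stub and the PredictRequest are typically created for a TensorFlow Serving model server of that era; the server address flag and the model name used here are placeholders, not values from the question.

from grpc.beta import implementations
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2

# Open an insecure gRPC channel to the model server and build the stub.
host, port = FLAGS.server.split(':')
channel = implementations.insecure_channel(host, int(port))
stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

# The model name must match the --model_name the server was started with.
request = predict_pb2.PredictRequest()
request.model_spec.name = 'chatbot_retrieval'  # placeholder model name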

Both the serving part and the client part build without errors. After running the serving application and then the client application, I get the following strange error propagated to the client application when the RPC call completes.

Below is the error that appears when the RPC call completes:

AbortionError(code=StatusCode.INVALID_ARGUMENT, details="You must feed a value for placeholder tensor 'input_example_tensor' with dtype string and shape [1]
         [[Node: input_example_tensor = Placeholder[_output_shapes=[[1]], dtype=DT_STRING, shape=[1], _device="/job:localhost/replica:0/task:0/cpu:0"]()]]")

The error is strange because it seems there is no way to feed the placeholder from the client application.

How can I provide data for the placeholder 'input_example_tensor' if I access the model through TensorFlow Serving?
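
For reference, the exported graph's tf.parse_example expects serialized tf.train.Example protos, so feeding 'input_example_tensor' directly would mean sending a single serialized Example whose 'context' and 'utterance' features match the FixedLenFeature specs above. A hypothetical client-side sketch follows; the input key 'examples' is an assumption and depends on how the default signature was exported.

# Hypothetical: build one tf.train.Example with the int64 features the exported
# graph parses, serialize it, and send it as a string tensor of shape [1].
example = tf.train.Example(features=tf.train.Features(feature={
    'context': tf.train.Feature(
        int64_list=tf.train.Int64List(value=context_feature.ravel().tolist())),
    'utterance': tf.train.Feature(
        int64_list=tf.train.Int64List(value=utterance_feature.ravel().tolist())),
}))
serialized = example.SerializeToString()
request.inputs['examples'].CopyFrom(  # the input key here is an assumption
    tf.contrib.util.make_tensor_proto([serialized], shape=[1]))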

Answer: (I am posting my answer here as an edit because I cannot post it as an answer due to my lack of Stack Overflow badges. Anyone who volunteers to submit it as an answer to the question is very welcome; I will accept it as the answer.)

I was able to solve the problem by using the option use_deprecated_input_fn = False in the estimator.export function and changing the input signatures accordingly.

Below is the final code, which runs without problems.

def get_features(input_example_tensor, context, utterance):
  context_len = 50
  utterance_len = 50
  features = {
    # exposed under the key passed as input_feature_key to estimator.export()
    "my_input_example_tensor": input_example_tensor,
    "context": context,
    "context_len": tf.constant(context_len, shape=[1,1], dtype=tf.int64),
    "utterance": utterance,
    "utterance_len": tf.constant(utterance_len, shape=[1,1], dtype=tf.int64),
  }

  return features

def my_input_fn():
  input_example_tensor = tf.placeholder(tf.string, name='tf_example_placeholder')

  feature_configs = {
          'context':tf.FixedLenFeature(shape=[50], dtype=tf.int64),
          'utterance':tf.FixedLenFeature(shape=[50], dtype=tf.int64)
          }
  tf_example = tf.parse_example(input_example_tensor, feature_configs)
  context = tf.identity(tf_example['context'], name='context')
  utterance = tf.identity(tf_example['utterance'], name='utterance')
  features = get_features(input_example_tensor, context, utterance)

  return features, None

def my_signature_fn(input_example_tensor, features, predictions):   
  default_graph_signature = exporter.regression_signature(
              input_tensor=input_example_tensor,
              output_tensor=predictions
              )

  named_graph_signatures = {
              'inputs':exporter.generic_signature(
                  {
                      'context':features['context'],
                      'utterance':features['utterance']
                  }
               ),
              'outputs':exporter.generic_signature(
                  {
                      'scores':predictions
                  }
               )
              }

  return default_graph_signature, named_graph_signatures

def main():
  ##preliminary codes here##

  estimator.fit(input_fn=input_fn_train, steps=100, monitors=[eval_monitor])

  # Give the estimator a dummy target TensorSignature before exporting
  # (this touches the private _targets_info attribute).
  estimator._targets_info = tf.contrib.learn.estimators.tensor_signature.TensorSignature(tf.constant(0, shape=[1,1]))

  estimator.export(
          export_dir = FLAGS.export_dir,
          input_fn = my_input_fn,
          input_feature_key ="my_input_example_tensor",
          use_deprecated_input_fn = False,
          signature_fn = my_signature_fn,
          exports_to_keep = 1
          )
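
With this export, the named 'inputs'/'outputs' signatures expose 'context' and 'utterance' as inputs and 'scores' as the output, which matches what the client snippet already feeds. Reading the result once the RPC has completed might look like the following sketch, assuming the Predict call from the client code above.

# Sketch: convert the 'scores' output of the completed Predict RPC back to a
# NumPy array.
result = result_future.result()
scores = tf.contrib.util.make_ndarray(result.outputs['scores'])
print(scores)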

The OP solved this themselves but could not post their own answer, so here it is on their behalf:

The problem was resolved by using the option use_deprecated_input_fn = False in the estimator.export function and changing the input signatures accordingly:

def my_signature_fn(input_example_tensor, features, predictions):   
  default_graph_signature = exporter.regression_signature(
      input_tensor=input_example_tensor,
      output_tensor=predictions
      )

  named_graph_signatures = {
      'inputs':exporter.generic_signature(
          {
          'context':features['context'],
          'utterance':features['utterance']
          }
       ),
      'outputs':exporter.generic_signature(
          {
          'scores':predictions
          }
       )
      }

  return default_graph_signature, named_graph_signatures

def main():
  ##preliminary codes here##

  estimator.fit(input_fn=input_fn_train, steps=100, monitors=[eval_monitor])

  estimator._targets_info = tf.contrib.learn.estimators.tensor_signature.TensorSignature(tf.constant(0, shape=[1,1]))

  estimator.export(
      export_dir = FLAGS.export_dir,
      input_fn = my_input_fn,
      input_feature_key ="my_input_example_tensor",
      use_deprecated_input_fn = False,
      signature_fn = my_signature_fn,
      exports_to_keep = 1
      )