[Serve] Improve batch size inconsistency error (#8315)

Simon Mo 2020-05-04 20:32:12 -07:00 committed by GitHub
parent ca929671b6
commit 1480bf4295


@@ -197,11 +197,12 @@ class RayServeWorker:
             self.latency_list.append(time.time() - start_timestamp)
             if (not isinstance(result_list,
                                list)) or (len(result_list) != batch_size):
-                raise RayServeException("__call__ function "
-                                        "doesn't preserve batch-size. "
-                                        "Please return a list of result "
-                                        "with length equals to the batch "
-                                        "size.")
+                error_message = ("Worker doesn't preserve batch size. The "
+                                 "input has length {} but the returned list "
+                                 "has length {}. Please return a list of "
+                                 "results with length equal to the batch size"
+                                 ".".format(batch_size, len(result_list)))
+                raise RayServeException(error_message)
             return result_list
         except Exception as e:
             wrapped_exception = wrap_to_ray_error(e)
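
For context, below is a minimal standalone sketch of the check this commit improves: it verifies that a batched callable returns exactly one result per input and, if not, raises with the new, more descriptive message. The handle_batch helper and the plain RuntimeError are illustrative stand-ins for the RayServeWorker internals and RayServeException; they are not Ray Serve APIs.

# Standalone sketch of the batch-size validation shown in the diff above.
# handle_batch and RuntimeError are hypothetical stand-ins, not Ray Serve APIs.
def handle_batch(callable_fn, batch):
    batch_size = len(batch)
    result_list = callable_fn(batch)
    # Same check and message as the new worker code: the output must be a
    # list with exactly one entry per input in the batch.
    if (not isinstance(result_list,
                       list)) or (len(result_list) != batch_size):
        error_message = ("Worker doesn't preserve batch size. The "
                         "input has length {} but the returned list "
                         "has length {}. Please return a list of "
                         "results with length equal to the batch size"
                         ".".format(batch_size, len(result_list)))
        raise RuntimeError(error_message)
    return result_list


def bad_handler(batch):
    # Drops the last element, so the output no longer matches the batch size.
    return batch[:-1]


handle_batch(bad_handler, ["a", "b", "c"])
# RuntimeError: Worker doesn't preserve batch size. The input has length 3
# but the returned list has length 2. Please return a list of results with
# length equal to the batch size.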