Commit 28c9226

Improve argument type checking (triton-inference-server#134)
* Improve argument type checking
* Review edit
1 parent b7cec48 · commit 28c9226

5 files changed: +60 −15 lines changed

src/infer_request.cc

Lines changed: 20 additions & 3 deletions
@@ -41,10 +41,27 @@ InferRequest::InferRequest(
     const std::vector<std::string>& requested_output_names,
     const std::string& model_name, const int64_t model_version,
     const uint32_t flags)
-    : request_id_(request_id), correlation_id_(correlation_id), inputs_(inputs),
-      requested_output_names_(requested_output_names), model_name_(model_name),
-      model_version_(model_version), flags_(flags)
+    : request_id_(request_id), correlation_id_(correlation_id),
+      model_name_(model_name), model_version_(model_version), flags_(flags)
 {
+  for (auto& input : inputs) {
+    if (!input) {
+      throw PythonBackendException(
+          "Input tensor for request with id '" + request_id +
+          "' and model name '" + model_name + "' should not be empty.");
+    }
+  }
+
+  for (auto& requested_output_name : requested_output_names) {
+    if (requested_output_name == "") {
+      throw PythonBackendException(
+          "Requested output name for request with id '" + request_id +
+          "' and model name '" + model_name + "' should not be empty.");
+    }
+  }
+
+  inputs_ = inputs;
+  requested_output_names_ = requested_output_names;
 }
 
 const std::vector<std::shared_ptr<PbTensor>>&
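The constructor change follows a validate-before-assign pattern: check every argument up front, throw PythonBackendException with a message that names the offending request, and only then copy the arguments into the members. Below is a minimal standalone sketch of the same pattern, using hypothetical Request/Tensor types and a plain runtime_error in place of the backend's exception class:

#include <memory>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-ins for PythonBackendException and PbTensor.
struct BackendError : std::runtime_error {
  using std::runtime_error::runtime_error;
};
struct Tensor {};

class Request {
 public:
  Request(
      const std::string& id,
      const std::vector<std::shared_ptr<Tensor>>& inputs,
      const std::vector<std::string>& requested_output_names)
      : id_(id)
  {
    // Validate first: a null tensor or an empty output name fails loudly here
    // instead of surfacing later as a crash deeper in the pipeline.
    for (auto& input : inputs) {
      if (!input) {
        throw BackendError("Input tensor for request '" + id + "' is empty.");
      }
    }
    for (auto& name : requested_output_names) {
      if (name.empty()) {
        throw BackendError(
            "Requested output name for request '" + id + "' is empty.");
      }
    }
    // Only assign the members once every argument has been checked.
    inputs_ = inputs;
    requested_output_names_ = requested_output_names;
  }

 private:
  std::string id_;
  std::vector<std::shared_ptr<Tensor>> inputs_;
  std::vector<std::string> requested_output_names_;
};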

src/infer_response.cc

Lines changed: 9 additions & 1 deletion
@@ -36,8 +36,16 @@ namespace triton { namespace backend { namespace python {
 InferResponse::InferResponse(
     const std::vector<std::shared_ptr<PbTensor>>& output_tensors,
     std::shared_ptr<PbError> error)
-    : output_tensors_(std::move(output_tensors)), error_(error)
+    : error_(error)
 {
+  for (auto& output : output_tensors) {
+    if (!output) {
+      throw PythonBackendException(
+          "Output tensor for inference response should not be empty.");
+    }
+  }
+
+  output_tensors_ = output_tensors;
 }
 
 std::vector<std::shared_ptr<PbTensor>>&

src/pb_stub.cc

Lines changed: 9 additions & 5 deletions
@@ -714,10 +714,13 @@ PYBIND11_EMBEDDED_MODULE(c_python_backend_utils, module)
             const std::vector<std::shared_ptr<PbTensor>>&,
             const std::vector<std::string>&, const std::string&,
             const int64_t, const uint32_t>(),
-        py::arg("request_id") = "", py::arg("correlation_id") = 0,
-        py::arg("inputs"), py::arg("requested_output_names"),
-        py::arg("model_name"), py::arg("model_version") = -1,
-        py::arg("flags") = 0)
+        py::arg("request_id").none(false) = "",
+        py::arg("correlation_id").none(false) = 0,
+        py::arg("inputs").none(false),
+        py::arg("requested_output_names").none(false),
+        py::arg("model_name").none(false),
+        py::arg("model_version").none(false) = -1,
+        py::arg("flags").none(false) = 0)
       .def(
           "inputs", &InferRequest::Inputs,
           py::return_value_policy::reference_internal)
@@ -757,7 +760,8 @@ PYBIND11_EMBEDDED_MODULE(c_python_backend_utils, module)
       py::init<
           const std::vector<std::shared_ptr<PbTensor>>&,
           std::shared_ptr<PbError>>(),
-      py::arg("output_tensors"), py::arg("error") = nullptr)
+      py::arg("output_tensors").none(false),
+      py::arg("error") = static_cast<std::shared_ptr<PbError>>(nullptr))
       .def(
           "output_tensors", &InferResponse::OutputTensors,
           py::return_value_policy::reference)
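py::arg(...).none(false) tells pybind11 to reject None for that argument at call time, so a Python caller gets an immediate TypeError instead of handing the C++ side a null object. A minimal sketch of the behaviour, with made-up Widget/Holder types and a demo module name (none of these are part of the backend):

#include <pybind11/pybind11.h>
#include <memory>

namespace py = pybind11;

struct Widget {};  // hypothetical payload type

struct Holder {    // hypothetical owner that must not receive a null Widget
  explicit Holder(std::shared_ptr<Widget> widget) : widget_(std::move(widget)) {}
  std::shared_ptr<Widget> widget_;
};

PYBIND11_MODULE(none_check_demo, module)
{
  py::class_<Widget, std::shared_ptr<Widget>>(module, "Widget").def(py::init<>());
  py::class_<Holder, std::shared_ptr<Holder>>(module, "Holder")
      .def(
          py::init<std::shared_ptr<Widget>>(),
          // .none(false): Holder(None) now raises TypeError from pybind11;
          // without it, None would silently bind to a null shared_ptr<Widget>.
          py::arg("widget").none(false));
}

The error default in the diff above is presumably written as static_cast<std::shared_ptr<PbError>>(nullptr) rather than a bare nullptr so that the default value carries the shared_ptr type itself; error keeps accepting None because it is a genuinely optional argument.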

src/pb_tensor.cc

Lines changed: 19 additions & 3 deletions
@@ -38,9 +38,13 @@ namespace py = pybind11;
 namespace triton { namespace backend { namespace python {
 
 #ifdef TRITON_PB_STUB
-PbTensor::PbTensor(const std::string& name, py::object numpy_array)
+PbTensor::PbTensor(const std::string& name, py::array& numpy_array)
     : name_(name)
 {
+  if (name == "") {
+    throw PythonBackendException("Tensor name cannot be an empty string.");
+  }
+
   dtype_ = numpy_to_triton_type(numpy_array.attr("dtype"));
   memory_type_ = TRITONSERVER_MEMORY_CPU;
   memory_type_id_ = 0;
@@ -76,10 +80,14 @@ PbTensor::PbTensor(const std::string& name, py::object numpy_array)
 }
 
 PbTensor::PbTensor(
-    const std::string& name, py::object numpy_array,
+    const std::string& name, py::array& numpy_array,
     TRITONSERVER_DataType dtype)
     : name_(name)
 {
+  if (name == "") {
+    throw PythonBackendException("Tensor name cannot be an empty string.");
+  }
+
   if (numpy_to_triton_type(numpy_array.attr("dtype")) != dtype) {
     numpy_array = numpy_array.attr("view")(triton_to_numpy_type(dtype));
   }
@@ -124,6 +132,10 @@ PbTensor::PbTensor(
     int64_t memory_type_id, void* memory_ptr, uint64_t byte_size,
     DLManagedTensor* dl_managed_tensor)
 {
+  if (name == "") {
+    throw PythonBackendException("Tensor name cannot be an empty string.");
+  }
+
   name_ = name;
   memory_ptr_ = memory_ptr;
   memory_type_ = memory_type;
@@ -214,7 +226,7 @@ delete_unused_dltensor(PyObject* dlp)
 }
 
 std::shared_ptr<PbTensor>
-PbTensor::FromNumpy(const std::string& name, py::object numpy_array)
+PbTensor::FromNumpy(const std::string& name, py::array& numpy_array)
 {
   return std::make_shared<PbTensor>(name, numpy_array);
 }
@@ -292,6 +304,10 @@ PbTensor::Memory()
 std::shared_ptr<PbTensor>
 PbTensor::FromDLPack(const std::string& name, const py::capsule& dlpack_tensor)
 {
+  if (name == "") {
+    throw PythonBackendException("Tensor name cannot be an empty string.");
+  }
+
   DLManagedTensor* dl_managed_tensor =
       static_cast<DLManagedTensor*>(dlpack_tensor.get_pointer());
 

src/pb_tensor.h

Lines changed: 3 additions & 3 deletions
@@ -67,7 +67,7 @@ class PbTensor {
   /// Create a PbTensor using a numpy array
   /// \param name The name of the tensor
   /// \param numpy_array Numpy array to use for the initialization of the tensor
-  PbTensor(const std::string& name, py::object numpy_array);
+  PbTensor(const std::string& name, py::array& numpy_array);
 
   /// Create a PbTensor using a numpy array. This constructor is used for types
   /// that are not natively available in C++ such as float16. This constructor
@@ -76,7 +76,7 @@ class PbTensor {
   /// \param numpy_array Numpy array to use for the initialization of the tensor
   /// \param dtype The triton dtype
   PbTensor(
-      const std::string& name, py::object numpy_array,
+      const std::string& name, py::array& numpy_array,
       TRITONSERVER_DataType dtype);
 #endif
 
@@ -123,7 +123,7 @@ class PbTensor {
   /// \param numpy_array Numpy array
   /// \param name name of the tensor
   static std::shared_ptr<PbTensor> FromNumpy(
-      const std::string& name, py::object numpy_array);
+      const std::string& name, py::array& numpy_array);
 
   /// Get a PyCapsule object containing the DLPack representation of the tensor.
   /// \return Capsule object containing pointer to a DLPack object.
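Tightening the signature from py::object to py::array moves the type check into the binding layer: pybind11 only binds an actual numpy ndarray, so the C++ body can read .attr("dtype") without first probing an arbitrary Python object. A small self-contained sketch of that effect, with a made-up describe_dtype function and module name (not from this commit):

#include <pybind11/numpy.h>
#include <pybind11/pybind11.h>
#include <string>

namespace py = pybind11;

// Hypothetical helper: with the parameter typed as py::array, the caster has
// already verified the argument is an ndarray before this body runs.
std::string
describe_dtype(const py::array& numpy_array)
{
  return py::str(numpy_array.attr("dtype")).cast<std::string>();
}

PYBIND11_MODULE(array_arg_demo, module)
{
  // describe_dtype(np.zeros(3)) -> "float64"
  // describe_dtype([1, 2, 3])   -> TypeError raised at the binding, not a
  //                                failure later inside the C++ body.
  module.def("describe_dtype", &describe_dtype, py::arg("numpy_array"));
}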
