in runtime/native/python/treelite_runtime/predictor.py [0:0]
def predict(self, batch, verbose=False, pred_margin=False):
    """
    Perform batch prediction with a 2D sparse data matrix. Worker threads will
    internally divide up work for batch prediction. **Note that this function
    may be called by only one thread at a time.** In order to use multiple
    threads to process multiple prediction requests simultaneously, use
    :py:meth:`predict_instance` instead.

    Parameters
    ----------
    batch : object of type :py:class:`Batch`
        batch of rows for which predictions will be made
    verbose : :py:class:`bool <python:bool>`, optional
        Whether to print extra messages during prediction
    pred_margin : :py:class:`bool <python:bool>`, optional
        whether to produce raw margins rather than transformed probabilities

    Returns
    -------
    prediction : :py:class:`numpy.ndarray`
        array of predictions, of dtype ``float32``; for models with more
        than one output group, reshaped to (num_row, num_output_group)
    """
    if not isinstance(batch, Batch):
        raise TreeliteError('batch must be of type Batch')
    if batch.handle is None or batch.kind is None:
        raise TreeliteError('batch cannot be empty')
    # The native API takes an int flag for sparse (1) vs dense (0) batches;
    # compute it once so both C calls are guaranteed to agree.
    batch_sparse = ctypes.c_int(1 if batch.kind == 'sparse' else 0)
    # Ask the native predictor how large the output buffer must be,
    # then pre-allocate it before running prediction.
    result_size = ctypes.c_size_t()
    _check_call(_LIB.TreelitePredictorQueryResultSize(
        self.handle,
        batch.handle,
        batch_sparse,
        ctypes.byref(result_size)))
    out_result = np.zeros(result_size.value, dtype=np.float32, order='C')
    out_result_size = ctypes.c_size_t()
    _check_call(_LIB.TreelitePredictorPredictBatch(
        self.handle,
        batch.handle,
        batch_sparse,
        ctypes.c_int(1 if verbose else 0),
        ctypes.c_int(1 if pred_margin else 0),
        out_result.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
        ctypes.byref(out_result_size)))
    # Only the first `idx` entries of the buffer hold valid output
    # (the query above may have over-allocated).
    idx = int(out_result_size.value)
    num_row = batch.shape()[0]
    res = out_result[0:idx].reshape((num_row, -1)).squeeze()
    # For multi-output-group models, squeeze() above may have collapsed the
    # (num_row, num_output_group) layout (e.g. when num_row == 1); restore
    # the 2D shape so callers always see one row per input row.
    if self.num_output_group_ > 1 and num_row != idx:
        res = res.reshape((-1, self.num_output_group_))
    return res