def predict()

in lab/03-Package-Deploy/greengrass-v2/artifacts/aws.samples.windturbine.detector/1.0.0/inference/edgeagentclient.py [0:0]


    def predict(self, model_name, x, shm=False):
        """
        Invokes the model and get the predictions
        """
        try:
            if self.model_map.get(model_name) is None:
                raise Exception('Model %s not loaded' % model_name)
            # Create a request
            req = agent.PredictRequest()
            req.name = model_name
            # Then build a temp Tensor and fill its metadata from the model's input signature
            tensor = agent.Tensor()
            meta = self.model_map[model_name]['in'][0]
            tensor.tensor_metadata.name = meta.name
            tensor.tensor_metadata.data_type = meta.data_type
            tensor.tensor_metadata.shape.extend(meta.shape)

            if shm:
                # The input already lives in a shared-memory segment; x is its segment id
                tensor.shared_memory_handle.offset = 0
                tensor.shared_memory_handle.segment_id = x
            else:
                # Copy the numpy array into the request as raw float32 bytes
                tensor.byte_data = x.astype(np.float32).tobytes()

            req.tensors.append(tensor)

            # Invoke the model
            resp = self.agent.Predict(req)

            # Parse the output
            meta = self.model_map[model_name]['out'][0]  # output metadata (the shape used below comes from the response tensor)
            tensor = resp.tensors[0]
            data = np.frombuffer(tensor.byte_data, dtype=np.float32)
            return data.reshape(tensor.tensor_metadata.shape)
        except Exception as e:
            logging.error(e)
            return None
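
The method relies on module-level imports in edgeagentclient.py (numpy as np, logging, and the generated agent protobuf stubs) and on self.model_map and self.agent being populated elsewhere in the class. A minimal caller sketch follows; the EdgeAgentClient class name, its no-argument construction, the model name, and the input shape are assumptions for illustration, not details taken from this snippet.

    import numpy as np

    # Hypothetical usage; only predict(model_name, x, shm=False) is defined above,
    # so the constructor, model name, and input shape here are assumptions.
    client = EdgeAgentClient()                        # assumed constructor
    x = np.random.rand(1, 6, 10, 10)                  # assumed input shape for the detector
    y_pred = client.predict('WindTurbineAnomalyDetection', x)
    if y_pred is None:
        print('Prediction failed; predict() logged the underlying error')

Because predict() catches all exceptions, logs them, and returns None, callers should check the return value rather than wrapping the call in try/except. When shm=True, x is interpreted as a shared-memory segment id instead of a numpy array.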