def _check_fp16_weight_param_exists()

in coremltools/models/neural_network/builder.py [0:0]


    def _check_fp16_weight_param_exists(self, layers):
        """
        Checks if the network has at least one ``weight_param`` which is in FP16 format.

        Parameters
        ----------
        layers: list of nn_spec.layer
            List of layers.

        Returns
        -------
        bool
            ``True`` if at least one weight parameter stores FP16 data, ``False`` otherwise.
        """

        for layer in layers:
            layer_type = layer.WhichOneof("layer")

            # Convolution
            if layer_type == "convolution":
                if len(layer.convolution.weights.float16Value) > 0:
                    return True
                if layer.convolution.hasBias and len(layer.convolution.bias.float16Value) > 0:
                    return True
            # Batchnorm
            elif layer_type == "batchnorm":
                if len(layer.batchnorm.mean.float16Value) > 0:
                    return True

            # InnerProduct
            elif layer_type == "innerProduct":
                if len(layer.innerProduct.weights.float16Value) > 0:
                    return True
                if layer.innerProduct.hasBias and len(layer.innerProduct.bias.float16Value) > 0:
                    return True

            # BatchedMatmul
            elif layer_type == "batchedMatmul":
                if len(layer.batchedMatmul.weights.float16Value) > 0:
                    return True
                if layer.batchedMatmul.hasBias and len(layer.batchedMatmul.bias.float16Value) > 0:
                    return True

            # Embedding layer
            elif layer_type == "embedding":
                if len(layer.embedding.weights.float16Value) > 0:
                    return True
                if layer.embedding.hasBias and len(layer.embedding.bias.float16Value) > 0:
                    return True

            # Embedding ND layer
            elif layer_type == "embeddingND":
                if len(layer.embeddingND.weights.float16Value) > 0:
                    return True
                if layer.embeddingND.hasBias and len(layer.embeddingND.bias.float16Value) > 0:
                    return True

            # Scale layer
            elif layer_type == "scale":
                if len(layer.scale.scale.float16Value) > 0:
                    return True
                if layer.scale.hasBias and len(layer.scale.bias.float16Value) > 0:
                    return True

            # Bias layer
            elif layer_type == "bias":
                if len(layer.bias.bias.float16Value) > 0:
                    return True

            # LoadConstant layer
            elif layer_type == "loadConstant":
                if len(layer.loadConstant.data.float16Value) > 0:
                    return True

            # Simple Recurrent
            elif layer_type == "simpleRecurrent":
                if len(layer.simpleRecurrent.weightMatrix.float16Value) > 0:
                    return True
                if layer.simpleRecurrent.hasBiasVector and len(layer.simpleRecurrent.biasVector.float16Value) > 0:
                    return True

            # GRU
            elif layer_type == "gru":
                if len(layer.gru.updateGateWeightMatrix.float16Value) > 0:
                    return True
                if layer.gru.hasBiasVectors and len(layer.gru.outputGateBiasVector.float16Value) > 0:
                    return True

            # uniDirectionalLSTM Layers
            elif layer_type == "uniDirectionalLSTM":
                if self._check_fp16_weight_params_lstms(lstm_wp=layer.uniDirectionalLSTM.weightParams,
                                                        has_peephole=layer.uniDirectionalLSTM.params.hasPeepholeVectors):
                    return True

            # biDirectionalLSTM Layers
            elif layer_type == "biDirectionalLSTM":
                for lstm_wp in layer.biDirectionalLSTM.weightParams:
                    if self._check_fp16_weight_params_lstms(lstm_wp=lstm_wp,
                                                            has_peephole=layer.biDirectionalLSTM.params.hasPeepholeVectors):
                        return True

            # branch Layers
            elif layer_type == "branch":
                if self._check_fp16_weight_param_exists(layer.branch.ifBranch.layers):
                    return True
                if self._check_fp16_weight_param_exists(layer.branch.elseBranch.layers):
                    return True

            # loop Layers
            elif layer_type == "loop":
                if self._check_fp16_weight_param_exists(layer.loop.conditionNetwork.layers):
                    return True
                if self._check_fp16_weight_param_exists(layer.loop.bodyNetwork.layers):
                    return True

        return False
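
A minimal usage sketch follows, assuming the public NeuralNetworkBuilder API; the layer name, shapes, and weight values are illustrative, and the private helper is called directly here only to show what it reports.

import numpy as np

from coremltools.models import datatypes
from coremltools.models.neural_network import NeuralNetworkBuilder

# Build a tiny single-layer network; name, shapes, and weights are illustrative.
builder = NeuralNetworkBuilder(
    input_features=[("data", datatypes.Array(3))],
    output_features=[("out", datatypes.Array(1))],
)
builder.add_inner_product(
    name="ip",
    W=np.zeros((1, 3), dtype=np.float32),  # stored as float32 (floatValue), not FP16
    b=None,
    input_channels=3,
    output_channels=1,
    has_bias=False,
    input_name="data",
    output_name="out",
)

# The helper walks the layer list and reports whether any weight_param
# carries a non-empty float16Value payload.
print(builder._check_fp16_weight_param_exists(builder.nn_spec.layers))  # False for float32 weights

An FP16-quantized model would populate each weight_param's float16Value field instead of floatValue, in which case the helper returns True.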