in DxDispatch/src/model/JsonParsersGenerated.cpp [7:27]
DML_TENSOR_DATA_TYPE ParseDmlTensorDataType(const rapidjson::Value& value)
{
    // The JSON field must be a string; any other JSON type is a schema violation.
    if (value.GetType() != rapidjson::Type::kStringType)
    {
        throw std::invalid_argument("DML_TENSOR_DATA_TYPE must be a string.");
    }
    auto name = value.GetString();

    // Each enumerant is accepted either fully qualified ("DML_TENSOR_DATA_TYPE_FLOAT32")
    // or with the common prefix stripped ("FLOAT32").
    struct NameMapping
    {
        const char* fullName;
        const char* shortName;
        DML_TENSOR_DATA_TYPE dataType;
    };
    static const NameMapping c_mappings[] =
    {
        { "DML_TENSOR_DATA_TYPE_UNKNOWN", "UNKNOWN", DML_TENSOR_DATA_TYPE_UNKNOWN },
        { "DML_TENSOR_DATA_TYPE_FLOAT32", "FLOAT32", DML_TENSOR_DATA_TYPE_FLOAT32 },
        { "DML_TENSOR_DATA_TYPE_FLOAT16", "FLOAT16", DML_TENSOR_DATA_TYPE_FLOAT16 },
        { "DML_TENSOR_DATA_TYPE_UINT32", "UINT32", DML_TENSOR_DATA_TYPE_UINT32 },
        { "DML_TENSOR_DATA_TYPE_UINT16", "UINT16", DML_TENSOR_DATA_TYPE_UINT16 },
        { "DML_TENSOR_DATA_TYPE_UINT8", "UINT8", DML_TENSOR_DATA_TYPE_UINT8 },
        { "DML_TENSOR_DATA_TYPE_INT32", "INT32", DML_TENSOR_DATA_TYPE_INT32 },
        { "DML_TENSOR_DATA_TYPE_INT16", "INT16", DML_TENSOR_DATA_TYPE_INT16 },
        { "DML_TENSOR_DATA_TYPE_INT8", "INT8", DML_TENSOR_DATA_TYPE_INT8 },
        { "DML_TENSOR_DATA_TYPE_FLOAT64", "FLOAT64", DML_TENSOR_DATA_TYPE_FLOAT64 },
        { "DML_TENSOR_DATA_TYPE_UINT64", "UINT64", DML_TENSOR_DATA_TYPE_UINT64 },
        { "DML_TENSOR_DATA_TYPE_INT64", "INT64", DML_TENSOR_DATA_TYPE_INT64 },
    };

    for (const auto& mapping : c_mappings)
    {
        if (!strcmp(name, mapping.fullName) || !strcmp(name, mapping.shortName))
        {
            return mapping.dataType;
        }
    }

    throw std::invalid_argument(fmt::format("'{}' is not a recognized value for DML_TENSOR_DATA_TYPE.", name));
}