in pydeequ/checks.py [0:0]
def hasDataType(self, column, datatype: ConstrainableDataTypes, assertion=None, hint=None):
    """
    Check to run against the fraction of rows that conform to the given data type.

    :param str column: The column in the DataFrame to be checked.
    :param ConstrainableDataTypes datatype: Data type the column's values should conform to.
    :param lambda assertion: A function taking an int or float parameter; when omitted,
        the Scala-side default assertion for ``hasDataType`` is used.
    :param str hint: A hint that states why a constraint could have failed.
    :return: hasDataType self: A Check object that runs the compliance on the column.
    """
    jvm_datatype = datatype._get_java_object(self._jvm)
    # Wrap the Python callable for the JVM, or fall back to the Scala default
    # value for the third parameter of Check.hasDataType.
    if assertion:
        jvm_assertion = ScalaFunction1(self._spark_session.sparkContext._gateway, assertion)
    else:
        jvm_assertion = getattr(self._Check, "hasDataType$default$3")()
    jvm_hint = self._jvm.scala.Option.apply(hint)
    self._Check = self._Check.hasDataType(column, jvm_datatype, jvm_assertion, jvm_hint)
    return self