tensorflow_io/python/ops/json_dataset_ops.py [75:108]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                )
                columns_function.append(function)

            for (column, function) in zip(columns, columns_function):
                column_dataset = tf.compat.v2.data.Dataset.range(
                    0, sys.maxsize, capacity
                )
                column_dataset = column_dataset.map(
                    lambda index: function(index, index + capacity)
                )
                column_dataset = column_dataset.apply(
                    tf.data.experimental.take_while(
                        lambda v: tf.greater(tf.shape(v)[0], 0)
                    )
                )
                columns_dataset.append(column_dataset)
            if len(columns_dataset) == 1:
                dataset = columns_dataset[0]
            else:
                dataset = tf.compat.v2.data.Dataset.zip(tuple(columns_dataset))
            dataset = dataset.unbatch()

            self._function = columns_function
            self._dataset = dataset
            super().__init__(
                self._dataset._variant_tensor
            )  # pylint: disable=protected-access

    def _inputs(self):
        return []

    @property
    def element_spec(self):
        """Type/shape structure of one element, delegated to the wrapped dataset."""
        wrapped = self._dataset
        return wrapped.element_spec
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



tensorflow_io/python/ops/orc_dataset_ops.py [69:102]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                )
                columns_function.append(function)

            for (column, function) in zip(columns, columns_function):
                column_dataset = tf.compat.v2.data.Dataset.range(
                    0, sys.maxsize, capacity
                )
                column_dataset = column_dataset.map(
                    lambda index: function(index, index + capacity)
                )
                column_dataset = column_dataset.apply(
                    tf.data.experimental.take_while(
                        lambda v: tf.greater(tf.shape(v)[0], 0)
                    )
                )
                columns_dataset.append(column_dataset)
            if len(columns_dataset) == 1:
                dataset = columns_dataset[0]
            else:
                dataset = tf.compat.v2.data.Dataset.zip(tuple(columns_dataset))
            dataset = dataset.unbatch()

            self._function = columns_function
            self._dataset = dataset
            super().__init__(
                self._dataset._variant_tensor
            )  # pylint: disable=protected-access

    def _inputs(self):
        return []

    @property
    def element_spec(self):
        """Type/shape structure of one element, delegated to the wrapped dataset."""
        wrapped = self._dataset
        return wrapped.element_spec
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



