cassandra-analytics-spark-converter/src/main/scala-2.11-spark-2/org/apache/cassandra/spark/utils/SparkTypeUtils.java [30:53]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
public final class SparkTypeUtils
{
    private SparkTypeUtils()
    {
        throw new IllegalStateException(getClass() + " is a static utility class and shall not be instantiated");
    }

    public static final Comparator<CalendarInterval> CALENDAR_INTERVAL_COMPARATOR =
    Comparator.<CalendarInterval>comparingInt(interval -> interval.months)
              .thenComparingLong(interval -> interval.microseconds);

    public static CalendarInterval convertDuration(InternalDuration duration)
    {
        // Unfortunately, sub-microsecond precision is lost here: nanoseconds are truncated to microseconds when converting to the Spark data type.
        long micros = TimeUnit.NANOSECONDS.toMicros(duration.nanoseconds);
        micros += duration.days * CalendarInterval.MICROS_PER_DAY;
        return new CalendarInterval(duration.months, micros);
    }

    public static InternalDuration convertDuration(CalendarInterval interval)
    {
        int days = Ints.checkedCast(interval.microseconds / CalendarInterval.MICROS_PER_DAY);
        long microsRemain = interval.microseconds % CalendarInterval.MICROS_PER_DAY;
        return new InternalDuration(interval.months, days, TimeUnit.MICROSECONDS.toNanos(microsRemain));
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
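The truncation noted in the comment above is easiest to see in a round trip. The following is a minimal sketch, not part of either file: it assumes the InternalDuration constructor takes (months, days, nanoseconds), as inferred from the second convertDuration overload, and omits imports because the package locations are not shown in this excerpt.

    InternalDuration original = new InternalDuration(1, 2, 3_500L);              // 1 month, 2 days, 3.5 microseconds of nanoseconds
    CalendarInterval asInterval = SparkTypeUtils.convertDuration(original);      // nanoseconds truncated to 3 microseconds
    InternalDuration roundTripped = SparkTypeUtils.convertDuration(asInterval);  // months and days survive intact
    // roundTripped.nanoseconds is 3_000L rather than the original 3_500L: the sub-microsecond part is gone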



cassandra-analytics-spark-converter/src/main/scala-2.12-spark-2/org/apache/cassandra/spark/utils/SparkTypeUtils.java [30:53]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
public final class SparkTypeUtils
{
    private SparkTypeUtils()
    {
        throw new IllegalStateException(getClass() + " is a static utility class and shall not be instantiated");
    }

    public static final Comparator<CalendarInterval> CALENDAR_INTERVAL_COMPARATOR =
    Comparator.<CalendarInterval>comparingInt(interval -> interval.months)
              .thenComparingLong(interval -> interval.microseconds);

    public static CalendarInterval convertDuration(InternalDuration duration)
    {
        // Unfortunately, sub-microsecond precision is lost here: nanoseconds are truncated to microseconds when converting to the Spark data type.
        long micros = TimeUnit.NANOSECONDS.toMicros(duration.nanoseconds);
        micros += duration.days * CalendarInterval.MICROS_PER_DAY;
        return new CalendarInterval(duration.months, micros);
    }

    public static InternalDuration convertDuration(CalendarInterval interval)
    {
        int days = Ints.checkedCast(interval.microseconds / CalendarInterval.MICROS_PER_DAY);
        long microsRemain = interval.microseconds % CalendarInterval.MICROS_PER_DAY;
        return new InternalDuration(interval.months, days, TimeUnit.MICROSECONDS.toNanos(microsRemain));
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
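CALENDAR_INTERVAL_COMPARATOR sorts on months before microseconds and performs no normalization between the two fields, so an interval covering more elapsed time can still sort earlier. A minimal sketch, again assuming the Spark 2.x CalendarInterval(months, microseconds) constructor used in the excerpts above:

    CalendarInterval fortyDays = new CalendarInterval(0, 40 * CalendarInterval.MICROS_PER_DAY); // 0 months, ~40 days expressed in microseconds
    CalendarInterval oneMonth  = new CalendarInterval(1, 0L);                                   // 1 month, no microseconds
    // CALENDAR_INTERVAL_COMPARATOR.compare(fortyDays, oneMonth) < 0: months dominate the ordering,
    // so forty days held purely in microseconds still sorts before a single month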



