public void testOrderByWithExpression()

in phoenix5-spark/src/it/java/org/apache/phoenix/spark/OrderByIT.java [351:407]


    public void testOrderByWithExpression() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        conn.setAutoCommit(false);

        try {
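            // Create a test table keyed on a_string, with integer, timestamp and varchar value columns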
            String tableName = generateUniqueName();
            String ddl = "CREATE TABLE " + tableName +
                    "  (a_string varchar not null, col1 integer, col2 integer, col3 timestamp, col4 varchar" +
                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
            createTestTable(getUrl(), ddl);

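            // Upsert three rows; col1+col2 is 60 for 'a' and 80 for both 'b' and 'c', so col4 breaks the tie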
            Date date = new Date(System.currentTimeMillis());
            String dml = "UPSERT INTO " + tableName + " VALUES(?, ?, ?, ?, ?)";
            PreparedStatement stmt = conn.prepareStatement(dml);
            stmt.setString(1, "a");
            stmt.setInt(2, 40);
            stmt.setInt(3, 20);
            stmt.setDate(4, new Date(date.getTime()));
            stmt.setString(5, "xxyy");
            stmt.execute();
            stmt.setString(1, "b");
            stmt.setInt(2, 50);
            stmt.setInt(3, 30);
            stmt.setDate(4, new Date(date.getTime()-500));
            stmt.setString(5, "yyzz");
            stmt.execute();
            stmt.setString(1, "c");
            stmt.setInt(2, 60);
            stmt.setInt(3, 20);
            stmt.setDate(4, new Date(date.getTime()-300));
            stmt.setString(5, "ddee");
            stmt.execute();
            conn.commit();

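            // Load the table through the Phoenix Spark connector and expose it to Spark SQL as a temp view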
            SQLContext sqlContext = SparkUtil.getSparkSession().sqlContext();
            Dataset<Row> phoenixDataSet = SparkUtil.getSparkSession().read().format("phoenix")
                    .option(DataSourceOptions.TABLE_KEY, tableName)
                    .option(PhoenixDataSource.JDBC_URL, getUrl()).load();
            phoenixDataSet.createOrReplaceTempView(tableName);
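            // Query through Spark SQL, ordering by the expression col1+col2 and then by col4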
            Dataset<Row> dataset =
                    sqlContext.sql("SELECT col1+col2, col4, a_string FROM " + tableName
                            + " ORDER BY col1+col2, col4");
            List<Row> rows = dataset.collectAsList();
            ResultSet rs = new SparkResultSet(rows, dataset.columns());
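            // Expected order: 'a' (sum 60), then 'c' before 'b' (both sum 80, but "ddee" < "yyzz" on col4)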
            assertTrue(rs.next());
            assertEquals("a", rs.getString(3));
            assertTrue(rs.next());
            assertEquals("c", rs.getString(3));
            assertTrue(rs.next());
            assertEquals("b", rs.getString(3));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }