protected ADBResultSet executeGetTablesQuery(String catalog, String schemaPattern, String tableNamePattern, String[] types)

in asterixdb-jdbc/asterix-jdbc-core/src/main/java/org/apache/asterix/jdbc/core/ADBMetaStatement.java [136:205]
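
For orientation: the method below assembles a single SQL++ statement against the Metadata dataverse. As a rough illustration (not emitted verbatim), in CATALOG_SCHEMA mode with all filter arguments supplied and catalogIncludesSchemaless disabled, the generated query has the following shape; the <...> type literals stand in for whatever getDatasetTerm/getViewTerm return, and the prolog added by populateQueryProlog is omitted.

    select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, null REMARKS, null TYPE_CAT,
           null TYPE_SCHEM, null TYPE_NAME, null SELF_REFERENCING_COL_NAME, null REF_GENERATION
    from Metadata.`Dataset` ds join Metadata.`Datatype` dt
      on ds.DatatypeDataverseName = dt.DataverseName and ds.DatatypeName = dt.DatatypeName
    let dvname = decode_dataverse_name(ds.DataverseName),
        TABLE_CAT = dvname[0],
        TABLE_SCHEM = case array_length(dvname) when 1 then null else dvname[1] end,
        TABLE_NAME = ds.DatasetName,
        isDataset = (ds.DatasetType = 'INTERNAL' or ds.DatasetType = 'EXTERNAL'),
        isView = ds.DatasetType = 'VIEW',
        hasFields = array_length(dt.Derived.Record.Fields) > 0,
        TABLE_TYPE = case
            when isDataset then (case when hasFields then '<datasetTermTabular>' else '<datasetTermNonTabular>' end)
            when isView then (case when hasFields then '<viewTermTabular>' else '<viewTermNonTabular>' end)
            else null end
    where (TABLE_TYPE in $1) and (TABLE_CAT = $2) and (if_null(TABLE_SCHEM, '') like $3)
      and (TABLE_NAME like $4) and (array_length(dvname) between 1 and 2) and hasFields
    order by TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME

The positional parameters are always bound in the same order ($1 = types list, $2 = catalog, $3 = schema pattern, $4 = table name pattern), even when some of the corresponding predicates are not generated.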

    protected ADBResultSet executeGetTablesQuery(String catalog, String schemaPattern, String tableNamePattern,
            String[] types) throws SQLException {
        checkClosed();

        String datasetTermTabular = getDatasetTerm(true);
        String datasetTermNonTabular = getDatasetTerm(false);
        String viewTermTabular = getViewTerm(true);
        String viewTermNonTabular = getViewTerm(false);

        StringBuilder sql = new StringBuilder(1024);
        populateQueryProlog(sql, "JDBC-GetTables");

        sql.append("select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, null REMARKS, null TYPE_CAT, ");
        sql.append("null TYPE_SCHEM, null TYPE_NAME, null SELF_REFERENCING_COL_NAME, null REF_GENERATION ");
        sql.append("from Metadata.`Dataset` ds join Metadata.`Datatype` dt ");
        sql.append("on ds.DatatypeDataverseName = dt.DataverseName and ds.DatatypeName = dt.DatatypeName ");
        sql.append("let ");
        switch (connection.catalogDataverseMode) {
            case CATALOG:
                sql.append("TABLE_CAT = ds.DataverseName, ");
                sql.append("TABLE_SCHEM = null, ");
                break;
            case CATALOG_SCHEMA:
                sql.append("dvname = decode_dataverse_name(ds.DataverseName), ");
                sql.append("TABLE_CAT = dvname[0], ");
                sql.append("TABLE_SCHEM = case array_length(dvname) when 1 then null else dvname[1] end, ");
                break;
            default:
                throw new IllegalStateException();
        }
        sql.append("TABLE_NAME = ds.DatasetName, ");
        sql.append("isDataset = (ds.DatasetType = 'INTERNAL' or ds.DatasetType = 'EXTERNAL'), ");
        sql.append("isView = ds.DatasetType = 'VIEW', ");
        sql.append("hasFields = array_length(dt.Derived.Record.Fields) > 0, ");
        sql.append("TABLE_TYPE = case ");
        sql.append("when isDataset then (case when hasFields then '").append(datasetTermTabular).append("' else '")
                .append(datasetTermNonTabular).append("' end) ");
        sql.append("when isView then (case when hasFields then '").append(viewTermTabular).append("' else '")
                .append(viewTermNonTabular).append("' end) ");
        sql.append("else null end ");

        sql.append("where ");
        sql.append("(TABLE_TYPE ").append(types != null ? "in $1" : "is not null").append(") ");
        if (catalog != null) {
            sql.append("and (TABLE_CAT = $2) ");
        }
        if (schemaPattern != null) {
            sql.append("and (if_null(TABLE_SCHEM, '') like $3) ");
        }
        if (tableNamePattern != null) {
            sql.append("and (TABLE_NAME like $4) ");
        }
        switch (connection.catalogDataverseMode) {
            case CATALOG:
                break;
            case CATALOG_SCHEMA:
                sql.append("and (array_length(dvname) between 1 and 2) ");
                break;
            default:
                throw new IllegalStateException();
        }
        if (!connection.catalogIncludesSchemaless) {
            sql.append("and hasFields ");
        }

        sql.append("order by TABLE_TYPE, TABLE_CAT, TABLE_SCHEM, TABLE_NAME");

        List<String> typesList = types != null ? Arrays.asList(types) : null;
        return executeQueryImpl(sql.toString(), Arrays.asList(typesList, catalog, schemaPattern, tableNamePattern));
    }
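
For context, this method backs java.sql.DatabaseMetaData.getTables() in the AsterixDB JDBC driver. A minimal client-side sketch follows; it is illustrative only: the JDBC URL, host, and port are assumptions that depend on the deployment, and the "TABLE"/"VIEW" type strings are assumptions standing in for the driver's actual dataset/view terms (see getDatasetTerm/getViewTerm).

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

public class GetTablesExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection string; the real URL format is deployment-specific.
        try (Connection conn = DriverManager.getConnection("jdbc:asterixdb://localhost:19002")) {
            DatabaseMetaData md = conn.getMetaData();
            // Arguments are forwarded to executeGetTablesQuery(catalog, schemaPattern,
            // tableNamePattern, types) and bound as $2, $3, $4, $1 respectively.
            try (ResultSet rs = md.getTables(null, "%", "%", new String[] { "TABLE", "VIEW" })) {
                while (rs.next()) {
                    System.out.printf("%s.%s.%s (%s)%n",
                            rs.getString("TABLE_CAT"), rs.getString("TABLE_SCHEM"),
                            rs.getString("TABLE_NAME"), rs.getString("TABLE_TYPE"));
                }
            }
        }
    }
}

Passing null for an argument (as with catalog above) simply drops the corresponding predicate from the generated query, matching the null checks in the method.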