metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorClientConfig.java [89:212]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    @Bean
    public Warehouse warehouse(final ConnectorContext connectorContext) {
        try {
            final HiveConf conf = this.getDefaultConf(connectorContext);
            connectorContext.getConfiguration().forEach(conf::set);
            return new Warehouse(conf);
        } catch (final Exception e) {
            throw new IllegalArgumentException(
                String.format(
                    "Failed creating the hive warehouse for catalog: %s",
                    connectorContext.getCatalogName()
                ),
                e
            );
        }
    }

    /**
     * Hive metadata DataSource, loaded via the shared DataSourceManager under the catalog shard name.
     *
     * @param connectorContext connector context
     * @return data source
     */
    @Bean
    public DataSource hiveDataSource(final ConnectorContext connectorContext) {
        final HiveConf conf = this.getDefaultConf(connectorContext);
        connectorContext.getConfiguration().forEach(conf::set);
        DataSourceManager.get().load(
            connectorContext.getCatalogShardName(),
            connectorContext.getConfiguration()
        );
        return DataSourceManager.get().get(connectorContext.getCatalogShardName());
    }

    /**
     * Hive metadata transaction manager.
     *
     * @param hiveDataSource hive data source
     * @return hive transaction manager
     */
    @Bean
    public DataSourceTransactionManager hiveTxManager(
        @Qualifier("hiveDataSource") final DataSource hiveDataSource) {
        return new DataSourceTransactionManager(hiveDataSource);
    }

    /**
     * Hive metadata read JDBC template. The query timeout is set to bound long-running read queries.
     *
     * @param connectorContext connector context
     * @param hiveDataSource hive data source
     * @return hive JDBC template
     */
    @Bean
    public JdbcTemplate hiveReadJdbcTemplate(
        final ConnectorContext connectorContext,
        @Qualifier("hiveDataSource") final DataSource hiveDataSource) {
        final JdbcTemplate result = new JdbcTemplate(hiveDataSource);
        result.setQueryTimeout(getDataStoreReadTimeout(connectorContext) / 1000);
        return result;
    }

    /**
     * Hive metadata write JDBC template. The query timeout is set to bound long-running write queries.
     *
     * @param connectorContext connector context
     * @param hiveDataSource hive data source
     * @return hive JDBC template
     */
    @Bean
    public JdbcTemplate hiveWriteJdbcTemplate(
        final ConnectorContext connectorContext,
        @Qualifier("hiveDataSource") final DataSource hiveDataSource) {
        final JdbcTemplate result = new JdbcTemplate(hiveDataSource);
        result.setQueryTimeout(getDataStoreWriteTimeout(connectorContext) / 1000);
        return result;
    }

    @VisibleForTesting
    private HiveConf getDefaultConf(
        final ConnectorContext connectorContext
    ) {
        final HiveConf result = new HiveConf();
        result.setBoolean(HiveConfigConstants.USE_METASTORE_LOCAL, true);

        final int dataStoreTimeout = getDataStoreTimeout(connectorContext);
        result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT, dataStoreTimeout);
        result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT, dataStoreTimeout);
        result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT, getDataStoreWriteTimeout(connectorContext));
        result.setInt(HiveConfigConstants.HIVE_METASTORE_DS_RETRY, 0);
        result.setInt(HiveConfigConstants.HIVE_HMSHANDLER_RETRY, 0);
        result.set(
            HiveConfigConstants.JAVAX_JDO_PERSISTENCEMANAGER_FACTORY_CLASS,
            HiveConfigConstants.JAVAX_JDO_PERSISTENCEMANAGER_FACTORY
        );
        result.setBoolean(HiveConfigConstants.HIVE_STATS_AUTOGATHER, false);
        return result;
    }

    private int getDataStoreTimeout(final ConnectorContext connectorContext) {
        int result = DEFAULT_DATASTORE_TIMEOUT;
        try {
            result = Integer.parseInt(
                connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT));
        } catch (final Exception ignored) {
            // fall back to the default when the property is missing or not a valid integer
        }
        return result;
    }

    private int getDataStoreReadTimeout(final ConnectorContext connectorContext) {
        int result = DEFAULT_DATASTORE_READ_TIMEOUT;
        try {
            result = Integer.parseInt(
                connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT));
        } catch (final Exception ignored) {
            // fall back to the default when the property is missing or not a valid integer
        }
        return result;
    }

    private int getDataStoreWriteTimeout(final ConnectorContext connectorContext) {
        int result = DEFAULT_DATASTORE_WRITE_TIMEOUT;
        try {
            result = Integer.parseInt(
                connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT));
        } catch (final Exception ignored) {
            // fall back to the default when the property is missing or not a valid integer
        }
        return result;
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
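
For context, a minimal sketch of how a downstream component might inject the qualified templates defined above. Only the bean names (hiveReadJdbcTemplate, hiveWriteJdbcTemplate) come from the configuration; the class name, metastore table, and query below are illustrative assumptions, not code from this repository.

    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.jdbc.core.JdbcTemplate;

    // Hypothetical consumer; constructor injection picks the read template by qualifier.
    public class HivePartitionCountReader {
        private final JdbcTemplate readTemplate;

        public HivePartitionCountReader(
            @Qualifier("hiveReadJdbcTemplate") final JdbcTemplate readTemplate) {
            this.readTemplate = readTemplate;
        }

        /** Counts partitions for a table; the read template enforces the configured query timeout. */
        public long countPartitions(final long tableId) {
            final Long count = readTemplate.queryForObject(
                "SELECT COUNT(*) FROM PARTITIONS WHERE TBL_ID = ?", Long.class, tableId);
            return count == null ? 0L : count;
        }
    }

Defining separate read and write templates, as the two bean methods above do, lets the read and write paths carry independently configured query timeouts.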



metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorConfig.java [179:302]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    @Bean
    public Warehouse warehouse(final ConnectorContext connectorContext) {
        try {
            final HiveConf conf = this.getDefaultConf(connectorContext);
            connectorContext.getConfiguration().forEach(conf::set);
            return new Warehouse(conf);
        } catch (final Exception e) {
            throw new IllegalArgumentException(
                String.format(
                    "Failed creating the hive warehouse for catalog: %s",
                    connectorContext.getCatalogName()
                ),
                e
            );
        }
    }

    /**
     * Hive metadata DataSource, loaded via the shared DataSourceManager under the catalog shard name.
     *
     * @param connectorContext connector context
     * @return data source
     */
    @Bean
    public DataSource hiveDataSource(final ConnectorContext connectorContext) {
        final HiveConf conf = this.getDefaultConf(connectorContext);
        connectorContext.getConfiguration().forEach(conf::set);
        DataSourceManager.get().load(
            connectorContext.getCatalogShardName(),
            connectorContext.getConfiguration()
        );
        return DataSourceManager.get().get(connectorContext.getCatalogShardName());
    }

    /**
     * Hive metadata transaction manager.
     *
     * @param hiveDataSource hive data source
     * @return hive transaction manager
     */
    @Bean
    public DataSourceTransactionManager hiveTxManager(
        @Qualifier("hiveDataSource") final DataSource hiveDataSource) {
        return new DataSourceTransactionManager(hiveDataSource);
    }

    /**
     * Hive metadata read JDBC template. The query timeout is set to bound long-running read queries.
     *
     * @param connectorContext connector context
     * @param hiveDataSource hive data source
     * @return hive JDBC template
     */
    @Bean
    public JdbcTemplate hiveReadJdbcTemplate(
        final ConnectorContext connectorContext,
        @Qualifier("hiveDataSource") final DataSource hiveDataSource) {
        final JdbcTemplate result = new JdbcTemplate(hiveDataSource);
        result.setQueryTimeout(getDataStoreReadTimeout(connectorContext) / 1000);
        return result;
    }

    /**
     * Hive metadata write JDBC template. The query timeout is set to bound long-running write queries.
     *
     * @param connectorContext connector context
     * @param hiveDataSource hive data source
     * @return hive JDBC template
     */
    @Bean
    public JdbcTemplate hiveWriteJdbcTemplate(
        final ConnectorContext connectorContext,
        @Qualifier("hiveDataSource") final DataSource hiveDataSource) {
        final JdbcTemplate result = new JdbcTemplate(hiveDataSource);
        result.setQueryTimeout(getDataStoreWriteTimeout(connectorContext) / 1000);
        return result;
    }

    @VisibleForTesting
    private HiveConf getDefaultConf(
        final ConnectorContext connectorContext
    ) {
        final HiveConf result = new HiveConf();
        result.setBoolean(HiveConfigConstants.USE_METASTORE_LOCAL, true);

        final int dataStoreTimeout = getDataStoreTimeout(connectorContext);
        result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT, dataStoreTimeout);
        result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT, dataStoreTimeout);
        result.setInt(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT, getDataStoreWriteTimeout(connectorContext));
        result.setInt(HiveConfigConstants.HIVE_METASTORE_DS_RETRY, 0);
        result.setInt(HiveConfigConstants.HIVE_HMSHANDLER_RETRY, 0);
        result.set(
            HiveConfigConstants.JAVAX_JDO_PERSISTENCEMANAGER_FACTORY_CLASS,
            HiveConfigConstants.JAVAX_JDO_PERSISTENCEMANAGER_FACTORY
        );
        result.setBoolean(HiveConfigConstants.HIVE_STATS_AUTOGATHER, false);
        return result;
    }

    private int getDataStoreTimeout(final ConnectorContext connectorContext) {
        int result = DEFAULT_DATASTORE_TIMEOUT;
        try {
            result = Integer.parseInt(
                connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT));
        } catch (final Exception ignored) {
            // fall back to the default when the property is missing or not a valid integer
        }
        return result;
    }

    private int getDataStoreReadTimeout(final ConnectorContext connectorContext) {
        int result = DEFAULT_DATASTORE_READ_TIMEOUT;
        try {
            result = Integer.parseInt(
                connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREREADTIMEOUT));
        } catch (final Exception ignored) {
            // fall back to the default when the property is missing or not a valid integer
        }
        return result;
    }

    private int getDataStoreWriteTimeout(final ConnectorContext connectorContext) {
        int result = DEFAULT_DATASTORE_WRITE_TIMEOUT;
        try {
            result = Integer.parseInt(
                connectorContext.getConfiguration().get(HiveConfigConstants.JAVAX_JDO_DATASTOREWRITETIMEOUT));
        } catch (final Exception ignored) {
            // fall back to the default when the property is missing or not a valid integer
        }
        return result;
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
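
Both excerpts repeat the same parse-or-default logic across getDataStoreTimeout, getDataStoreReadTimeout, and getDataStoreWriteTimeout. A minimal sketch of how that pattern could be factored into a shared helper, assuming the connector configuration is a String-to-String map as the forEach(conf::set) usage suggests; the helper class and method names are hypothetical.

    import java.util.Map;

    // Hypothetical helper consolidating the "parse the configured value, fall back to a default" pattern.
    final class TimeoutConfig {

        private TimeoutConfig() { }

        /**
         * Returns the configured integer value for the given key, or the default
         * when the entry is missing or not a valid integer.
         */
        static int intOrDefault(final Map<String, String> configuration, final String key, final int defaultValue) {
            final String raw = configuration.get(key);
            if (raw == null) {
                return defaultValue;
            }
            try {
                return Integer.parseInt(raw.trim());
            } catch (final NumberFormatException ignored) {
                return defaultValue;
            }
        }
    }

With such a helper, each getter would reduce to a single call, e.g. TimeoutConfig.intOrDefault(connectorContext.getConfiguration(), HiveConfigConstants.JAVAX_JDO_DATASTORETIMEOUT, DEFAULT_DATASTORE_TIMEOUT), which would also remove part of the duplication reported between the two configuration classes.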



