# main()
# Located in utilities/Hive_metastore_migration/src/hive_metastore_migration.py


def main():
    """Entry point: run the Hive metastore ETL in the direction selected on the CLI.

    Parses command-line options, assembles the JDBC connection settings,
    initializes the Spark environment, and dispatches to the from-metastore
    or to-metastore ETL path based on the ``mode`` option.
    """
    options = parse_arguments(sys.argv)

    # JDBC credentials/endpoint for the Hive metastore database.
    jdbc_connection = {
        'url': options['jdbc_url'],
        'user': options['jdbc_username'],
        'password': options['jdbc_password'],
    }

    # Optional name prefixes; normalize a missing/None value to ''.
    db_prefix = options.get('database_prefix') or ''
    table_prefix = options.get('table_prefix') or ''

    # Spark setup (conf is returned by the helper but not needed here).
    conf, sc, sql_context = get_spark_env()

    # Extraction handle over the metastore database.
    hive_metastore = HiveMetastore(jdbc_connection, sql_context)

    # Dispatch on migration direction.
    if options['mode'] != FROM_METASTORE:
        etl_to_metastore(sc, sql_context, hive_metastore, options)
    else:
        etl_from_metastore(sc, sql_context, db_prefix, table_prefix, hive_metastore, options)