def main()

in tools/vm-migrator/src/migrator/subnet_region_migrator.py


def main(step, machine_image_region, source_project,
         source_subnet_uri: uri.Subnet, source_zone, source_zone_2,
         source_zone_3, target_project, target_service_account, target_scopes,
         target_subnet_uri: uri.Subnet, backup_subnet_uri: uri.Subnet,
         source_csv, filter_csv, input_csv, rollback_csv,
         log_level) -> bool:
    """
    The main method to trigger the VM migration.
    """
    if not target_project:
        target_project = source_project
    if not target_subnet_uri:
        target_subnet_uri = copy.deepcopy(source_subnet_uri)
    if source_project != target_project:
        if not target_service_account:
            target_service_account = \
                "{}-compute@developer.gserviceaccount.com".format(
                    project.get_number(target_project))
        if target_scopes:
            target_scopes = target_scopes.split(',')
        else:
            target_scopes = [
                'https://www.googleapis.com/auth/devstorage.read_only',
                'https://www.googleapis.com/auth/logging.write',
                'https://www.googleapis.com/auth/monitoring.write',
                'https://www.googleapis.com/auth/service.management.readonly',
                'https://www.googleapis.com/auth/servicecontrol'
            ]

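    # Write log records to migrator.log at the requested verbosity.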
    numeric_level = getattr(logging, log_level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % log_level)
    logging.basicConfig(filename='migrator.log',
                        format='%(asctime)s  %(levelname)s %(message)s',
                        level=numeric_level)

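    # Dispatch on the requested step; each branch returns False on failure.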
    logging.info('Executing step %s', step)
    if step == 'prepare_inventory':
        logging.info('Exporting the inventory')
        if subnet.export_instances(source_project, source_zone, source_zone_2,
                                   source_zone_3, source_subnet_uri,
                                   source_csv):
            logging.info('%s now has exported records', source_csv)
        else:
            logging.info('File %s was not overwritten', source_csv)
            return False

    elif step == 'filter_inventory':
        logging.info('Exporting the inventory')
        if subnet.export_instances(source_project, source_zone, source_zone_2,
                                   source_zone_3, source_subnet_uri,
                                   source_csv):
            logging.info('%s now has exported records', source_csv)
        else:
            logging.info('File %s was not overwritten', source_csv)
            return False

        logging.info('Filtering out the exported records')
        if filter_records(source_csv, filter_csv, input_csv):
            logging.info('%s now has filtered records', input_csv)
        else:
            logging.info('File %s was not overwritten', input_csv)
            return False

    elif step == 'prepare_rollback':
        logging.info('Listing the VMs to roll back')
        if subnet.list_instances_for_rollback(source_project, source_zone,
                                              backup_subnet_uri, input_csv,
                                              rollback_csv):
            logging.info('%s now has exported records', rollback_csv)
        else:
            logging.info('File %s was not overwritten', rollback_csv)
            return False

    elif step == 'rollback_instances':
        logging.info('Performing rollback of instances in file %s',
                     rollback_csv)
        if bulk_move_instances_to_subnet(rollback_csv, source_subnet_uri,
                                         'rollback'):
            logging.info('Instances rollback completed successfully')
        else:
            logging.info('Rollback failed, please see the log file for details')
            return False

    elif step == 'shutdown_instances':
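        # Count the inventory rows so the confirmation prompt can state how
        # many instances are about to be shut down.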
        with open(input_csv, 'r') as read_obj:
            csv_dict_reader = DictReader(read_obj)
            count = len(list(csv_dict_reader))
        shutdown_response = query_yes_no(
            'Are you sure you want to shut down all (%s) '
            'instances present in the inventory?' % count,
            default='no')

        if shutdown_response:
            logging.info('Shutting down all instances')

            if bulk_instance_shutdown(input_csv):
                logging.info('Successfully shut down all instances')
            else:
                logging.info('Shutting down all instances failed')
                return False
        else:
            return False

    elif step == 'start_instances':
        start_response = query_yes_no(
            'Are you sure you want to start all '
            'instances present in the inventory?',
            default='no')
        if start_response:
            logging.info('Starting all instances')

            if bulk_instance_start(input_csv):
                logging.info('Successfully started all instances')
            else:
                logging.info('Starting all instances failed')
                return False
        else:
            return False

    elif step == 'create_machine_images':
        logging.info('Creating Machine Images')
        if bulk_image_create(source_project, machine_image_region, input_csv):
            logging.info('Successfully created all machine images')
        else:
            logging.info('Creating all machine images failed')
            return False

    elif step == 'disable_deletionprotection_instances':
        with open(input_csv, 'r') as read_obj:
            csv_dict_reader = DictReader(read_obj)
            count = len(list(csv_dict_reader))
        disable_response = query_yes_no(
            'Are you sure you want to disable deletion protection for all (%s) '
            'instances present in the inventory?' % count,
            default='no')

        if disable_response:
            logging.info('Disabling deletion protection for all instances')

            if bulk_instance_disable_deletionprotection(input_csv):
                logging.info('Successfully disabled deletion protection for all '
                             'instances')
            else:
                logging.info('Disabling deletion protection for all instances '
                             'failed')
                return False
        else:
            return False

    elif step == 'delete_instances':
        with open(input_csv, 'r') as read_obj:
            csv_dict_reader = DictReader(read_obj)
            count = len(list(csv_dict_reader))
        response = query_yes_no('Are you sure you want to delete the (%s) '
                                'instances and disks present in the '
                                'inventory?' % count, default='no')
        if response:
            logging.info('Deleting all the instances and disks present in the '
                         'inventory')
            if bulk_delete_instances_and_disks(input_csv, source_project):
                logging.info('Successfully deleted all instances and disks '
                             'present in the inventory')
            else:
                logging.info('Deleting all instances and disks in the '
                             'inventory failed')
                return False
        else:
            logging.info('Not deleting any instances or disks')
            return False

    elif step == 'clone_subnet':
        logging.info('Cloning Subnet')
        if subnet.duplicate(source_subnet_uri, target_subnet_uri):
            logging.info('Successfully cloned subnet in the provided region')
        else:
            logging.info('Cloning subnet in the provided region failed')
            return False

    elif step == 'add_machineimage_iampolicies':
        logging.info('Setting IAM policies of created machine images with '
                     'input_csv=%s, source_project=%s, target_service_account='
                     '%s', input_csv, source_project, target_service_account)
        if add_machineimage_iampolicies(input_csv, source_project,
                                        target_service_account):
            logging.info('Successfully set IAM policies of created machine '
                         'images')
        else:
            logging.info('Setting IAM policies of created machine images '
                         'failed')
            return False

    elif step == 'create_instances':
        logging.info(
            'Creating instances retaining the original ips in file %s with '
            'source_project=%s, target_project=%s, target_service_account=%s, '
            'target_scopes=%s, target_subnet_uri=%s', input_csv,
            source_project, target_project, target_service_account,
            target_scopes, target_subnet_uri)
        if bulk_create_instances(input_csv, target_project,
                                 target_service_account, target_scopes,
                                 target_subnet_uri, source_project, True):
            logging.info('Instances created successfully')
        else:
            logging.error('Creation of instances failed')
            return False

    elif step == 'create_instances_without_ip':
        logging.info(
            'Creating instances without retaining the original ips in file %s '
            'with source_project=%s, target_project=%s, target_service_account'
            '=%s, target_scopes=%s, target_subnet_uri=%s', input_csv,
            source_project, target_project, target_service_account,
            target_scopes, target_subnet_uri)
        if bulk_create_instances(input_csv, target_project,
                                 target_service_account, target_scopes,
                                 target_subnet_uri, source_project, False):
            logging.info('Instances created successfully')
        else:
            logging.error('Creation of instances failed')
            return False

    elif step == 'backup_instances':
        logging.info(
            'Backing up instances in file %s to backup_subnet_uri=%s',
            input_csv, backup_subnet_uri)
        if bulk_move_instances_to_subnet(input_csv, backup_subnet_uri,
                                         'backup'):
            logging.info('Instances backed up successfully')
        else:
            logging.error('Backup of instances failed')
            return False

    elif step == 'release_ip_for_subnet':
        logging.info('Releasing all IPs of project %s present in '
                     'subnet %s', source_project, source_subnet_uri)
        if subnet.release_ip(source_project, source_subnet_uri):
            logging.info('All IPs of project %s present in subnet %s released '
                         'successfully', source_project, source_subnet_uri)
        else:
            logging.error('Releasing the IPs of project %s present in subnet '
                          '%s failed', source_project, source_subnet_uri)
            return False

    elif step == 'release_ip':
        logging.info('Releasing the IPs present in the %s file', input_csv)
        if release_individual_ips(source_subnet_uri, input_csv):
            logging.info('IPs present in the file %s released successfully',
                         input_csv)
        else:
            logging.error('Releasing the IPs present in the file %s failed',
                          input_csv)
            return False
    else:
        logging.error('Step %s unknown', step)
        return False

    return True
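
A minimal sketch of how a caller might consume this entry point. The run_step
helper below is an illustrative assumption (the tool defines its own CLI
elsewhere); it relies only on main() returning True on success and False on
failure.

def run_step(step, **kwargs):
    # Hypothetical wrapper: forwards keyword arguments straight to main()
    # and turns its boolean result into a conventional process exit code.
    ok = main(step=step, **kwargs)
    if not ok:
        logging.error('Step %s failed, see migrator.log for details', step)
    return 0 if ok else 1

A driver script could then call, for example,
sys.exit(run_step('prepare_inventory', ...)) after filling in the remaining
keyword arguments from its own configuration.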