in ambari-infra-solr-client/src/main/python/migrationHelper.py [0:0]
def update_state_json(original_collection, collection, config, options):
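  """Rewrite the collection's state.json in ZooKeeper after a restore.

  Downloads the current state.json and the per-core restore metadata, re-keys
  each restored replica to its new core_node name, fixes dataDir/ulogDir and
  host references, then uploads the patched state back to the collection znode.
  """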
  solr_znode='/infra-solr'
  if config.has_section('infra_solr') and config.has_option('infra_solr', 'znode'):
    solr_znode=config.get('infra_solr', 'znode')
  coll_data_dir = "{0}migrate/data/{1}".format(INFRA_SOLR_CLIENT_BASE_PATH, collection)
  if not os.path.exists(coll_data_dir):
    os.makedirs(coll_data_dir)
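
  # Fetch the live collection state and the per-core restore metadata from ZooKeeper into the local work dir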
copy_znode(options, config, "{0}/collections/{1}/state.json".format(solr_znode, collection), "{0}/state.json".format(coll_data_dir), copy_to_local=True)
copy_znode(options, config, "{0}/restore_metadata/{1}".format(solr_znode, collection), "{0}".format(coll_data_dir), copy_to_local=True)
json_file_list=glob.glob("{0}/*.json".format(coll_data_dir))
logger.debug("Downloaded json files list: {0}".format(str(json_file_list)))
  cores_data_json_list = [k for k in json_file_list if 'state.json' not in k and 'new_state.json' not in k and 'restore_core_pairs.json' not in k]
  state_json_list = [k for k in json_file_list if '/state.json' in k]

  if not cores_data_json_list:
    raise Exception('Cannot find any downloaded restore core metadata for {0}'.format(collection))
  if not state_json_list:
    raise Exception('Cannot find any downloaded restore collection state metadata for {0}'.format(collection))
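
  # Pair each core of the restored collection with its counterpart in the original collection;
  # cores can be skipped under either name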
  core_pairs = generate_core_pairs(original_collection, collection, config, options)
  cores_to_skip = []
  logger.debug("Generated core pairs: {0}".format(str(core_pairs)))
  if options.skip_cores:
    cores_to_skip = options.skip_cores.split(',')
  logger.debug("Cores to skip: {0}".format(str(cores_to_skip)))
  state_json_file=state_json_list[0]
  state_data = read_json(state_json_file)
  core_json_data=[]
  for core_data_json_file in cores_data_json_list:
    core_json_data.append(read_json(core_data_json_file))
  logger.debug("collection data content: {0}".format(str(state_data)))

  core_details={}
  for core in core_json_data:
    core_details[core['core_node']]=core
  logger.debug("core data contents: {0}".format(str(core_details)))

  collection_data = state_data[collection]
  shards = collection_data['shards']
  new_state_json_data=copy.deepcopy(state_data)
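
  # Walk every shard/replica and patch the copied state in place (state_data stays pristine for lookups)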
  for shard in shards:
    replicas = shards[shard]['replicas']
    for replica in replicas:
      core_data = replicas[replica]
      core = core_data['core']
      base_url = core_data['base_url']
      node_name = core_data['node_name']
      data_dir = core_data['dataDir'] if 'dataDir' in core_data else None
      ulog_dir = core_data['ulogDir'] if 'ulogDir' in core_data else None
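
      # Honor the skip list: a core can be referenced by its new name or by its original pair's name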
      if cores_to_skip and (core in cores_to_skip or (core in core_pairs and core_pairs[core] in cores_to_skip)):
        # .get() avoids a KeyError for cores that are skipped by their new name but have no recorded pair
        print "Skipping core '{0}' as it is in skip-cores list (or its original pair: '{1}')".format(core, core_pairs.get(core, core))
      elif replica in core_details:
        old_core_node=core_details[replica]['core_node']
        new_core_node=core_details[replica]['new_core_node']
        new_state_core = copy.deepcopy(state_data[collection]['shards'][shard]['replicas'][replica])
        new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]=new_state_core
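        # If the core_node name changed, drop the stale entry and rewrite the node-scoped paths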
        if old_core_node != new_core_node:
          if old_core_node in new_state_json_data[collection]['shards'][shard]['replicas']:
            del new_state_json_data[collection]['shards'][shard]['replicas'][old_core_node]
          if data_dir:
            new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['dataDir']=data_dir.replace(old_core_node, new_core_node)
          if ulog_dir:
            new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['ulogDir']=ulog_dir.replace(old_core_node, new_core_node)
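
        # If the core also moved hosts, rewrite the host portion of base_url and node_name
        # (under the new key when the core_node was renamed, otherwise in place)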
        old_host=core_details[replica]['old_host']
        new_host=core_details[replica]['new_host']
        if old_host != new_host and old_core_node != new_core_node:
          new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['base_url']=base_url.replace(old_host, new_host)
          new_state_json_data[collection]['shards'][shard]['replicas'][new_core_node]['node_name']=node_name.replace(old_host, new_host)
        elif old_host != new_host:
          new_state_json_data[collection]['shards'][shard]['replicas'][replica]['base_url']=base_url.replace(old_host, new_host)
          new_state_json_data[collection]['shards'][shard]['replicas'][replica]['node_name']=node_name.replace(old_host, new_host)
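
  # Persist the patched state locally, then upload it over the collection's state.json znode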
  with open("{0}/new_state.json".format(coll_data_dir), 'w') as outfile:
    json.dump(new_state_json_data, outfile)

  copy_znode(options, config, "{0}/new_state.json".format(coll_data_dir), "{0}/collections/{1}/state.json".format(solr_znode, collection), copy_from_local=True)