in tensorflow_examples/lite/model_maker/core/data_util/object_detector_dataloader_util.py [0:0]
def write_files(self, cache_files: CacheFiles, *args, **kwargs) -> None:
  """Writes TFRecord, meta_data and annotations json files.

  Args:
    cache_files: CacheFiles object holding the list of TFRecord files, the
      annotations json file in COCO data format that contains the golden
      bounding boxes, and the meta_data yaml file in which to save the
      meta_data, including the data size and the label_map.
    *args: Non-keyword parameters forwarded to the `_get_xml_dict` method.
    **kwargs: Keyword parameters forwarded to the `_get_xml_dict` method.
  """
  writers = [
      tf.io.TFRecordWriter(path) for path in cache_files.tfrecord_files
  ]
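  # One TFRecordWriter per output shard; the number of tfrecord_files is
  # expected to match self.num_shards for the round-robin write below.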
  ann_json_dict = {'images': [], 'annotations': [], 'categories': []}
  for class_id, class_name in self.label_map.items():
    c = {'supercategory': 'none', 'id': class_id, 'name': class_name}
    ann_json_dict['categories'].append(c)
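  # With, e.g., label_map = {1: 'person', 2: 'car'}, the categories list
  # becomes:
  #   [{'supercategory': 'none', 'id': 1, 'name': 'person'},
  #    {'supercategory': 'none', 'id': 2, 'name': 'car'}]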
  # Writes tf.Example into TFRecord files.
  size = 0
  for idx, xml_dict in enumerate(self._get_xml_dict(*args, **kwargs)):
    if self.max_num_images and idx >= self.max_num_images:
      break
    if idx % 100 == 0:
      tf.compat.v1.logging.info('On image %d' % idx)
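    # `dict_to_tf_example` converts one parsed PASCAL VOC xml dict into a
    # tf.Example and, as a side effect, appends this image's entries to
    # ann_json_dict so the COCO-style annotations can be dumped below.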
    tf_example = create_pascal_tfrecord.dict_to_tf_example(
        xml_dict,
        self.images_dir,
        self.label_name2id_dict,
        self.unique_id,
        ignore_difficult_instances=self.ignore_difficult_instances,
        ann_json_dict=ann_json_dict)
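    # Distribute the serialized examples across the shard writers
    # round-robin so every TFRecord file gets an even share.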
    writers[idx % self.num_shards].write(tf_example.SerializeToString())
    size = idx + 1
  for writer in writers:
    writer.close()
  # Writes meta_data into meta_data_file.
  meta_data = {'size': size, 'label_map': self.label_map}
  with tf.io.gfile.GFile(cache_files.meta_data_file, 'w') as f:
    yaml.dump(meta_data, f)
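  # A sketch of the resulting yaml with an illustrative label_map (yaml.dump
  # sorts keys alphabetically by default):
  #   label_map:
  #     1: person
  #     2: car
  #   size: 1000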
  # Writes ann_json_dict into annotations_json_file.
  with tf.io.gfile.GFile(cache_files.annotations_json_file, 'w') as f:
    json.dump(ann_json_dict, f, indent=2)
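
# A minimal usage sketch (hypothetical writer object and argument names; any
# extra positional/keyword arguments are forwarded verbatim to
# `_get_xml_dict`):
#
#   cache_writer.write_files(cache_files, annotations_dir='annotations/')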