in data_loaders/generate_tfr/imagenet_oord.py [0:0]
import os

import imageio
import numpy as np
import tensorflow as tf
from tqdm import tqdm


def dump(fn_root, tfrecord_dir, max_res, expected_images, shards, write):
    """Convert a directory of PNG images into sharded TFRecord files.

    Args:
        fn_root: directory containing the source .png images.
        tfrecord_dir: output directory; its basename is reused as the
            shard filename prefix.
        max_res: maximum image resolution; must be a power of two.
        expected_images: number of images the directory must contain.
        shards: number of TFRecord shards to write.
        write: if False, perform a dry run without writing any records.
    """
    resolution_log2 = int(np.log2(max_res))
    tfr_prefix = os.path.join(tfrecord_dir, os.path.basename(tfrecord_dir))
    print("Checking in", fn_root)
    # Sort the listing so the random shard assignment below is reproducible
    # across runs (os.listdir order is platform-dependent).
    img_fn_list = sorted(img_fn for img_fn in os.listdir(fn_root)
                         if img_fn.endswith('.png'))
    num_examples = len(img_fn_list)
    print("Found", num_examples)
    assert num_examples == expected_images, (
        'found %d images, expected %d' % (num_examples, expected_images))
    # Sharding: randomly assign each image index to one of `shards` files.
    # tf.python_io was removed in TF2; tf.io exposes the same writer API.
    tfr_opt = tf.io.TFRecordOptions(compression_type='')
    p_shard = np.array_split(np.random.permutation(expected_images), shards)
    # np.int was removed in NumPy 1.24; use an explicit dtype instead.
    img_to_shard = np.zeros(expected_images, dtype=np.int64)
    writers = []
    for shard in range(shards):
        img_to_shard[p_shard[shard]] = shard
        tfr_file = tfr_prefix + '-r%02d-s-%04d-of-%04d.tfrecords' % (
            resolution_log2, shard, shards)
        writers.append(tf.io.TFRecordWriter(tfr_file, tfr_opt))
    counts = np.unique(img_to_shard, return_counts=True)[1]
    assert len(counts) == shards
    print("Smallest and largest shards have size",
          np.min(counts), np.max(counts))
    for example_idx, img_fn in enumerate(tqdm(img_fn_list)):
        shard = img_to_shard[example_idx]
        # scipy.ndimage.imread was removed in SciPy 1.2; imageio.imread is
        # the replacement recommended by its deprecation notice.
        img = imageio.imread(os.path.join(fn_root, img_fn))
        rows, cols, depth = img.shape
        shape = (rows, cols, depth)
        img = img.astype("uint8")
        # ndarray.tostring() was deprecated and later removed; tobytes()
        # returns the same raw buffer.
        img = img.tobytes()
        example = tf.train.Example(
            features=tf.train.Features(
                feature={
                    "shape": _int64_feature(shape),
                    "data": _bytes_feature(img),
                    "label": _int64_feature(0)
                }
            )
        )
        if write:
            writers[shard].write(example.SerializeToString())
    print('%-40s\r' % 'Flushing data...', end='', flush=True)
    for writer in writers:
        writer.close()
    print('%-40s\r' % '', end='', flush=True)
    print('Added %d images.' % num_examples)
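

# The feature helpers referenced by dump() are not shown in this excerpt.
# A minimal sketch of how they are typically defined (an assumption, written
# to match the way dump() passes both a shape tuple and a scalar label to
# _int64_feature):

def _int64_feature(values):
    # Accept either a scalar or an iterable of ints, since dump() uses both.
    if not isinstance(values, (tuple, list, np.ndarray)):
        values = [values]
    return tf.train.Feature(
        int64_list=tf.train.Int64List(value=[int(v) for v in values]))


def _bytes_feature(value):
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))


# Hypothetical invocation (all paths and counts below are illustrative only):
# dump(fn_root='/data/imagenet_png', tfrecord_dir='/data/imagenet_tfr',
#      max_res=32, expected_images=50000, shards=100, write=True)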