in python/graph_util.py [0:0]
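`_get_int32_values`, used throughout the method below, is not shown in this excerpt. A plausible sketch, assuming it flattens the integer values of a Const node's `value` tensor attribute (the name exists in the source; this implementation is an assumption):

from tensorflow.python.framework import tensor_util

def _get_int32_values(const_op):
    # Assumed behavior: read the Const op's TensorProto and return its entries
    # as a flat list of Python ints (e.g. reduction or concat axes).
    tensor_proto = const_op.node_def.attr['value'].tensor
    return [int(v) for v in tensor_util.MakeNdarray(tensor_proto).flatten()]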
def dynamic_inputs_outputs(self, op):
    """Return the input tensors of `op` through which a dynamic batch dimension
    can flow, and the output tensors that inherit it; ([], []) if undetermined."""
    if op.type in DynamicBatchSizeHelper.unary_ops:
        # Unary ops pass the batch dimension from their inputs straight to their outputs.
        return list(op.inputs), op.outputs
    elif op.type in DynamicBatchSizeHelper.binary_broadcast_ops:
        shape0, shape1 = [ts.shape for ts in op.inputs]
        if shape0.rank is None or shape1.rank is None:
            return [], []
        if shape0.rank > shape1.rank:
            # Broadcasting: only the higher-rank operand can carry the batch dimension.
            return [op.inputs[0]], op.outputs
        elif shape0.rank < shape1.rank:
            return [op.inputs[1]], op.outputs
        else:  # same rank
            # With equal ranks, track whichever operands have an unknown leading dimension.
            inputs = []
            if len(shape0) > 0 and shape0.as_list()[0] is None:
                inputs.append(op.inputs[0])
            if len(shape1) > 0 and shape1.as_list()[0] is None:
                inputs.append(op.inputs[1])
            return inputs, op.outputs
    elif op.type in DynamicBatchSizeHelper.reduce_axis_ops:
        # The last input holds the reduction axes; the batch dimension survives
        # as long as axis 0 is not reduced.
        axis_op = op.inputs[-1].op
        if axis_op.type == 'Const':
            axis_list = _get_int32_values(axis_op)
            if axis_list and 0 not in axis_list:
                return list(op.inputs[:-1]), op.outputs
    elif op.type in DynamicBatchSizeHelper.pseudo_unary_ops:
        # Only the first input propagates the batch dimension.
        return list(op.inputs[:1]), op.outputs
    elif op.type in {'Concat', 'ConcatV2'}:
        # 'Concat' takes the axis as its first input; 'ConcatV2' takes it as its last.
        axis_pos = 0 if op.type == 'Concat' else len(op.inputs) - 1
        axis_op = op.inputs[axis_pos].op
        value_inputs = [ts for i, ts in enumerate(op.inputs) if i != axis_pos]
        if axis_op.type == 'Const':
            axis_list = _get_int32_values(axis_op)
            if any(axis < 0 for axis in axis_list):
                rank = value_inputs[0].shape.rank
                if rank is None:
                    return [], []
                # Normalize negative axes before testing for the batch dimension.
                axis_list = [axis if axis >= 0 else (axis + rank) for axis in axis_list]
            # Concatenating along any axis other than 0 preserves the batch dimension.
            if axis_list and 0 not in axis_list:
                return value_inputs, op.outputs
    elif op.type in {'ExpandDims', 'Stack', 'BatchMatMul', 'BatchMatMulV2',
                     'Cumprod', 'Cumsum'}:
        # Recognized but not yet handled; fall through to the conservative default.
        pass
    elif op.type == 'MatMul':
        # Unless the left operand is transposed, its leading (batch) dimension
        # flows straight through to the product.
        if not op.node_def.attr['transpose_a'].b:
            return list(op.inputs[:1]), op.outputs
    elif op.type in {'Slice', 'StridedSlice', 'Shape', 'Reshape', 'Squeeze',
                     'Transpose', 'Unstack'}:
        # Recognized but not yet handled; fall through to the conservative default.
        pass
    # Default: assume the dynamic batch dimension does not propagate through this op.
    return [], []
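A rough sketch of how this helper might be driven. The traversal below, its name, and its arguments are illustrative assumptions, not part of graph_util.py: starting from seed tensors known to have a dynamic batch dimension, it repeatedly asks the helper which outputs inherit that dimension until a fixed point is reached.

def find_dynamic_batch_tensors(graph, helper, seed_tensors):
    """Fixed-point propagation of the dynamic batch dimension over a tf.Graph."""
    dynamic = {ts.name for ts in seed_tensors}
    changed = True
    while changed:
        changed = False
        for op in graph.get_operations():
            inputs, outputs = helper.dynamic_inputs_outputs(op)
            # If any listed input already carries a dynamic batch dimension,
            # the listed outputs inherit it.
            if any(ts.name in dynamic for ts in inputs):
                for out in outputs:
                    if out.name not in dynamic:
                        dynamic.add(out.name)
                        changed = True
    return dynamic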