torchbiggraph/parameter_sharing.py [170:180]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        else:
            outstanding_work = []
            flattened_data = data.flatten()
            flattened_size = flattened_data.shape[0]
            for idx, (pg, slice_) in enumerate(
                zip(
                    self.groups,
                    split_almost_equally(flattened_size, num_parts=len(self.groups)),
                )
            ):
                outstanding_work.append(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
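Both excerpts implement the same fan-out: flatten the tensor, cut it into near-equal contiguous slices (one per process group), and queue one non-blocking transfer per slice so the chunks move in parallel. A minimal, self-contained sketch of that pattern follows. It is an illustration, not the library's code: split_almost_equally is re-implemented here for completeness (the real helper lives in torchbiggraph.util), send_sharded is a hypothetical name, and the body of the truncated append(...) call is assumed to be a non-blocking torch.distributed.isend.

    # Hedged sketch of the fan-out pattern above; not the library's code.
    from typing import Iterable, List

    import torch
    import torch.distributed as td


    def split_almost_equally(size: int, *, num_parts: int) -> Iterable[slice]:
        # Yield num_parts contiguous slices covering [0, size) whose
        # lengths differ by at most one element.
        base, extra = divmod(size, num_parts)
        start = 0
        for part in range(num_parts):
            length = base + (1 if part < extra else 0)
            yield slice(start, start + length)
            start += length


    def send_sharded(
        data: torch.Tensor,
        dst: int,
        groups: List[td.ProcessGroup],
        tag: int = 0,
    ) -> None:
        # Split the tensor into one near-equal chunk per process group,
        # send all chunks concurrently, then block until every send is done.
        outstanding_work = []
        flattened_data = data.flatten()
        flattened_size = flattened_data.shape[0]
        for pg, slice_ in zip(
            groups, split_almost_equally(flattened_size, num_parts=len(groups))
        ):
            # isend returns a Work handle immediately; each chunk travels
            # over its own process group, parallelizing the transfer.
            outstanding_work.append(
                td.isend(tensor=flattened_data[slice_], dst=dst, group=pg, tag=tag)
            )
        for work in outstanding_work:
            work.wait()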



torchbiggraph/parameter_sharing.py [216:226]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        else:
            outstanding_work = []
            flattened_data = data.flatten()
            flattened_size = flattened_data.shape[0]
            for idx, (pg, slice_) in enumerate(
                zip(
                    self.groups,
                    split_almost_equally(flattened_size, num_parts=len(self.groups)),
                )
            ):
                outstanding_work.append(
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
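The second occurrence is identical to the first up to the truncation point, so the two call sites presumably diverge only in the transfer queued inside append(...); the receive path would use torch.distributed.irecv. A hedged receive-side counterpart, reusing split_almost_equally from the sketch above (recv_sharded is likewise a hypothetical name):

    # Hedged counterpart to send_sharded above; reuses split_almost_equally
    # from the previous sketch. Not the library's code.
    from typing import List

    import torch
    import torch.distributed as td


    def recv_sharded(
        data: torch.Tensor,
        src: int,
        groups: List[td.ProcessGroup],
        tag: int = 0,
    ) -> None:
        # data must be contiguous so that flatten() returns a writable view
        # (on a non-contiguous tensor it would return a copy and the
        # received chunks would never reach data).
        outstanding_work = []
        flattened_data = data.flatten()
        flattened_size = flattened_data.shape[0]
        for pg, slice_ in zip(
            groups, split_almost_equally(flattened_size, num_parts=len(groups))
        ):
            # irecv fills each slice in place once the matching isend lands.
            outstanding_work.append(
                td.irecv(tensor=flattened_data[slice_], src=src, group=pg, tag=tag)
            )
        for work in outstanding_work:
            work.wait()

Since the two excerpts share this entire loop verbatim, a single helper parameterized by the transfer function (td.isend vs. td.irecv) would remove the duplication flagged above.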



