in csrc/liars_dice/rela/prioritized_replay.h [263:296]
// Sample a batch of `batchsize` transitions, returning (batch, priority).
//
// When prioritized sampling is enabled, the caller MUST feed the priorities
// of the previous batch back via the priority-update path before sampling
// again; otherwise `sampledIds_` (the ids awaiting their priority update)
// would be silently overwritten and that update lost.
//
// With `prefetch_ > 0`, up to `prefetch_` future batches are sampled
// asynchronously so the next call can usually return a ready result.
std::tuple<DataType, torch::Tensor> sample(int batchsize,
                                           const std::string& device) {
  // Guard against sampling again before the previous batch's priorities
  // were updated. Errors go to stderr (not stdout) so they are visible
  // even when stdout is redirected/buffered; note assert(false) is a
  // no-op under NDEBUG, so the message is the only release-mode signal.
  if (!sampledIds_.empty() && use_priority_) {
    std::cerr << "Error: previous samples' priority has not been updated."
              << std::endl;
    assert(false);
  }

  DataType batch;
  torch::Tensor priority;
  if (prefetch_ == 0) {
    // Synchronous path: sample directly, no pipelining.
    std::tie(batch, priority, sampledIds_) = sample_(batchsize, device);
    // Move to avoid copying a potentially large batch/tensor.
    return std::make_tuple(std::move(batch), std::move(priority));
  }

  if (futures_.empty()) {
    // First call (or pipeline drained): sample synchronously to seed it.
    std::tie(batch, priority, sampledIds_) = sample_(batchsize, device);
  } else {
    // Consume the oldest prefetched batch; blocks only if it is not ready.
    std::tie(batch, priority, sampledIds_) = futures_.front().get();
    futures_.pop();
  }

  // Refill the prefetch pipeline up to `prefetch_` in-flight samples.
  // NOTE(review): the async tasks capture `this`; the object must outlive
  // all pending futures — presumably guaranteed by the owner.
  while ((int)futures_.size() < prefetch_) {
    auto f =
        std::async(std::launch::async, &PrioritizedReplay<DataType>::sample_,
                   this, batchsize, device);
    futures_.push(std::move(f));
  }

  return std::make_tuple(std::move(batch), std::move(priority));
}