in rela/prioritized_replay.h [202:233]
std::tuple<DataType, torch::Tensor> sample(int batchsize, const std::string& device) {
  // The priorities of the previously sampled batch must be updated
  // before sampling again; otherwise sampledIds_ is still populated.
  if (!sampledIds_.empty()) {
    std::cout << "Error: previous samples' priority has not been updated." << std::endl;
    assert(false);
  }

  DataType batch;
  torch::Tensor priority;
  if (prefetch_ == 0) {
    // No prefetching: sample synchronously on the calling thread.
    std::tie(batch, priority, sampledIds_) = sample_(batchsize, device);
    return std::make_tuple(batch, priority);
  }

  if (futures_.empty()) {
    // Nothing has been prefetched yet, so sample synchronously this once.
    std::tie(batch, priority, sampledIds_) = sample_(batchsize, device);
  } else {
    // Consume the oldest prefetched batch.
    // assert(futures_.size() == 1);
    std::tie(batch, priority, sampledIds_) = futures_.front().get();
    futures_.pop();
  }

  // Refill the queue with asynchronous sampling tasks so that up to
  // prefetch_ batches are ready ahead of the next call.
  while ((int)futures_.size() < prefetch_) {
    auto f = std::async(std::launch::async,
                        &PrioritizedReplay<DataType>::sample_,
                        this,
                        batchsize,
                        device);
    futures_.push(std::move(f));
  }

  return std::make_tuple(batch, priority);
}
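
The assert at the top encodes a usage contract: every sample() must be paired with a priority update before the next sample(). Below is a minimal sketch of that call pattern, assuming the buffer lives in the rela namespace and exposes an updatePriority(const torch::Tensor&) method, as the error message implies; the learner step and the new priority values are placeholders, not part of the code above.

#include <string>
#include <torch/torch.h>
#include "rela/prioritized_replay.h"

// Hypothetical training-loop step illustrating the sample/updatePriority
// pairing enforced by the assert in sample(). The function name and the
// TD-error computation are illustrative only.
template <class DataType>
void learnerStep(rela::PrioritizedReplay<DataType>& replayer,
                 int batchsize,
                 const std::string& device) {
  // sample() returns the batch and its priorities, and records
  // sampledIds_ internally until the corresponding update arrives.
  auto [batch, priority] = replayer.sample(batchsize, device);

  // ... run the learner on `batch` and compute new per-sample priorities ...
  torch::Tensor newPriority = torch::ones({batchsize});  // placeholder values

  // Assumed to clear sampledIds_ so the next sample() call is legal.
  replayer.updatePriority(newPriority);
}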