in e2e-examples/gcs/benchmark/gcscpp_runner.cc [157:216]
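// Performs parameters_.runs random range reads of parameters_.chunk_size
// bytes each against a single GCS object via the C++ storage client,
// recording per-chunk timings for the watcher.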
bool GcscppRunner::DoRandomRead(int thread_id,
google::cloud::storage::Client storage_client) {
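  // Validate the read parameters: read_limit must be positive and exceed
  // read_offset, and chunk_size must be positive and fit within the
  // readable window.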
if (parameters_.read_limit <= 0) {
std::cerr << "read_limit should be greater than 0." << std::endl;
return false;
}
int64_t read_span =
parameters_.read_limit - std::max(int64_t(0), parameters_.read_offset);
if (read_span <= 0) {
std::cerr << "read_limit should be greater than read_offset." << std::endl;
return false;
}
  if (parameters_.chunk_size <= 0) {
    std::cerr << "chunk_size should be greater than 0." << std::endl;
    return false;
  }
  int64_t chunk_count = read_span / parameters_.chunk_size;
  if (chunk_count <= 0) {
    std::cerr << "chunk_size should not exceed the readable window "
                 "(read_limit - read_offset)."
              << std::endl;
    return false;
  }
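  // Resolve the target object for this thread, then set up the RNG and a
  // reusable 4 MiB read buffer shared across runs.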
std::string object = object_resolver_.Resolve(thread_id, 0);
absl::BitGen gen;
std::vector<char> buffer(4 * 1024 * 1024);
for (int run = 0; run < parameters_.runs; run++) {
    int64_t offset =
        absl::Uniform(gen, int64_t{0}, chunk_count) * parameters_.chunk_size;
absl::Time run_start = absl::Now();
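    // Read exactly one chunk: the byte range [offset, offset + chunk_size).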
auto reader =
storage_client.ReadObject(parameters_.bucket, object,
google::cloud::storage::ReadRange(
offset, offset + parameters_.chunk_size));
if (!reader) {
std::cerr << "Error reading object: " << reader.status() << "\n";
return false;
}
int64_t total_bytes = 0;
std::vector<RunnerWatcher::Chunk> chunks;
chunks.reserve(256);
    while (total_bytes < parameters_.chunk_size) {
      // Request only the bytes still missing from this chunk.
      size_t to_read =
          std::min(buffer.size(),
                   static_cast<size_t>(parameters_.chunk_size - total_bytes));
      reader.read(buffer.data(), to_read);
      int64_t content_size = reader.gcount();
      // Stop on EOF or a failed read instead of spinning forever.
      if (content_size <= 0) break;
      RunnerWatcher::Chunk chunk = {absl::Now(), content_size};
      chunks.push_back(chunk);
      total_bytes += content_size;
    }
reader.Close();
absl::Time run_end = absl::Now();
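    // Report this run's peer, byte count, duration, and per-chunk timeline
    // to the watcher.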
watcher_->NotifyCompleted(OperationType::Read, thread_id, 0,
ExtractPeer(reader.headers()), parameters_.bucket,
object, grpc::Status::OK, total_bytes, run_start,
run_end - run_start, chunks);
}
return true;
}