13 changes: 9 additions & 4 deletions docs/developer/benchmark_tool/state_store.md
@@ -84,10 +84,10 @@
Comma-separated list of operations to run in the specified order. The following operations are supported:

- `writebatch`: write N key/values in sequential key order in async mode.
-- `deleterandom`: delete N keys in random order. May delete a key/value many times even it has been deleted before during this operation. If the state store is already empty before this operation, randomly-generated keys would be deleted.
-- `getrandom`: read N keys in random order. May read a key/value many times even it has been read before during this operation. If the state store is already empty before this operation, randomly-generated keys would be read instead.
-- `getseq`: read N times sequentially. Panic if keys in the state store are less than number to get.
-- `prefixscanrandom`: prefix scan N times in random order. May scan a prefix many times even it has been scanned before during this operation. If the state store is already empty before this operation, randomly-generated prefixes would be scanned in this empty state store.
+- `deleterandom`: delete N keys in random order. May delete a key/value many times even if it has already been deleted during this operation. If the state store is completely empty before this operation, randomly generated keys will be deleted.
+- `getrandom`: read N keys in random order. May read a key/value many times even if it has already been read during this operation. If the state store is completely empty before this operation, randomly generated keys will be read instead.
+- `getseq`: read N times sequentially. Panics if the state store contains fewer keys than the number to get. If the state store is completely empty, sequentially generated keys will be read instead.
+- `prefixscanrandom`: prefix scan N times in random order. May scan a prefix many times even if it has already been scanned during this operation. If the state store is completely empty before this operation, randomly generated prefixes will be scanned instead.

Example: `--benchmarks "writebatch,prefixscanrandom,getrandom"`
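For instance, a hypothetical invocation (values chosen only for illustration, using flags documented on this page) could combine the operation list with the per-operation counts: `--benchmarks "writebatch,getrandom,prefixscanrandom" --num 1000000 --reads -1 --scans 10000`, where `--reads -1` falls back to `--num` while `--scans` limits the number of scanned prefixes.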

@@ -108,6 +108,11 @@ Example: `--benchmarks "writebatch,prefixscanrandom,getrandom"`
- Number of read keys. If negative, do `--num` reads.
- Default: -1

+- `--scans`
+
+- Number of scanned prefixes. If negative, do `--num` scans.
+- Default: -1
+
- `--writes`

- Number of written key/values. If negative, do `--num` writes.
6 changes: 6 additions & 0 deletions rust/bench/ss_bench/main.rs
@@ -55,6 +55,9 @@ pub(crate) struct Opts {
#[clap(long, default_value_t = -1)]
reads: i64,

+#[clap(long, default_value_t = -1)]
+scans: i64,
+
#[clap(long, default_value_t = -1)]
writes: i64,

@@ -179,6 +182,9 @@ fn preprocess_options(opts: &mut Opts) {
if opts.reads < 0 {
opts.reads = opts.num;
}
+if opts.scans < 0 {
+opts.scans = opts.num;
+}
if opts.deletes < 0 {
opts.deletes = opts.num;
}
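The added `scans` handling follows the same "-1 means inherit `--num`" convention as the other counts. A minimal standalone sketch of that convention (the struct here is an illustrative stand-in, not the real ss_bench `Opts`, which is built with clap):

```rust
// Illustrative stand-in for the count fields of ss_bench's `Opts`.
#[derive(Debug)]
struct Opts {
    num: i64,
    reads: i64,
    scans: i64,
    writes: i64,
    deletes: i64,
}

// Any per-operation count left at its default (-1) falls back to `--num`.
fn preprocess_options(opts: &mut Opts) {
    let num = opts.num;
    for count in [
        &mut opts.reads,
        &mut opts.scans,
        &mut opts.writes,
        &mut opts.deletes,
    ] {
        if *count < 0 {
            *count = num;
        }
    }
}

fn main() {
    // `scans` is set explicitly; the other counts inherit `num`.
    let mut opts = Opts {
        num: 1_000_000,
        reads: -1,
        scans: 250_000,
        writes: -1,
        deletes: -1,
    };
    preprocess_options(&mut opts);
    println!("{opts:?}");
}
```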
5 changes: 2 additions & 3 deletions rust/bench/ss_bench/operations/prefix_scan_random.rs
@@ -17,11 +17,10 @@
pub(crate) async fn prefix_scan_random(&mut self, store: &impl StateStore, opts: &Opts) {
// generate queried prefixes
let mut scan_prefixes = match self.prefixes.is_empty() {
-// if prefixes is empty, use default prefix: ["a"*key_prefix_size]
-true => Workload::new_random_keys(opts, opts.reads as u64, &mut self.rng).0,
+true => Workload::new_random_prefixes(opts, opts.scans as u64, &mut self.rng),
false => {
let dist = Uniform::from(0..self.prefixes.len());
-(0..opts.reads as usize)
+(0..opts.scans as usize)
.into_iter()
.map(|_| self.prefixes[dist.sample(&mut self.rng)].clone())
.collect_vec()
7 changes: 6 additions & 1 deletion rust/bench/ss_bench/operations/write_batch.rs
@@ -68,7 +68,12 @@
mut batches: Vec<Batch>,
) -> PerfMetrics {
let batches_len = batches.len();
-let size = size_of_val(&batches);
+// TODO(Ting Sun): use sizes from metrics directly
+let size = batches
+.iter()
+.flat_map(|batch| batch.iter())
+.map(|(key, value)| size_of_val(key) + size_of_val(value))
+.sum::<usize>();

// partition these batches for each concurrency
let mut grouped_batches = vec![vec![]; opts.concurrency_num as usize];
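For context on why the size calculation changed: `size_of_val(&batches)` measures only the outer `Vec`'s stack representation (pointer, length, capacity), so the reported size was a small constant regardless of how much data the batches held. The new code sums a per-entry size instead, which is still an approximation (hence the TODO). A rough standalone sketch of the difference, using plain `Vec<u8>` key/values rather than the real batch types:

```rust
use std::mem::size_of_val;

fn main() {
    // Stand-in for the benchmark's `Vec<Batch>`: 2 batches of 4 key/value pairs each.
    let batches: Vec<Vec<(Vec<u8>, Vec<u8>)>> =
        vec![vec![(vec![0u8; 8], vec![0u8; 100]); 4]; 2];

    // Old measurement: just the outer Vec header (ptr/len/cap), e.g. 24 bytes on a
    // 64-bit target, no matter how much data the batches actually hold.
    let outer_only = size_of_val(&batches);

    // New measurement: sum per-entry sizes across all batches. Note this is still
    // the size of the key/value containers themselves, not their heap payloads.
    let summed: usize = batches
        .iter()
        .flat_map(|batch| batch.iter())
        .map(|(key, value)| size_of_val(key) + size_of_val(value))
        .sum();

    println!("outer only: {outer_only} bytes, per-entry sum: {summed} bytes");
}
```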
18 changes: 12 additions & 6 deletions rust/bench/ss_bench/utils/workload.rs
@@ -67,13 +67,11 @@
(keys_num + opts.keys_per_prefix as u64 - 1) / opts.keys_per_prefix as u64
}

-/// Generate the random keys of given number
-pub(crate) fn new_random_keys(opts: &Opts, key_num: u64, rng: &mut StdRng) -> (Prefixes, Keys) {
-// --- get prefixes ---
+/// Generate the given number of random prefixes
+pub(crate) fn new_random_prefixes(opts: &Opts, prefix_num: u64, rng: &mut StdRng) -> Prefixes {
let str_dist = Uniform::new_inclusive(0, 255);

-let prefix_num = Self::prefix_num(opts, key_num);
-let prefixes = (0..prefix_num)
+(0..prefix_num)
.into_iter()
.map(|_| {
let prefix = rng
@@ -84,9 +82,17 @@

Bytes::from(prefix)
})
-.collect_vec();
+.collect_vec()
+}

+/// Generate the given number of random keys
+pub(crate) fn new_random_keys(opts: &Opts, key_num: u64, rng: &mut StdRng) -> (Prefixes, Keys) {
+// --- get prefixes ---
+let prefix_num = Self::prefix_num(opts, key_num);
+let prefixes = Workload::new_random_prefixes(opts, prefix_num, rng);
+
// --- get keys ---
+let str_dist = Uniform::new_inclusive(0, 255);
let keys = (0..key_num as u64)
.into_iter()
.map(|i| {
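Net effect of this hunk: prefix generation moves into its own `new_random_prefixes` helper, which `prefix_scan_random` now calls directly with `--scans`, while `new_random_keys` delegates to it for its prefix set. The `prefix_num` helper visible at the top of the hunk is a ceiling division; a quick sketch of the arithmetic with made-up numbers:

```rust
fn main() {
    // Workload::prefix_num rounds up so every key falls under some prefix:
    // (keys_num + keys_per_prefix - 1) / keys_per_prefix
    let keys_num: u64 = 10;
    let keys_per_prefix: u64 = 4;
    let prefix_num = (keys_num + keys_per_prefix - 1) / keys_per_prefix;
    assert_eq!(prefix_num, 3); // 10 keys at up to 4 keys per prefix -> 3 prefixes
    println!("{prefix_num}");
}
```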