in eden/mononoke/walker/src/commands/setup.rs [705:1040]
/// Attach the walker's common CLI arguments to a subcommand.
///
/// Adds flags controlling logging, redaction, scheduling, tailing,
/// progress reporting, node/edge type filtering, chunked traversal,
/// checkpointing, error-as-data handling, blobstore selection, scuba
/// logging, and hash-validation filtering. The subcommand is returned
/// with all arguments registered.
fn setup_subcommand_args<'a, 'b>(subcmd: App<'a, 'b>) -> App<'a, 'b> {
    subcmd
        .arg(
            Arg::with_name(QUIET_ARG)
                .long(QUIET_ARG)
                .short("q")
                .takes_value(false)
                .required(false)
                .help("Log a lot less"),
        )
        .arg(
            Arg::with_name(ENABLE_REDACTION_ARG)
                .long(ENABLE_REDACTION_ARG)
                .takes_value(false)
                .required(false)
                .help("Use redaction from config. Default is redaction off."),
        )
        .arg(
            Arg::with_name(SCHEDULED_MAX_ARG)
                .long(SCHEDULED_MAX_ARG)
                .takes_value(true)
                .required(false)
                .help("Maximum number of walk step tasks to attempt to execute at once. Default 4096."),
        )
        .arg(
            Arg::with_name(TAIL_INTERVAL_ARG)
                .long(TAIL_INTERVAL_ARG)
                .takes_value(true)
                .required(false)
                .help("Tail by polling the entry points at interval of TAIL seconds"),
        )
        .arg(
            Arg::with_name(PROGRESS_INTERVAL_ARG)
                .long(PROGRESS_INTERVAL_ARG)
                .takes_value(true)
                .required(false)
                .help("Minimum interval between progress reports in seconds."),
        )
        .arg(
            Arg::with_name(PROGRESS_SAMPLE_RATE_ARG)
                .long(PROGRESS_SAMPLE_RATE_ARG)
                .takes_value(true)
                .required(false)
                .help("Sample the walk output stream for progress roughly 1 in N steps. Only log if progress-interval has passed."),
        )
        .arg(
            Arg::with_name(ENABLE_DERIVE_ARG)
                .long(ENABLE_DERIVE_ARG)
                .takes_value(false)
                .required(false)
                .help("Enable derivation of data (e.g. hg, file metadata). Default is false"),
        )
        .arg(
            Arg::with_name(EXCLUDE_NODE_TYPE_ARG)
                .long(EXCLUDE_NODE_TYPE_ARG)
                .short("x")
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .possible_values(&NODE_TYPE_POSSIBLE_VALUES)
                .help("Graph node types to exclude from walk. They are removed from the include node types."),
        )
        .arg(
            Arg::with_name(INCLUDE_NODE_TYPE_ARG)
                .long(INCLUDE_NODE_TYPE_ARG)
                .short("i")
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .default_value(DEFAULT_VALUE_ARG)
                .possible_values(&NODE_TYPE_POSSIBLE_VALUES)
                .hide_possible_values(true)
                .help(&INCLUDE_NODE_TYPE_HELP),
        )
        .arg(
            Arg::with_name(EXCLUDE_EDGE_TYPE_ARG)
                .long(EXCLUDE_EDGE_TYPE_ARG)
                .short("X")
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .possible_values(&EDGE_TYPE_POSSIBLE_VALUES)
                .help("Graph edge types to exclude from walk. Can pass pre-configured sets via deep, shallow, hg, bonsai, etc as well as individual types."),
        )
        .arg(
            Arg::with_name(INCLUDE_EDGE_TYPE_ARG)
                .long(INCLUDE_EDGE_TYPE_ARG)
                .short("I")
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .default_value(DEEP_VALUE_ARG)
                .possible_values(&EDGE_TYPE_POSSIBLE_VALUES)
                .hide_possible_values(true)
                .help(&INCLUDE_EDGE_TYPE_HELP),
        )
        .arg(
            Arg::with_name(BOOKMARK_ARG)
                .long(BOOKMARK_ARG)
                .short("b")
                .takes_value(true)
                .required(false)
                .multiple(true)
                .number_of_values(1)
                .help("Bookmark(s) to start traversal from"),
        )
        .arg(
            Arg::with_name(WALK_ROOT_ARG)
                .long(WALK_ROOT_ARG)
                .short("r")
                .takes_value(true)
                .required(false)
                .multiple(true)
                .number_of_values(1)
                .help("Root(s) to start traversal from in format <NodeType>:<node_key>, e.g. Bookmark:master or HgChangeset:7712b62acdc858689504945ac8965a303ded6626"),
        )
        .arg(
            Arg::with_name(CHUNK_BY_PUBLIC_ARG)
                .long(CHUNK_BY_PUBLIC_ARG)
                // p because its chunk from public changeset id (c and C already taken)
                .short("p")
                .takes_value(true)
                .required(false)
                .multiple(true)
                .number_of_values(1)
                .possible_values(&CHUNK_BY_PUBLIC_POSSIBLE_VALUES)
                .help("Traverse using chunks of public changesets as roots to the specified node type"),
        )
        .arg(
            Arg::with_name(CHUNK_DIRECTION_ARG)
                .long(CHUNK_DIRECTION_ARG)
                .short("d")
                .takes_value(true)
                .multiple(false)
                .number_of_values(1)
                .possible_values(Direction::VARIANTS)
                .requires(CHUNK_BY_PUBLIC_ARG)
                .required(false)
                .help("Set the direction to proceed through changesets"),
        )
        .arg(
            Arg::with_name(CHUNK_SIZE_ARG)
                .long(CHUNK_SIZE_ARG)
                .short("k")
                .default_value("100000")
                .takes_value(true)
                .required(false)
                .multiple(false)
                .number_of_values(1)
                .help("How many changesets to include in a chunk."),
        )
        .arg(
            Arg::with_name(CHUNK_CLEAR_SAMPLE_RATE_ARG)
                .long(CHUNK_CLEAR_SAMPLE_RATE_ARG)
                .short("K")
                .takes_value(true)
                .required(false)
                .help("Clear the saved walk state 1 in N steps."),
        )
        .arg(
            Arg::with_name(INCLUDE_CHUNK_CLEAR_INTERNED_TYPE_ARG)
                .long(INCLUDE_CHUNK_CLEAR_INTERNED_TYPE_ARG)
                .short("t")
                .takes_value(true)
                .required(false)
                .multiple(false)
                .number_of_values(1)
                .possible_values(&INTERNED_TYPE_POSSIBLE_VALUES)
                .help("Include in InternedTypes to flush between chunks"),
        )
        .arg(
            Arg::with_name(EXCLUDE_CHUNK_CLEAR_INTERNED_TYPE_ARG)
                .long(EXCLUDE_CHUNK_CLEAR_INTERNED_TYPE_ARG)
                .short("T")
                .takes_value(true)
                .required(false)
                .multiple(false)
                .number_of_values(1)
                .possible_values(&INTERNED_TYPE_POSSIBLE_VALUES)
                .help("Exclude from InternedTypes to flush between chunks"),
        )
        .arg(
            Arg::with_name(INCLUDE_CHUNK_CLEAR_NODE_TYPE_ARG)
                .long(INCLUDE_CHUNK_CLEAR_NODE_TYPE_ARG)
                .short("n")
                .takes_value(true)
                .required(false)
                .multiple(false)
                .number_of_values(1)
                .possible_values(&NODE_TYPE_POSSIBLE_VALUES)
                .help("Include in NodeTypes to flush between chunks"),
        )
        .arg(
            Arg::with_name(EXCLUDE_CHUNK_CLEAR_NODE_TYPE_ARG)
                .long(EXCLUDE_CHUNK_CLEAR_NODE_TYPE_ARG)
                .short("N")
                .takes_value(true)
                .required(false)
                .multiple(false)
                .number_of_values(1)
                .possible_values(&NODE_TYPE_POSSIBLE_VALUES)
                .help("Exclude from NodeTypes to flush between chunks"),
        )
        .arg(
            Arg::with_name(CHECKPOINT_NAME_ARG)
                .long(CHECKPOINT_NAME_ARG)
                .takes_value(true)
                .required(false)
                .help("Name of checkpoint."),
        )
        .arg(
            Arg::with_name(CHECKPOINT_PATH_ARG)
                .long(CHECKPOINT_PATH_ARG)
                .takes_value(true)
                .required(false)
                .requires(CHECKPOINT_NAME_ARG)
                .help("Path for sqlite checkpoint db if using sqlite"),
        )
        .arg(
            Arg::with_name(CHECKPOINT_SAMPLE_RATE_ARG)
                .long(CHECKPOINT_SAMPLE_RATE_ARG)
                .takes_value(true)
                .required(false)
                .default_value("1")
                .help("Checkpoint the walk covered bounds 1 in N steps."),
        )
        .arg(
            Arg::with_name(STATE_MAX_AGE_ARG)
                .long(STATE_MAX_AGE_ARG)
                .takes_value(true)
                .required(false)
                // 5 days = 5 * 24 * 3600 seconds = 432000
                .default_value("432000")
                // Fixed typo: "ot" -> "or"
                .help("Max age of walk state held internally or loaded from checkpoint that we will attempt to continue from, in seconds."),
        )
        .arg(
            Arg::with_name(REPO_LOWER_BOUND)
                .long(REPO_LOWER_BOUND)
                .takes_value(true)
                .required(false)
                .requires(CHUNK_BY_PUBLIC_ARG)
                // Fixed: this is the LOWER bound; help text previously said "upper"
                .help("Set the repo lower bound used by chunking instead of loading it. Inclusive. Useful for reproducing issues from a particular chunk."),
        )
        .arg(
            Arg::with_name(REPO_UPPER_BOUND)
                .long(REPO_UPPER_BOUND)
                .takes_value(true)
                .required(false)
                .requires(CHUNK_BY_PUBLIC_ARG)
                // Fixed: this is the UPPER bound; help text previously said "lower"
                .help("Set the repo upper bound used by chunking instead of loading it. Exclusive (used in rust ranges). Useful for reproducing issues from a particular chunk."),
        )
        .arg(
            Arg::with_name(ALLOW_REMAINING_DEFERRED_ARG)
                .long(ALLOW_REMAINING_DEFERRED_ARG)
                .takes_value(true)
                .required(false)
                .default_value("false")
                .help("Whether to allow remaining deferred edges after chunks complete. Well structured repos should have none."),
        )
        .arg(
            Arg::with_name(ERROR_AS_DATA_NODE_TYPE_ARG)
                .long(ERROR_AS_DATA_NODE_TYPE_ARG)
                .short("e")
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .help("Use this to continue walking even if the walker found an error. Types of nodes to allow the walker to convert an ErrorKind::NotTraversable to a NodeData::ErrorAsData(NotTraversable)"),
        )
        .arg(
            Arg::with_name(ERROR_AS_DATA_EDGE_TYPE_ARG)
                .long(ERROR_AS_DATA_EDGE_TYPE_ARG)
                .short("E")
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .help("Types of edges to allow the walker to convert an ErrorKind::NotTraversable to a NodeData::ErrorAsData(NotTraversable). If empty then allow all edges for the nodes specified via error-as-data-node-type"),
        )
        .arg(
            Arg::with_name(INNER_BLOBSTORE_ID_ARG)
                .long(INNER_BLOBSTORE_ID_ARG)
                .takes_value(true)
                .required(false)
                .help("If main blobstore in the storage config is a multiplexed one, use inner blobstore with this id")
        )
        .arg(
            Arg::with_name(SCUBA_TABLE_ARG)
                .long(SCUBA_TABLE_ARG)
                .takes_value(true)
                .multiple(false)
                .required(false)
                .help("Scuba table for logging nodes with issues. e.g. mononoke_walker"),
        )
        .arg(
            Arg::with_name(SCUBA_LOG_FILE_ARG)
                .long(SCUBA_LOG_FILE_ARG)
                .takes_value(true)
                .multiple(false)
                .required(false)
                .help("A log file to write Scuba logs to (primarily useful in testing)"),
        )
        .arg(
            Arg::with_name(EXCLUDE_HASH_VALIDATION_NODE_TYPE_ARG)
                .long(EXCLUDE_HASH_VALIDATION_NODE_TYPE_ARG)
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .possible_values(&NODE_HASH_VALIDATION_POSSIBLE_VALUES)
                .help("Node types for which we don't want to do hash validation"),
        )
        .arg(
            Arg::with_name(INCLUDE_HASH_VALIDATION_NODE_TYPE_ARG)
                .long(INCLUDE_HASH_VALIDATION_NODE_TYPE_ARG)
                .takes_value(true)
                .multiple(true)
                .number_of_values(1)
                .required(false)
                .possible_values(&NODE_HASH_VALIDATION_POSSIBLE_VALUES)
                .hide_possible_values(true)
                .help("Node types for which we want to do hash validation"),
        )
        .arg(
            Arg::with_name(BLOBSTORE_SAMPLING_MULTIPLIER)
                .long(BLOBSTORE_SAMPLING_MULTIPLIER)
                .takes_value(true)
                .required(false)
                .default_value("100")
                .help("Add a multiplier on sampling requests")
        )
}