# pyrit/auxiliary_attacks/gcg/experiments/configs/individual_mistral.yaml
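#
# GCG adversarial-suffix attack against a single ("individual") model, Mistral-7B-Instruct-v0.1.
# The comments below summarize what each group of settings does, following the upstream GCG
# (Greedy Coordinate Gradient) implementation this module is adapted from.
#
# Attack mode: no transfer or progressive scheduling; the loss is driven entirely by the target
# string (target_weight 1.0, control_weight 0.0), non-ASCII tokens are excluded from the candidate
# vocabulary, and the run uses its full step budget instead of stopping on the first success.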
transfer: False
target_weight: 1.0
control_weight: 0.0
progressive_goals: False
progressive_models: False
anneal: False
incr_control: False
stop_on_success: False
verbose: True
allow_non_ascii: False
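# The attack optimizes against one model; outputs are written under the results/individual_mistral prefix.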
num_train_models: 1
result_prefix: "results/individual_mistral"
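# Model and tokenizer: Hugging Face Hub IDs for Mistral-7B-Instruct-v0.1, loaded on a single GPU
# with the slow (non-fast) tokenizer, reduced CPU memory use at load time, the KV cache disabled,
# and the "mistral" conversation template.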
tokenizer_paths: ["mistralai/Mistral-7B-Instruct-v0.1"]
tokenizer_kwargs: [{"use_fast": False}]
model_paths: ["mistralai/Mistral-7B-Instruct-v0.1"]
model_kwargs: [{"low_cpu_mem_usage": True, "use_cache": False}]
conversation_templates: ["mistral"]
devices: ["cuda:0"]
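# Data: the goal/target dataset paths are left empty here and are expected to be supplied when the
# experiment is launched; 50 training goals are used and no held-out test set is evaluated.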
train_data: ""
test_data: ""
n_train_data: 50
n_test_data: 0
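# Optimization: the adversarial suffix starts as 20 "!" placeholder tokens and is refined for
# 500 GCG steps, sampling 512 candidate swaps per step from the top-256 tokens ranked by the
# token gradient; progress is evaluated every 50 steps.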
control_init: "! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! !"
n_steps: 500
test_steps: 50
batch_size: 512
lr: 0.01
topk: 256
temp: 1
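# Candidate handling: filter out candidate suffixes that do not survive a decode/re-encode round
# trip; gbda_deterministic is presumably consumed by the GBDA baseline rather than by GCG itself.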
filter_cand: True
gbda_deterministic: True