# pyrit/auxiliary_attacks/gcg/experiments/configs/transfer_llama_2.yaml
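#
# Experiment settings for the GCG (greedy coordinate gradient) adversarial-suffix
# attack run in transfer mode against meta-llama/Llama-2-7b-chat-hf. Each
# list-valued field holds one entry per target model; this config targets a
# single model.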
transfer: True              # optimize a suffix intended to transfer across prompts/models
logfile: ""                 # output path for the attack's result log; left empty here
progressive_goals: False    # do not add goals to the objective incrementally
stop_on_success: False      # keep optimizing instead of stopping at the first successful attack
tokenizer_paths: ["meta-llama/Llama-2-7b-chat-hf"]   # Hugging Face id of each target tokenizer
tokenizer_kwargs: [{"use_fast": False}]              # load the slow (SentencePiece) Llama tokenizer
model_paths: ["meta-llama/Llama-2-7b-chat-hf"]       # Hugging Face id of each target model
model_kwargs: [{"low_cpu_mem_usage": True, "use_cache": False}]  # loading kwargs; KV cache disabled
conversation_templates: ["llama-2"]                  # conversation template name used to format prompts
devices: ["cuda:0"]                                  # device each target model is placed on