{
  "T": 1.0,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
  "bias": "none",
  "drop_out": 0.0,
  "inference_mode": false,
  "layers_to_transform": null,
  "modules_to_save": null,
  "num_rotations": 1,
  "peft_type": "ROTATION",
  "r": 16,
  "revision": null,
  "target_modules": [
    "q_proj",
    "v_proj"
  ],
  "target_modules_to_skip": null,
  "task_type": "CAUSAL_LM"
}