{
  "attn_implementation": "flash_attention_2",
  "bos_token_id": 128000,
  "cache_config": null,
  "do_sample": true,
  "eos_token_id": 128001,
  "max_length": 4096,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.37.2",
  "watermarking_config": null
}