grimjim committed
Commit 36781fd
1 Parent(s): 9ade540

Upload Llama 3 Smoothie Presets 8k context.json

presets/Llama 3 Smoothie Presets 8k context.json ADDED
@@ -0,0 +1,92 @@
+ {
+   "temp": 1,
+   "temperature_last": true,
+   "top_p": 1,
+   "top_k": 0,
+   "top_a": 0,
+   "tfs": 1,
+   "epsilon_cutoff": 0,
+   "eta_cutoff": 0,
+   "typical_p": 1,
+   "min_p": 0.01,
+   "rep_pen": 1,
+   "rep_pen_range": 2048,
+   "rep_pen_decay": 0,
+   "rep_pen_slope": 1,
+   "no_repeat_ngram_size": 0,
+   "penalty_alpha": 0,
+   "num_beams": 1,
+   "length_penalty": 1,
+   "min_length": 0,
+   "encoder_rep_pen": 1,
+   "freq_pen": 0,
+   "presence_pen": 0,
+   "skew": 0,
+   "do_sample": true,
+   "early_stopping": false,
+   "dynatemp": false,
+   "min_temp": 0,
+   "max_temp": 2,
+   "dynatemp_exponent": 1,
+   "smoothing_factor": 0.23,
+   "smoothing_curve": 4.32,
+   "dry_allowed_length": 2,
+   "dry_multiplier": 0,
+   "dry_base": 1.75,
+   "dry_sequence_breakers": "[\"\\n\", \":\", \"\\\"\", \"*\"]",
+   "dry_penalty_last_n": 0,
+   "add_bos_token": false,
+   "truncation_length": 2048,
+   "ban_eos_token": false,
+   "skip_special_tokens": false,
+   "streaming": true,
+   "mirostat_mode": 0,
+   "mirostat_tau": 5,
+   "mirostat_eta": 0.1,
+   "guidance_scale": 1,
+   "negative_prompt": "",
+   "grammar_string": "",
+   "json_schema": {},
+   "banned_tokens": "",
+   "sampler_priority": [
+     "dynamic_temperature",
+     "quadratic_sampling",
+     "top_k",
+     "top_p",
+     "typical_p",
+     "epsilon_cutoff",
+     "eta_cutoff",
+     "tfs",
+     "top_a",
+     "min_p",
+     "mirostat",
+     "temperature"
+   ],
+   "samplers": [
+     "top_k",
+     "tfs_z",
+     "typical_p",
+     "top_p",
+     "min_p",
+     "temperature"
+   ],
+   "ignore_eos_token": false,
+   "spaces_between_special_tokens": true,
+   "speculative_ngram": false,
+   "sampler_order": [
+     6,
+     0,
+     1,
+     3,
+     4,
+     2,
+     5
+   ],
+   "logit_bias": [],
+   "n": 1,
+   "ignore_eos_token_aphrodite": false,
+   "spaces_between_special_tokens_aphrodite": true,
+   "rep_pen_size": 0,
+   "genamt": 512,
+   "max_length": 8192
+ }
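
The "Smoothie" in the file name refers to smooth (quadratic) sampling: `smoothing_factor: 0.23` and `smoothing_curve: 4.32` are the only strongly shaping samplers in this preset, with `min_p: 0.01` as a light tail cutoff and the rest left neutral (`temp: 1`, `top_p: 1`, `top_k: 0`, `tfs: 1`). As a rough sketch of what those two numbers do, the snippet below applies the quadratic/cubic logit transform in the form used by text-generation-webui's quadratic sampling; the function name is illustrative and other backends may implement smoothing differently, so treat this as an assumption rather than the preset's authoritative semantics.

```python
import numpy as np

def smooth_sampling(logits: np.ndarray,
                    smoothing_factor: float = 0.23,
                    smoothing_curve: float = 4.32) -> np.ndarray:
    """Quadratic/cubic smoothing of raw logits, anchored at the top logit.

    With smoothing_curve == 1 the cubic weight vanishes and this reduces
    to the pure quadratic form -(smoothing_factor * diff**2) + max_logit.
    """
    max_logit = logits.max()
    diff = logits - max_logit          # <= 0 for every token
    k = (3.0 - smoothing_curve) / 2.0  # quadratic weight
    s = (smoothing_curve - 1.0) / 2.0  # cubic weight
    return (-(k * smoothing_factor * diff ** 2)
            + (s * smoothing_factor * diff ** 3)
            + max_logit)

# Example: a peaked 4-token distribution. The runner-up logit is pulled
# toward the maximum (head flattened) while the tail is pushed far down.
logits = np.array([5.0, 4.0, 2.0, -1.0])
print(smooth_sampling(logits))  # approx. [ 5.0, 4.77, -3.94, -71.99]
```

With `smoothing_curve` well above 1, the cubic term dominates: near-top logits move toward the maximum while low-probability logits drop sharply, which is presumably why the preset can leave `temp` at 1 and keep `top_p`/`top_k` disabled. Note also that `temperature_last: true` and the `sampler_priority` list place `quadratic_sampling` before every truncation sampler and before `temperature`, so the transform acts on raw logits first.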