{
  "add_reasoning_tokens": false,
  "apply_augments": true,
  "bbox_size": 1024,
  "beacon_token_interval": 512,
  "blank_bbox_token_id": 1025,
  "merge_size": 2,
  "num_beacon_tokens": 4,
  "num_register_tokens": 4,
  "patch_size": 14,
  "processor_class": "MultimodalFoundationProcessor",
  "styles": [
    "Plain",
    "Handwriting",
    "Math",
    "Chemical",
    "Code"
  ],
  "use_script_tokens": true
}