0-ma committed
Commit 45aa242
1 parent: 7818a3e

Upload BeitForImageClassification

Files changed (2)
  1. config.json +81 -0
  2. model.safetensors +3 -0
config.json ADDED
@@ -0,0 +1,81 @@
+ {
+   "_name_or_path": "microsoft/beit-base-patch16-224-pt22k-ft22k",
+   "add_fpn": false,
+   "architectures": [
+     "BeitForImageClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "auxiliary_channels": 256,
+   "auxiliary_concat_input": false,
+   "auxiliary_loss_weight": 0.4,
+   "auxiliary_num_convs": 1,
+   "drop_path_rate": 0.1,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "1",
+     "1": "2",
+     "2": "3",
+     "3": "4",
+     "4": "5",
+     "5": "6"
+   },
+   "image_size": 224,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "1": "0",
+     "2": "1",
+     "3": "2",
+     "4": "3",
+     "5": "4",
+     "6": "5"
+   },
+   "layer_norm_eps": 1e-12,
+   "layer_scale_init_value": 0.1,
+   "model_type": "beit",
+   "num_attention_heads": 12,
+   "num_channels": 3,
+   "num_hidden_layers": 12,
+   "out_features": [
+     "stage12"
+   ],
+   "out_indices": [
+     12
+   ],
+   "patch_size": 16,
+   "pool_scales": [
+     1,
+     2,
+     3,
+     6
+   ],
+   "problem_type": "single_label_classification",
+   "reshape_hidden_states": true,
+   "semantic_loss_ignore_index": 255,
+   "stage_names": [
+     "stem",
+     "stage1",
+     "stage2",
+     "stage3",
+     "stage4",
+     "stage5",
+     "stage6",
+     "stage7",
+     "stage8",
+     "stage9",
+     "stage10",
+     "stage11",
+     "stage12"
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.2",
+   "use_absolute_position_embeddings": false,
+   "use_auxiliary_head": true,
+   "use_mask_token": false,
+   "use_mean_pooling": true,
+   "use_relative_position_bias": true,
+   "use_shared_relative_position_bias": false,
+   "vocab_size": 8192
+ }
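
The config above describes a BEiT base classifier (12 layers, hidden size 768, patch size 16, 224x224 input) fine-tuned from microsoft/beit-base-patch16-224-pt22k-ft22k with six output labels ("1" through "6"). Below is a minimal usage sketch, assuming the uploaded config.json and model.safetensors are placed together in a local checkpoint directory; the directory and image paths are placeholders, and the image processor is loaded from the base model because no preprocessor_config.json is part of this commit.

    # Minimal sketch (assumptions noted above): load the uploaded checkpoint
    # from a local directory and classify one image.
    from PIL import Image
    from transformers import BeitForImageClassification, BeitImageProcessor

    checkpoint_dir = "./beit-checkpoint"  # placeholder: folder with config.json + model.safetensors
    processor = BeitImageProcessor.from_pretrained(
        "microsoft/beit-base-patch16-224-pt22k-ft22k"  # base model listed in "_name_or_path"
    )
    model = BeitForImageClassification.from_pretrained(checkpoint_dir)

    image = Image.open("example.jpg")  # placeholder input image
    inputs = processor(images=image, return_tensors="pt")
    logits = model(**inputs).logits  # shape (1, 6)
    predicted = model.config.id2label[logits.argmax(-1).item()]
    print(predicted)  # one of the six class names from id2label
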
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a5566aed0414dab06af23674292d38f27af92ecc7a693937c213bf1bce94402
+ size 343092632
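
The model.safetensors entry committed here is a Git LFS pointer rather than the weights themselves: it records the sha256 digest and byte size (343092632) of the real file, which Git LFS fetches on checkout. A small verification sketch, assuming the pointer text and the downloaded weights are both available locally (file names are placeholders):

    # Minimal sketch: check a downloaded model.safetensors against the
    # Git LFS pointer shown above. Paths are placeholders.
    import hashlib
    import os

    pointer = {}
    with open("model.safetensors.pointer") as f:  # the 3-line pointer text
        for line in f:
            key, _, value = line.strip().partition(" ")
            pointer[key] = value

    expected_oid = pointer["oid"].split(":", 1)[1]  # drop the "sha256:" prefix
    expected_size = int(pointer["size"])

    weights_path = "model.safetensors"  # the actual ~343 MB weights file
    digest = hashlib.sha256()
    with open(weights_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)

    assert os.path.getsize(weights_path) == expected_size
    assert digest.hexdigest() == expected_oid
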