patrickvonplaten commited on
Commit
9323626
1 Parent(s): 083e923

update model for pretraining compatibility

Browse files
Files changed (2) hide show
  1. config.json +15 -3
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -2,10 +2,12 @@
2
  "activation_dropout": 0.0,
3
  "apply_spec_augment": true,
4
  "architectures": [
5
- "Wav2Vec2Model"
6
  ],
7
  "attention_dropout": 0.1,
8
  "bos_token_id": 1,
 
 
9
  "conv_bias": false,
10
  "conv_dim": [
11
  512,
@@ -34,19 +36,23 @@
34
  2,
35
  2
36
  ],
 
 
 
37
  "do_stable_layer_norm": false,
38
  "eos_token_id": 2,
39
  "feat_extract_activation": "gelu",
40
  "feat_extract_norm": "group",
41
  "feat_proj_dropout": 0.1,
 
42
  "final_dropout": 0.0,
43
  "freeze_feat_extract_train": true,
 
44
  "hidden_act": "gelu",
45
  "hidden_dropout": 0.1,
46
  "hidden_size": 768,
47
  "initializer_range": 0.02,
48
  "intermediate_size": 3072,
49
- "gradient_checkpointing": true,
50
  "layer_norm_eps": 1e-05,
51
  "layerdrop": 0.05,
52
  "mask_channel_length": 10,
@@ -54,6 +60,8 @@
54
  "mask_channel_other": 0.0,
55
  "mask_channel_prob": 0.0,
56
  "mask_channel_selection": "static",
 
 
57
  "mask_time_length": 10,
58
  "mask_time_min_space": 1,
59
  "mask_time_other": 0.0,
@@ -63,11 +71,15 @@
63
  "no_mask_channel_overlap": false,
64
  "no_mask_time_overlap": false,
65
  "num_attention_heads": 12,
 
 
66
  "num_conv_pos_embedding_groups": 16,
67
  "num_conv_pos_embeddings": 128,
68
  "num_feat_extract_layers": 7,
69
  "num_hidden_layers": 12,
 
70
  "pad_token_id": 0,
71
- "transformers_version": "4.4.0.dev0",
 
72
  "vocab_size": 32
73
  }
 
2
  "activation_dropout": 0.0,
3
  "apply_spec_augment": true,
4
  "architectures": [
5
+ "Wav2Vec2ForPreTraining"
6
  ],
7
  "attention_dropout": 0.1,
8
  "bos_token_id": 1,
9
+ "codevector_dim": 256,
10
+ "contrastive_logits_temperature": 0.1,
11
  "conv_bias": false,
12
  "conv_dim": [
13
  512,
 
36
  2,
37
  2
38
  ],
39
+ "ctc_loss_reduction": "sum",
40
+ "ctc_zero_infinity": false,
41
+ "diversity_loss_weight": 0.1,
42
  "do_stable_layer_norm": false,
43
  "eos_token_id": 2,
44
  "feat_extract_activation": "gelu",
45
  "feat_extract_norm": "group",
46
  "feat_proj_dropout": 0.1,
47
+ "feat_quantizer_dropout": 0.0,
48
  "final_dropout": 0.0,
49
  "freeze_feat_extract_train": true,
50
+ "gradient_checkpointing": true,
51
  "hidden_act": "gelu",
52
  "hidden_dropout": 0.1,
53
  "hidden_size": 768,
54
  "initializer_range": 0.02,
55
  "intermediate_size": 3072,
 
56
  "layer_norm_eps": 1e-05,
57
  "layerdrop": 0.05,
58
  "mask_channel_length": 10,
 
60
  "mask_channel_other": 0.0,
61
  "mask_channel_prob": 0.0,
62
  "mask_channel_selection": "static",
63
+ "mask_feature_length": 10,
64
+ "mask_feature_prob": 0.0,
65
  "mask_time_length": 10,
66
  "mask_time_min_space": 1,
67
  "mask_time_other": 0.0,
 
71
  "no_mask_channel_overlap": false,
72
  "no_mask_time_overlap": false,
73
  "num_attention_heads": 12,
74
+ "num_codevector_groups": 2,
75
+ "num_codevectors_per_group": 320,
76
  "num_conv_pos_embedding_groups": 16,
77
  "num_conv_pos_embeddings": 128,
78
  "num_feat_extract_layers": 7,
79
  "num_hidden_layers": 12,
80
+ "num_negatives": 100,
81
  "pad_token_id": 0,
82
+ "proj_codevector_dim": 256,
83
+ "transformers_version": "4.7.0.dev0",
84
  "vocab_size": 32
85
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4373c7ec550c4bc5a3b606994b5343a80c23db87899129e05f967d2688b370dd
3
- size 377569754
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3249fe98bfc62fcbc26067f724716a6ec49d12c4728a2af1df659013905dff21
3
+ size 380267417