Narsil (HF staff) committed on
Commit
6aa135b
Parent: 4086a73

Upload folder using huggingface_hub

config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "tiiuae/falcon-40b",
+   "alibi": false,
+   "apply_residual_connection_post_layernorm": false,
+   "architectures": [
+     "RWForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "tiiuae/falcon-40b--configuration_RW.RWConfig",
+     "AutoModel": "tiiuae/falcon-40b--modelling_RW.RWModel",
+     "AutoModelForCausalLM": "tiiuae/falcon-40b--modelling_RW.RWForCausalLM",
+     "AutoModelForQuestionAnswering": "tiiuae/falcon-40b--modelling_RW.RWForQuestionAnswering",
+     "AutoModelForSequenceClassification": "tiiuae/falcon-40b--modelling_RW.RWForSequenceClassification",
+     "AutoModelForTokenClassification": "tiiuae/falcon-40b--modelling_RW.RWForTokenClassification"
+   },
+   "bias": false,
+   "bos_token_id": 11,
+   "eos_token_id": 11,
+   "hidden_dropout": 0.0,
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "model_type": "RefinedWeb",
+   "n_head": 128,
+   "n_head_kv": 8,
+   "n_layer": 60,
+   "parallel_attn": true,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.29.0.dev0",
+   "use_cache": true,
+   "vocab_size": 65024
+ }
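Note the auto_map entries above: they resolve to custom RW* modelling code hosted on the tiiuae/falcon-40b repo, so loading this checkpoint through transformers requires trust_remote_code=True. Also note n_head 128 versus n_head_kv 8: the config uses 8 key/value head groups shared across 128 query heads. A minimal loading sketch, assuming the repo id from _name_or_path and an accelerate-style device map (neither is part of this commit):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "tiiuae/falcon-40b"  # assumed repo id, taken from _name_or_path

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
    trust_remote_code=True,      # required: auto_map points at custom RW* classes
    device_map="auto",           # assumption: shard the weights across devices
)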
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5614648b0f46f2f0bef5501f1749b1e8dbd9b3bf318c6c7b1c78e94fc77bf41a
+ size 9972247152
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e4c75f4b252ad69c7bf929ce816a42a37f9797a453002a2235005690fbad3aa
+ size 9892593864
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55fe6072005199666be8e9d2e2cc9f4a638bb1dcd507b765c68cd6a445d2e23c
+ size 3464313968
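The three shard entries above are Git LFS pointer files, not the weights themselves: the repo records only the spec version, the sha256 object id, and the byte size (9972247152 + 9892593864 + 3464313968 bytes, roughly 23.3 GB in total), while the actual shard contents live in LFS storage. A minimal integrity-check sketch, assuming the real shards have been downloaded into the current directory:

import hashlib

# Expected sha256 oids copied from the LFS pointers above.
expected = {
    "model-00001-of-00003.safetensors": "5614648b0f46f2f0bef5501f1749b1e8dbd9b3bf318c6c7b1c78e94fc77bf41a",
    "model-00002-of-00003.safetensors": "2e4c75f4b252ad69c7bf929ce816a42a37f9797a453002a2235005690fbad3aa",
    "model-00003-of-00003.safetensors": "55fe6072005199666be8e9d2e2cc9f4a638bb1dcd507b765c68cd6a445d2e23c",
}

for name, oid in expected.items():
    h = hashlib.sha256()
    with open(name, "rb") as f:
        # Hash in 1 MiB chunks so the ~10 GB shards never sit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    print(name, "OK" if h.hexdigest() == oid else "MISMATCH")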
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
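Although the diff is not rendered here, model.safetensors.index.json is the shard index: in the standard safetensors sharding format it carries a metadata.total_size field plus a weight_map from each tensor name to the shard file that stores it. A small inspection sketch, assuming the index file has been downloaded locally:

import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Total parameter bytes across all shards (standard index field).
print(index["metadata"]["total_size"])

# weight_map: tensor name -> shard file; collapse to the set of shard files.
print(sorted(set(index["weight_map"].values())))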
 
special_tokens_map.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "additional_special_tokens": [
+     ">>TITLE<<",
+     ">>ABSTRACT<<",
+     ">>INTRODUCTION<<",
+     ">>SUMMARY<<",
+     ">>COMMENT<<",
+     ">>ANSWER<<",
+     ">>QUESTION<<",
+     ">>DOMAIN<<",
+     ">>PREFIX<<",
+     ">>SUFFIX<<",
+     ">>MIDDLE<<"
+   ],
+   "eos_token": "<|endoftext|>"
+ }
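The >>TITLE<< / >>ABSTRACT<< / >>QUESTION<< markers registered above are atomic special tokens, so the tokenizer emits a single id for each instead of splitting them into sub-words. A quick check, assuming the tokenizer files from this commit are loadable from the tiiuae/falcon-40b repo:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-40b")  # assumed repo id

# A registered special token maps to a single id...
print(tokenizer.convert_tokens_to_ids(">>QUESTION<<"))

# ...and survives a round trip intact instead of being split up.
ids = tokenizer(">>QUESTION<<What is Falcon?>>ANSWER<<").input_ids
print(tokenizer.convert_ids_to_tokens(ids))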
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "add_prefix_space": false,
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "tokenizer_class": "PreTrainedTokenizerFast"
+ }
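Here model_max_length matches the model's 2048-token context window, and tokenizer_class pins the fast Rust-backed tokenizer, so no custom code is needed on the tokenizer side. A truncation sketch, again assuming the tiiuae/falcon-40b repo id:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-40b")  # assumed repo id
print(tokenizer.model_max_length)  # 2048, from tokenizer_config.json

# With truncation=True, encodings are clipped to model_max_length tokens.
enc = tokenizer("long input " * 4000, truncation=True)
print(len(enc.input_ids))  # at most 2048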