doubledsbv committed (verified)
Commit f73ca03 · 1 Parent(s): 859237a

Upload LlamaForCausalLM

Files changed (2)
  1. config.json +2 -2
  2. generation_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/workspace/router_experiments_token_prediction_instruct_opt/checkpoint-4000",
+  "_name_or_path": "/workspace/token_prediction/router_experiments_token_prediction_instruct_opt/checkpoint-4000",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -34,7 +34,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.0.dev0",
+  "transformers_version": "4.44.2",
   "use_cache": false,
   "vocab_size": 128265
 }
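
For reference, a minimal sketch of how the fields touched by this commit can be inspected after download, assuming transformers is installed; the repo id below is a hypothetical placeholder, since the actual repository name is not shown in this diff:

from transformers import AutoConfig

# Hypothetical repo id -- substitute the actual model repository.
repo_id = "doubledsbv/your-model-repo"

# Load config.json from the Hub and check the fields changed in this commit.
config = AutoConfig.from_pretrained(repo_id)
print(config.architectures)   # ["LlamaForCausalLM"]
print(config.torch_dtype)     # torch.bfloat16
print(config.vocab_size)      # 128265
print(getattr(config, "transformers_version", None))  # "4.44.2" after this commit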
generation_config.json CHANGED
@@ -8,5 +8,5 @@
   ],
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.45.0.dev0"
+  "transformers_version": "4.44.2"
 }
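
Likewise, a minimal sketch of how the sampling defaults in generation_config.json are read as the model's default generate() settings, again using a hypothetical repo id:

from transformers import GenerationConfig

# Hypothetical repo id -- substitute the actual model repository.
repo_id = "doubledsbv/your-model-repo"

# generation_config.json supplies the default sampling parameters for generate().
gen_config = GenerationConfig.from_pretrained(repo_id)
print(gen_config.temperature)  # 0.6
print(gen_config.top_p)        # 0.9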