mrs83 committed on
Commit
6a2e96e
·
verified ·
1 Parent(s): 69e4e9f

Upload Qwen2ForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -21,7 +21,7 @@
21
  "rope_theta": 1000000.0,
22
  "sliding_window": null,
23
  "tie_word_embeddings": true,
24
- "torch_dtype": "float32",
25
  "transformers_version": "4.47.0",
26
  "use_cache": true,
27
  "use_sliding_window": false,
 
21
  "rope_theta": 1000000.0,
22
  "sliding_window": null,
23
  "tie_word_embeddings": true,
24
+ "torch_dtype": "bfloat16",
25
  "transformers_version": "4.47.0",
26
  "use_cache": true,
27
  "use_sliding_window": false,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:07dbd8b792270542d4555de957832ba954ad72a9de022c3ec55dcc5e3f8f01f5
3
- size 1976163472
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:97752576a771af40f6d124bdf92860fffad17f734aff252319ae4a5600669382
3
+ size 1235824088