edbeeching HF Staff committed on
Commit
e4c2c81
·
verified ·
1 Parent(s): 71d8ba7

Training in progress, step 620

Browse files
Files changed (5) hide show
  1. README.md +2 -5
  2. config.json +1 -1
  3. generation_config.json +3 -1
  4. model.safetensors +1 -1
  5. training_args.bin +1 -1
README.md CHANGED
@@ -4,11 +4,8 @@ library_name: transformers
4
  model_name: Qwen3-4B-Base-SFT-tr5
5
  tags:
6
  - generated_from_trainer
7
- - trackio
8
- - sft
9
  - trl
10
- - trackio:https://huggingface.co/spaces/hf-imo-colab/trackio-distillation-sft
11
- - trl-internal
12
  licence: license
13
  ---
14
 
@@ -30,7 +27,7 @@ print(output["generated_text"])
30
 
31
  ## Training procedure
32
 
33
- [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/huggingface/imo-distillation/runs/fvlg88kc)
34
 
35
 
36
  This model was trained with SFT.
 
4
  model_name: Qwen3-4B-Base-SFT-tr5
5
  tags:
6
  - generated_from_trainer
 
 
7
  - trl
8
+ - sft
 
9
  licence: license
10
  ---
11
 
 
27
 
28
  ## Training procedure
29
 
30
+ [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/huggingface/imo-distillation/runs/rwhiohbr)
31
 
32
 
33
  This model was trained with SFT.
config.json CHANGED
@@ -65,7 +65,7 @@
65
  "sliding_window": null,
66
  "tie_word_embeddings": true,
67
  "transformers_version": "5.3.0.dev0",
68
- "use_cache": true,
69
  "use_sliding_window": false,
70
  "vocab_size": 151936
71
  }
 
65
  "sliding_window": null,
66
  "tie_word_embeddings": true,
67
  "transformers_version": "5.3.0.dev0",
68
+ "use_cache": false,
69
  "use_sliding_window": false,
70
  "vocab_size": 151936
71
  }
generation_config.json CHANGED
@@ -1,6 +1,8 @@
1
  {
2
  "do_sample": false,
3
- "eos_token_id": 151643,
 
 
4
  "max_new_tokens": 2048,
5
  "pad_token_id": 151643,
6
  "transformers_version": "5.3.0.dev0"
 
1
  {
2
  "do_sample": false,
3
+ "eos_token_id": [
4
+ 151643
5
+ ],
6
  "max_new_tokens": 2048,
7
  "pad_token_id": 151643,
8
  "transformers_version": "5.3.0.dev0"
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:12b69e1833887117f3ab0c686945d0c483eb3ab0f3e34574d274c1b004d4fe6b
3
  size 8044982080
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:68f82c8594386b72e0be03c688f8a7c1c3b2d46ddb0a79d49888cb430b43940c
3
  size 8044982080
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:2a282ed565619bce54554b67302f59d5f8ed0657bb4cbb1e7ce5d17dc4b3b765
3
  size 7569
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6455c6ec9c97ef7bba70922931b589528f81f5237ba44cf493b1ed5856fe4135
3
  size 7569