dacorvo (HF Staff) committed
Commit de014da · verified · 1 Parent(s): eb86d8a

Synchronizing local compiler cache.

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +62 -0
  2. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Maverick-17B-128E-Instruct/92d06886229b28ba55c9.json +190 -0
  3. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Maverick-17B-128E-Instruct/e713d700b97520ccbaf3.json +190 -0
  4. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/189ee41803917b6da16a.json +220 -0
  5. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/1c21e73cac3aa14addb0.json +220 -0
  6. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/3d7c1140c7502fe98f6e.json +220 -0
  7. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/3fde3020fc5b36354a0b.json +220 -0
  8. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/a39eb45a4062f4473f18.json +220 -0
  9. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/bfb5193dff82d5fb4061.json +220 -0
  10. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/c1e026d76ac15a6be5b0.json +220 -0
  11. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/d2028eebb97faf6a698e.json +220 -0
  12. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/e432fb219f614ac9c5c3.json +220 -0
  13. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/e66e3270b649b5ec2c17.json +220 -0
  14. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/qwen2/Qwen/Qwen2.5-0.5B/7c2cfae61ea18e16664b.json +82 -0
  15. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/qwen2/Qwen/Qwen2.5-0.5B/f6c9f13c77075dffcf24.json +82 -0
  16. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/smollm3/HuggingFaceTB/SmolLM3-3B/63a4d7661cc54355920b.json +134 -0
  17. neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0/qwen2/Qwen/Qwen2.5-0.5B/9d0b9126b8fed3c361f7.json +82 -0
  18. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/compile_flags.json +1 -0
  19. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.done +0 -0
  20. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.hlo_module.pb +3 -0
  21. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.neff +3 -0
  22. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/wrapped_neff.hlo +3 -0
  23. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/compile_flags.json +1 -0
  24. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.done +0 -0
  25. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.hlo_module.pb +3 -0
  26. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.neff +3 -0
  27. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/wrapped_neff.hlo +3 -0
  28. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/compile_flags.json +1 -0
  29. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.done +0 -0
  30. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.hlo_module.pb +3 -0
  31. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.neff +3 -0
  32. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/wrapped_neff.hlo +3 -0
  33. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/compile_flags.json +1 -0
  34. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.done +0 -0
  35. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.hlo_module.pb +3 -0
  36. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.neff +3 -0
  37. neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/wrapped_neff.hlo +3 -0
  38. neuronxcc-2.21.18209.0+043b1bf7/MODULE_1023294089137432912+e30acd3a/model.neff +0 -0
  39. neuronxcc-2.21.18209.0+043b1bf7/MODULE_10907752543637211265+e30acd3a/model.neff +0 -0
  40. neuronxcc-2.21.18209.0+043b1bf7/MODULE_10945286243277389437+e30acd3a/model.neff +0 -0
  41. neuronxcc-2.21.18209.0+043b1bf7/MODULE_10d60f8fad55e1974521+a9d440f5/model.neff +1 -1
  42. neuronxcc-2.21.18209.0+043b1bf7/MODULE_10d60f8fad55e1974521+a9d440f5/wrapped_neff.hlo +1 -1
  43. neuronxcc-2.21.18209.0+043b1bf7/MODULE_11400907432531398953+e30acd3a/model.neff +0 -0
  44. neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/compile_flags.json +1 -0
  45. neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.done +0 -0
  46. neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.hlo_module.pb +3 -0
  47. neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.neff +3 -0
  48. neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/wrapped_neff.hlo +3 -0
  49. neuronxcc-2.21.18209.0+043b1bf7/MODULE_12311847081050718470+e30acd3a/model.neff +0 -0
  50. neuronxcc-2.21.18209.0+043b1bf7/MODULE_12324009378304635855+e30acd3a/model.neff +0 -0
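
The paths above follow the compiler cache layout used in this repository: JSON entries under 0_REGISTRY/<optimum-neuron version>/<model type>/<model id>/ describe each cached model configuration, while MODULE_<hash>/ directories hold the compiled artifacts themselves (compile_flags.json, model.hlo_module.pb, model.neff, wrapped_neff.hlo). A minimal sketch, not part of this commit and assuming a hypothetical local clone path, that summarizes such a cache with only the Python standard library:

    from pathlib import Path

    # Assumed location of a local clone of this cache repository (hypothetical path).
    cache_root = Path("neuron-cache/neuronxcc-2.21.18209.0+043b1bf7")

    # Registry entries: one JSON file per cached model configuration.
    for entry in sorted(cache_root.glob("0_REGISTRY/*/*/*/*/*.json")):
        print("registry entry:", entry.relative_to(cache_root))

    # Module directories: compiled artifacts for one HLO module each.
    for module_dir in sorted(cache_root.glob("MODULE_*")):
        contents = ", ".join(sorted(p.name for p in module_dir.iterdir()))
        print(module_dir.name, "->", contents)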
.gitattributes CHANGED
@@ -5126,3 +5126,65 @@ neuronxcc-2.21.18209.0+043b1bf7/MODULE_e51f8e75d0439314e217+a9d440f5/model.neff
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_e51f8e75d0439314e217+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_fc37bb64f7e86324075c+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
  neuronxcc-2.21.18209.0+043b1bf7/MODULE_fef991cead1748c4d101+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1a0a6acfff620b4cb3f3+58b54a50/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1a0a6acfff620b4cb3f3+58b54a50/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1bef594b6c63d7e78ccc+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1bef594b6c63d7e78ccc+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1d8119012decad3afce0+7a6033f9/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_1d8119012decad3afce0+7a6033f9/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_24c207a15e469c523101+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_315b2743497ff0386da9+0da2eea7/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_315b2743497ff0386da9+0da2eea7/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_320d87d791127581beca+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_3a0b53d0c2e168ed7240+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_3a0b53d0c2e168ed7240+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4a6975528090471b6bd3+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4a6975528090471b6bd3+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cd12e88bce1104ae8ee+a1098300/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_4cd12e88bce1104ae8ee+a1098300/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_56bb43c14df243f59b9e+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_580532c7b0fa4e5cdc8f+8aadbcaf/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_580532c7b0fa4e5cdc8f+8aadbcaf/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_6fa218ba1695f9493ef4+00958a24/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_6fa218ba1695f9493ef4+00958a24/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_73dd17d3d6329d01c2ed+c85d9c4e/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_73dd17d3d6329d01c2ed+c85d9c4e/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_75e22a6ef5ef139d5357+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_75e22a6ef5ef139d5357+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_793c288702976d9a9911+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_7ce961870ffaa37cc217+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_7ce961870ffaa37cc217+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_8633e15902caf849591d+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_8633e15902caf849591d+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_97c0ff77a22597fd516a+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_9c0e3086d07ddd9ff175+315bf1af/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_9c0e3086d07ddd9ff175+315bf1af/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_9f1c978a730025d88f0e+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_a4a3e81c085b07aab6a4+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_a69791d72cf8725fb00e+e805b8c0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_a69791d72cf8725fb00e+e805b8c0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_b46b54a1df4af0029c62+747527b0/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_b46b54a1df4af0029c62+747527b0/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_b832fb0510340d63437b+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_d5188e62408152b5598c+2ebd2f30/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_d5188e62408152b5598c+2ebd2f30/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_d7db5c9ba7baaf53944a+a9d440f5/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_d7db5c9ba7baaf53944a+a9d440f5/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_debb5ac3650b1ea8899d+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_e2afad6a215666c7282f+f4980c5f/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_e2afad6a215666c7282f+f4980c5f/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_e9b2f3506069901c000d+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_ec52a194ef2b40528966+ed72d204/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_f9a173c45d9639a1e62c+e9978ceb/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_f9a173c45d9639a1e62c+e9978ceb/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.18209.0+043b1bf7/MODULE_fa62c309b752d85d2be8+877608f3/model.neff filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Maverick-17B-128E-Instruct/92d06886229b28ba55c9.json ADDED
@@ -0,0 +1,190 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 2,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 1048576,
  "model_type": "llama4_text",
  "moe_layers": [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 1,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
    "checkpoint_revision": "73d14711bcc77c16df3470856949c3764056b617",
    "continuous_batching": false,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 1,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 128,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": false,
  "vocab_size": 202048
}
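
Each registry entry pairs the exported model configuration with a "neuron" block recording how the artifact was compiled (batch size, sequence length, tensor parallel degree, target instance family, compiler and optimum-neuron versions). A minimal sketch for inspecting the entry above with the Python standard library only; the local file path is an assumption:

    import json
    from pathlib import Path

    # Hypothetical local path to the registry entry shown above.
    entry_path = Path(
        "0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/"
        "Llama-4-Maverick-17B-128E-Instruct/92d06886229b28ba55c9.json"
    )

    entry = json.loads(entry_path.read_text())
    neuron = entry["neuron"]  # compilation parameters recorded for this artifact

    print("model:   ", entry["_model_id"], entry["_task"])
    print("target:  ", neuron["target"], "tp_degree", neuron["tp_degree"])
    print("batching:", neuron["batch_size"], "max", neuron["max_batch_size"],
          "continuous" if neuron["continuous_batching"] else "static")
    print("sequence:", neuron["sequence_length"], neuron["torch_dtype"])
    print("compiler:", neuron["neuronxcc_version"],
          "optimum-neuron", neuron["optimum_neuron_version"])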
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Maverick-17B-128E-Instruct/e713d700b97520ccbaf3.json ADDED
@@ -0,0 +1,190 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 2,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 1048576,
  "model_type": "llama4_text",
  "moe_layers": [1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41, 43, 45, 47],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Maverick-17B-128E-Instruct",
    "checkpoint_revision": "73d14711bcc77c16df3470856949c3764056b617",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 64,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 64
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 128,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": false,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/189ee41803917b6da16a.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 1,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": false,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 32,
    "max_batch_size": 1,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 32
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/1c21e73cac3aa14addb0.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 32,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 32
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/3d7c1140c7502fe98f6e.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 1,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": false,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 32,
    "max_batch_size": 1,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 32
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/3fde3020fc5b36354a0b.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 16,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 32,
    "max_batch_size": 16,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 32
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/a39eb45a4062f4473f18.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 8,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 32,
    "max_batch_size": 8,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 32
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/bfb5193dff82d5fb4061.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 16,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn1",
    "torch_dtype": "bfloat16",
    "tp_degree": 16
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/c1e026d76ac15a6be5b0.json ADDED
@@ -0,0 +1,220 @@
{
  "_entry_class": "SingleModelCacheEntry",
  "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
  "_task": "text-generation",
  "attention_bias": false,
  "attention_chunk_size": 8192,
  "attention_dropout": 0.0,
  "attn_scale": 0.1,
  "attn_temperature_tuning": true,
  "floor_scale": 8192,
  "for_llm_compressor": false,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "interleave_moe_layer_step": 1,
  "intermediate_size": 8192,
  "intermediate_size_mlp": 16384,
  "layer_types": [
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention",
    "chunked_attention", "chunked_attention", "chunked_attention", "full_attention"
  ],
  "max_position_embeddings": 10485760,
  "model_type": "llama4_text",
  "moe_layers": [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
    24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47
  ],
  "neuron": {
    "_serialized_key": "NxDNeuronConfig",
    "batch_size": 4,
    "capacity_factor": null,
    "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
    "continuous_batching": true,
    "enable_bucketing": false,
    "ep_degree": 1,
    "fused_qkv": false,
    "glu_mlp": true,
    "local_ranks_size": 32,
    "max_batch_size": 4,
    "max_context_length": 4096,
    "max_topk": 256,
    "n_active_tokens": 4096,
    "neuronxcc_version": "2.21.18209.0+043b1bf7",
    "on_device_sampling": true,
    "optimum_neuron_version": "0.4.0.dev0",
    "output_logits": false,
    "pp_degree": 1,
    "sequence_length": 4096,
    "speculation_length": 0,
    "start_rank_id": 0,
    "target": "trn2",
    "torch_dtype": "bfloat16",
    "tp_degree": 32
  },
  "no_rope_layers": [
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0,
    1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
  ],
  "num_attention_heads": 40,
  "num_experts_per_tok": 1,
  "num_hidden_layers": 48,
  "num_key_value_heads": 8,
  "num_local_experts": 16,
  "output_router_logits": false,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 16.0,
    "high_freq_factor": 1.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "router_aux_loss_coef": 0.001,
  "router_jitter_noise": 0.0,
  "tie_word_embeddings": false,
  "use_cache": true,
  "use_qk_norm": true,
  "vocab_size": 202048
}
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/d2028eebb97faf6a698e.json ADDED
@@ -0,0 +1,220 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+ "_task": "text-generation",
+ "attention_bias": false,
+ "attention_chunk_size": 8192,
+ "attention_dropout": 0.0,
+ "attn_scale": 0.1,
+ "attn_temperature_tuning": true,
+ "floor_scale": 8192,
+ "for_llm_compressor": false,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "interleave_moe_layer_step": 1,
+ "intermediate_size": 8192,
+ "intermediate_size_mlp": 16384,
+ "layer_types": [
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 10485760,
+ "model_type": "llama4_text",
+ "moe_layers": [
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47
+ ],
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 8,
+ "capacity_factor": null,
+ "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+ "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 32,
+ "max_batch_size": 8,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 32
+ },
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 40,
+ "num_experts_per_tok": 1,
+ "num_hidden_layers": 48,
+ "num_key_value_heads": 8,
+ "num_local_experts": 16,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 16.0,
+ "high_freq_factor": 1.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_qk_norm": true,
+ "vocab_size": 202048
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/e432fb219f614ac9c5c3.json ADDED
@@ -0,0 +1,220 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+ "_task": "text-generation",
+ "attention_bias": false,
+ "attention_chunk_size": 8192,
+ "attention_dropout": 0.0,
+ "attn_scale": 0.1,
+ "attn_temperature_tuning": true,
+ "floor_scale": 8192,
+ "for_llm_compressor": false,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "interleave_moe_layer_step": 1,
+ "intermediate_size": 8192,
+ "intermediate_size_mlp": 16384,
+ "layer_types": [
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 10485760,
+ "model_type": "llama4_text",
+ "moe_layers": [
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47
+ ],
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+ "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 16,
+ "max_batch_size": 1,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 16
+ },
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 40,
+ "num_experts_per_tok": 1,
+ "num_hidden_layers": 48,
+ "num_key_value_heads": 8,
+ "num_local_experts": 16,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 16.0,
+ "high_freq_factor": 1.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_qk_norm": true,
+ "vocab_size": 202048
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/llama4_text/meta-llama/Llama-4-Scout-17B-16E-Instruct/e66e3270b649b5ec2c17.json ADDED
@@ -0,0 +1,220 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+ "_task": "text-generation",
+ "attention_bias": false,
+ "attention_chunk_size": 8192,
+ "attention_dropout": 0.0,
+ "attn_scale": 0.1,
+ "attn_temperature_tuning": true,
+ "floor_scale": 8192,
+ "for_llm_compressor": false,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "interleave_moe_layer_step": 1,
+ "intermediate_size": 8192,
+ "intermediate_size_mlp": 16384,
+ "layer_types": [
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 10485760,
+ "model_type": "llama4_text",
+ "moe_layers": [
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 21,
+ 22,
+ 23,
+ 24,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 30,
+ 31,
+ 32,
+ 33,
+ 34,
+ 35,
+ 36,
+ 37,
+ 38,
+ 39,
+ 40,
+ 41,
+ 42,
+ 43,
+ 44,
+ 45,
+ 46,
+ 47
+ ],
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 32,
+ "capacity_factor": null,
+ "checkpoint_id": "meta-llama/Llama-4-Scout-17B-16E-Instruct",
+ "checkpoint_revision": "92f3b1597a195b523d8d9e5700e57e4fbb8f20d3",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 32,
+ "max_batch_size": 32,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn2",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 32
+ },
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 40,
+ "num_experts_per_tok": 1,
+ "num_hidden_layers": 48,
+ "num_key_value_heads": 8,
+ "num_local_experts": 16,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 16.0,
+ "high_freq_factor": 1.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_qk_norm": true,
+ "vocab_size": 202048
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/qwen2/Qwen/Qwen2.5-0.5B/7c2cfae61ea18e16664b.json ADDED
@@ -0,0 +1,82 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "Qwen/Qwen2.5-0.5B",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 896,
+ "initializer_range": 0.02,
+ "intermediate_size": 4864,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 24,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "Qwen/Qwen2.5-0.5B",
+ "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float32",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 14,
+ "num_hidden_layers": 24,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "use_cache": true,
+ "use_mrope": false,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/qwen2/Qwen/Qwen2.5-0.5B/f6c9f13c77075dffcf24.json ADDED
@@ -0,0 +1,82 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "Qwen/Qwen2.5-0.5B",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 896,
+ "initializer_range": 0.02,
+ "intermediate_size": 4864,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 24,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 2,
+ "capacity_factor": null,
+ "checkpoint_id": "Qwen/Qwen2.5-0.5B",
+ "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 2,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 14,
+ "num_hidden_layers": 24,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "use_cache": true,
+ "use_mrope": false,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0.dev0/smollm3/HuggingFaceTB/SmolLM3-3B/63a4d7661cc54355920b.json ADDED
@@ -0,0 +1,134 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "HuggingFaceTB/SmolLM3-3B",
+ "_task": "text-generation",
+ "architectures": [
+ "SmolLM3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 65536,
+ "max_window_layers": 28,
+ "mlp_bias": false,
+ "model_type": "smollm3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 32,
+ "capacity_factor": null,
+ "checkpoint_id": "HuggingFaceTB/SmolLM3-3B",
+ "checkpoint_revision": "a07cc9a04f16550a088caea529712d1d335b0ac1",
+ "continuous_batching": true,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 8,
+ "max_batch_size": 32,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0.dev0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 8
+ },
+ "no_rope_layer_interval": 4,
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 16,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 5000000.0,
+ "sliding_window": null,
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.18209.0+043b1bf7/0_REGISTRY/0.4.0/qwen2/Qwen/Qwen2.5-0.5B/9d0b9126b8fed3c361f7.json ADDED
@@ -0,0 +1,82 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "Qwen/Qwen2.5-0.5B",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 896,
+ "initializer_range": 0.02,
+ "intermediate_size": 4864,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 24,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "Qwen/Qwen2.5-0.5B",
+ "checkpoint_revision": "060db6499f32faf8b98477b0a26969ef7d8b9987",
+ "continuous_batching": false,
+ "enable_bucketing": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 4096,
+ "max_topk": 256,
+ "n_active_tokens": 4096,
+ "neuronxcc_version": "2.21.18209.0+043b1bf7",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.0",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 4096,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 14,
+ "num_hidden_layers": 24,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "use_cache": true,
+ "use_mrope": false,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ff1ffbb3a791b1c36ae1319a865a5564b10e1e83ee848dd2fb3c2bc9acf0921
+ size 102479643
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:32cc8c880a2cc4b487bae5ef92e7add8c29d88e9e74fd1de9672e5cff7db6f8e
+ size 4834304
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c1c92e265d540052c2e+a9d440f5/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b5e0c18a0a887bff9e5f6669decaddbb7e1483e15dacbc99bda7bde1caa1c49
+ size 5173724
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22e4b6de1df0c178588eef5edb3866b12ef453071858429ba8e9089a467e55e6
+ size 103307507
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3eee937486cface48af548d55a794c337698827c86b0bc7ef84b36941b9cf31b
+ size 7588864
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0c2afb109a81f924f7e9+a9d440f5/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc2c1479a5fe411b5290a2bbbe52d14ebd54a9b739775901022f3adb7f6d0dc9
+ size 7928284
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8815b5584c7dacbd715a9bee3bedbcf3606b5bdb06960536624ba05b7cb25eda
+ size 105618580
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9f63a18c435dd0f739a365673f43e31532c75d2cca517648697c6d4d1ce1ce28
+ size 14797824
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0ca220bc75b3fb7d4e02+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a059c8fb267fb45fc18669117aad72fea82f53cd4aea74fd01b0fce0573fe70e
+ size 15139158
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn2", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=2", "--logfile=/tmp/nxd_model/token_generation_model/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:533e90f4e441dc6105323721d6d979f31ced33e5299c590db811dba7e8bfd6e8
+ size 105706212
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:400b0601dd93f0ebb2e5b993813630eafb168ac65553108b36630d1c497d9dbe
+ size 25263104
neuronxcc-2.21.18209.0+043b1bf7/MODULE_0dfc4a6617f4a9f8c8d1+747527b0/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4160843e4f9dcf060943c8a66793d3132df9397ce079a911c1f6890df21b702
+ size 25603080
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1023294089137432912+e30acd3a/model.neff CHANGED
Binary files a/neuronxcc-2.21.18209.0+043b1bf7/MODULE_1023294089137432912+e30acd3a/model.neff and b/neuronxcc-2.21.18209.0+043b1bf7/MODULE_1023294089137432912+e30acd3a/model.neff differ
neuronxcc-2.21.18209.0+043b1bf7/MODULE_10907752543637211265+e30acd3a/model.neff CHANGED
Binary files a/neuronxcc-2.21.18209.0+043b1bf7/MODULE_10907752543637211265+e30acd3a/model.neff and b/neuronxcc-2.21.18209.0+043b1bf7/MODULE_10907752543637211265+e30acd3a/model.neff differ
neuronxcc-2.21.18209.0+043b1bf7/MODULE_10945286243277389437+e30acd3a/model.neff CHANGED
Binary files a/neuronxcc-2.21.18209.0+043b1bf7/MODULE_10945286243277389437+e30acd3a/model.neff and b/neuronxcc-2.21.18209.0+043b1bf7/MODULE_10945286243277389437+e30acd3a/model.neff differ
neuronxcc-2.21.18209.0+043b1bf7/MODULE_10d60f8fad55e1974521+a9d440f5/model.neff CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:25d5794197cc1c440a87ceb511f5c1be579e938e1804d40a69b8ef1de5520571
+ oid sha256:7e64a278dbf0c045ccb9ea741ee91d91ab9ba571401072f8f5fc1d0484700fc7
 size 277504
neuronxcc-2.21.18209.0+043b1bf7/MODULE_10d60f8fad55e1974521+a9d440f5/wrapped_neff.hlo CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:f620b1d7b37b2896c0fbf86202063926fb1a18d68cc3d20eecca1c10090bc877
+ oid sha256:b7a05a0e98e0e38d6aa3fd477a824b23547291df269f411559460f9f97c66192
 size 285854
neuronxcc-2.21.18209.0+043b1bf7/MODULE_11400907432531398953+e30acd3a/model.neff CHANGED
Binary files a/neuronxcc-2.21.18209.0+043b1bf7/MODULE_11400907432531398953+e30acd3a/model.neff and b/neuronxcc-2.21.18209.0+043b1bf7/MODULE_11400907432531398953+e30acd3a/model.neff differ
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--enable-saturate-infinity", "--auto-cast=none", "--model-type=transformer", "-O1", "--logfile=/tmp/nxdi_test_8f951ccc-7e17-4fbd-94b5-f2642cb99e0b/compiler_workdir/NeuronRMSNorm/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.done ADDED
File without changes
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c774c6626fc91684e924b843c1fe807dadbbac6d8a8e19443663b43c13eac35
+ size 1165
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0c9ded24def6d0a6c3cebcdc5522c189fc492723ad673389ad51b70cae51501
+ size 103424
neuronxcc-2.21.18209.0+043b1bf7/MODULE_1229cd5e7cd44cac2147+3aa04906/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8838a67b9bf87d116c128ddaebe8d77d446d7111e5b1243076365791a97348e
+ size 104320
neuronxcc-2.21.18209.0+043b1bf7/MODULE_12311847081050718470+e30acd3a/model.neff CHANGED
Binary files a/neuronxcc-2.21.18209.0+043b1bf7/MODULE_12311847081050718470+e30acd3a/model.neff and b/neuronxcc-2.21.18209.0+043b1bf7/MODULE_12311847081050718470+e30acd3a/model.neff differ
neuronxcc-2.21.18209.0+043b1bf7/MODULE_12324009378304635855+e30acd3a/model.neff CHANGED
Binary files a/neuronxcc-2.21.18209.0+043b1bf7/MODULE_12324009378304635855+e30acd3a/model.neff and b/neuronxcc-2.21.18209.0+043b1bf7/MODULE_12324009378304635855+e30acd3a/model.neff differ