Commit

add llama

samsja committed Jul 23, 2024
1 parent e709312 commit 19814f6
Showing 7 changed files with 859 additions and 31 deletions.
16 changes: 6 additions & 10 deletions open_diloco/configs/config_1b.json
@@ -1,13 +1,9 @@
 {
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "model_type": "llama",
-  "hidden_size": 2048,
+  "name": "llama",
+  "n_embd": 2048,
   "intermediate_size": 5632,
-  "num_attention_heads": 32,
-  "num_hidden_layers": 22,
-  "use_cache": false,
-  "rms_norm_eps": 1e-05,
-  "num_key_value_heads": 4
+  "n_head": 32,
+  "n_layer": 22,
+  "n_query_groups": 4,
+  "vocab_size": 1024
 }
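
The commit replaces Hugging Face-style config keys with the n_embd/n_head/n_layer naming common to nanoGPT/litgpt-style implementations; comparing values across the diff, "hidden_size" maps to "n_embd", "num_attention_heads" to "n_head", "num_hidden_layers" to "n_layer", and "num_key_value_heads" to "n_query_groups". A minimal sketch of how such a config file might be consumed — the ModelConfig dataclass and load_config helper are hypothetical illustrations, not part of this commit:

import json
from dataclasses import dataclass
from typing import Optional

@dataclass
class ModelConfig:
    name: str
    n_embd: int             # embedding / hidden dimension (was "hidden_size")
    intermediate_size: int  # MLP inner dimension
    n_head: int             # attention heads (was "num_attention_heads")
    n_layer: int            # transformer blocks (was "num_hidden_layers")
    vocab_size: int
    n_query_groups: Optional[int] = None  # grouped-query attention (was "num_key_value_heads")

def load_config(path: str) -> ModelConfig:
    # Build the config directly from the JSON keys in the new schema.
    with open(path) as f:
        return ModelConfig(**json.load(f))

cfg = load_config("open_diloco/configs/config_1b.json")
print(cfg.name, cfg.n_layer, cfg.n_embd)  # llama 22 2048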
15 changes: 5 additions & 10 deletions open_diloco/configs/config_2m.json
@@ -1,15 +1,10 @@
 {
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "model_type": "llama",
-  "hidden_size": 64,
+  "name": "llama_2m",
+  "n_embd": 64,
   "intermediate_size": 256,
-  "num_attention_heads": 2,
-  "num_hidden_layers": 2,
-  "rms_norm_eps": 1e-05,
-  "use_cache": false,
+  "n_head": 2,
+  "n_layer": 2,
+  "vocab_size": 1024
 }
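
The tiny llama_2m config follows the same schema, minus "n_query_groups". After a schema migration like this, a quick sanity check can catch configs that were missed. A sketch under assumptions: the configs directory is the one named in the file headers, and REQUIRED_KEYS reflects only the keys visible in these two diffs; the script itself is illustrative, not part of the commit:

import json
from pathlib import Path

# Keys every config should define under the new schema; n_query_groups stays
# optional since config_2m.json does not set it.
REQUIRED_KEYS = {"name", "n_embd", "intermediate_size", "n_head", "n_layer", "vocab_size"}

for path in sorted(Path("open_diloco/configs").glob("*.json")):
    cfg = json.loads(path.read_text())
    missing = REQUIRED_KEYS - cfg.keys()
    assert not missing, f"{path.name} is missing keys: {missing}"
    print(f"{path.name}: OK (n_layer={cfg['n_layer']}, n_embd={cfg['n_embd']})")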

