Skip to content

Commit

Permalink
adjust correct ngl number (#3081)
Browse files Browse the repository at this point in the history
  • Loading branch information
hahuyhoang411 authored Jun 21, 2024
1 parent d228407 commit 71a707a
Show file tree
Hide file tree
Showing 29 changed files with 29 additions and 29 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system_prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
"llama_model_path": "aya-23-35B-Q4_K_M.gguf",
"ngl": 40
"ngl": 41
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|>{system_prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
"llama_model_path": "aya-23-8B-Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "GPT4 Correct User: {prompt}<|end_of_turn|>GPT4 Correct Assistant:",
"llama_model_path": "codeninja-1.0-openchat-7b.Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32000,
"prompt_template": "{system_message} [INST] {prompt} [/INST]",
"llama_model_path": "Codestral-22B-v0.1-Q4_K_M.gguf",
"ngl": 56
"ngl": 57
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 131072,
"prompt_template": "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
"llama_model_path": "c4ai-command-r-v01-Q4_K_M.gguf",
"ngl": 40
"ngl": 41
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 16384,
"prompt_template": "### Instruction:\n{prompt}\n### Response:",
"llama_model_path": "deepseek-coder-1.3b-instruct.Q8_0.gguf",
"ngl": 24
"ngl": 25
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 16384,
"prompt_template": "### Instruction:\n{prompt}\n### Response:",
"llama_model_path": "deepseek-coder-33b-instruct.Q4_K_M.gguf",
"ngl": 62
"ngl": 63
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "<start_of_turn>user\n{prompt}<end_of_turn>\n<start_of_turn>model",
"llama_model_path": "gemma-2b-it-q4_k_m.gguf",
"ngl": 18
"ngl": 19
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "<start_of_turn>user\n{prompt}<end_of_turn>\n<start_of_turn>model",
"llama_model_path": "gemma-7b-it-q4_K_M.gguf",
"ngl": 28
"ngl": 29
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 4096,
"prompt_template": "[INST] <<SYS>>\n{system_message}<</SYS>>\n{prompt}[/INST]",
"llama_model_path": "llama-2-70b-chat.Q4_K_M.gguf",
"ngl": 80
"ngl": 81
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 4096,
"prompt_template": "[INST] <<SYS>>\n{system_message}<</SYS>>\n{prompt}[/INST]",
"llama_model_path": "llama-2-7b-chat.Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n{system_message}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
"llama_model_path": "Meta-Llama-3-8B-Instruct-Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "Hermes-2-Pro-Llama-3-8B-Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 2048,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "llamacorn-1.1b-chat.Q8_0.gguf",
"ngl": 22
"ngl": 23
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "{system_message} [INST] {prompt} [/INST]",
"llama_model_path": "Mistral-7B-Instruct-v0.3-Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "Noromaid-7B-0.4-DPO.q4_k_m.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 8192,
"prompt_template": "GPT4 Correct User: {prompt}<|end_of_turn|>GPT4 Correct Assistant:",
"llama_model_path": "openchat-3.5-0106.Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 4096,
"prompt_template": "<|user|>\n{prompt}<|end|>\n<|assistant|>\n",
"llama_model_path": "Phi-3-mini-4k-instruct-q4.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"max_tokens": 4096,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 128000,
"prompt_template": "<|user|>\n{prompt}<|end|>\n<|assistant|>\n",
"llama_model_path": "Phi-3-medium-128k-instruct-Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"max_tokens": 128000,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 16384,
"prompt_template": "### System Prompt\n{system_message}\n### User Message\n{prompt}\n### Assistant",
"llama_model_path": "phind-codellama-34b-v2.Q4_K_M.gguf",
"ngl": 48
"ngl": 49
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "qwen1_5-7b-chat-q4_k_m.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "Qwen2-7B-Instruct-Q4_K_M.gguf",
"ngl": 28
"ngl": 29
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 4096,
"prompt_template": "<|user|>\n{prompt}<|endoftext|>\n<|assistant|>",
"llama_model_path": "stablelm-zephyr-3b.Q8_0.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "stealth-v1.3.Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 4096,
"prompt_template": "<|system|>\n{system_message}<|user|>\n{prompt}<|assistant|>",
"llama_model_path": "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf",
"ngl": 22
"ngl": 23
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "trinity-v1.2.Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 32768,
"prompt_template": "[INST] <<SYS>>\n{system_message}\n<</SYS>>\n{prompt} [/INST]",
"llama_model_path": "vistral-7b-chat-dpo.Q4_K_M.gguf",
"ngl": 32
"ngl": 33
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 16384,
"prompt_template": "### Instruction:\n{prompt}\n### Response:",
"llama_model_path": "wizardcoder-python-13b-v1.0.Q4_K_M.gguf",
"ngl": 40
"ngl": 41
},
"parameters": {
"temperature": 0.7,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"ctx_len": 4096,
"prompt_template": "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant",
"llama_model_path": "yi-34b-chat.Q4_K_M.gguf",
"ngl": 60
"ngl": 61
},
"parameters": {
"temperature": 0.7,
Expand Down

0 comments on commit 71a707a

Please sign in to comment.