We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7cc408a · commit 23e2a32 · Copy full SHA for 23e2a32
extensions/llamacpp-extension/src/index.ts
@@ -1182,7 +1182,9 @@ export default class llamacpp_extension extends AIEngine {
1182
1183
// Add remaining options from the interface
1184
if (cfg.chat_template) args.push('--chat-template', cfg.chat_template)
1185
- args.push('-ngl', String(cfg.n_gpu_layers >= 0 ? cfg.n_gpu_layers : 100))
+ const gpu_layers =
1186
+ parseInt(String(cfg.n_gpu_layers)) >= 0 ? cfg.n_gpu_layers : 100
1187
+ args.push('-ngl', String(gpu_layers))
1188
if (cfg.threads > 0) args.push('--threads', String(cfg.threads))
1189
if (cfg.threads_batch > 0)
1190
args.push('--threads-batch', String(cfg.threads_batch))
0 commit comments