@@ -1263,6 +1263,18 @@ static std::string list_builtin_chat_templates() {
     return msg.str();
 }
 
+static bool is_truthy(const std::string & value) {
+    return value == "on" || value == "enabled" || value == "1";
+}
+
+static bool is_falsey(const std::string & value) {
+    return value == "off" || value == "disabled" || value == "0";
+}
+
+static bool is_autoy(const std::string & value) {
+    return value == "auto" || value == "-1";
+}
+
 common_params_context common_params_parser_init(common_params & params, llama_example ex, void(*print_usage)(int, char **)) {
     // load dynamic backends
     ggml_backend_load_all();
@@ -1544,21 +1556,22 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
             params.n_chunks = value;
         }
     ).set_examples({LLAMA_EXAMPLE_IMATRIX, LLAMA_EXAMPLE_PERPLEXITY, LLAMA_EXAMPLE_RETRIEVAL}));
-    add_opt(common_arg(
-        {"-fa", "--flash-attn", "FA"},
-        string_format("set Flash Attention use ('on', 'off', or 'auto', default: '%s')", llama_flash_attn_type_name(params.flash_attn_type)),
-        [](common_params & params, const std::string & value) {
-            if (value == "on" || value == "enabled" || value == "1") {
-                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_ENABLED;
-            } else if (value == "off" || value == "disabled" || value == "0") {
-                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_DISABLED;
-            } else if (value == "auto" || value == "-1") {
-                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_AUTO;
-            } else {
-                throw std::runtime_error(string_format("error: unknown value for --flash-attn: '%s'\n", value.c_str()));
-            }
-        }
-    ).set_env("LLAMA_ARG_FLASH_ATTN"));
+    add_opt(common_arg({ "-fa", "--flash-attn", "[on|off|auto]" },
+                       string_format("set Flash Attention use ('on', 'off', or 'auto', default: '%s')",
+                                     llama_flash_attn_type_name(params.flash_attn_type)),
+                       [](common_params & params, const std::string & value) {
+                           if (is_truthy(value)) {
+                               params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_ENABLED;
+                           } else if (is_falsey(value)) {
+                               params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_DISABLED;
+                           } else if (is_autoy(value)) {
+                               params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_AUTO;
+                           } else {
+                               throw std::runtime_error(
+                                   string_format("error: unknown value for --flash-attn: '%s'\n", value.c_str()));
+                           }
+                       })
+                .set_env("LLAMA_ARG_FLASH_ATTN"));
     add_opt(common_arg(
         {"-p", "--prompt", "PROMPT"},
         "prompt to start generation with; for system message, use -sys",
@@ -3134,13 +3147,22 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
             common_log_set_file(common_log_main(), value.c_str());
         }
     ));
-    add_opt(common_arg(
-        {"--log-colors"},
-        "Enable colored logging",
-        [](common_params &) {
-            common_log_set_colors(common_log_main(), true);
-        }
-    ).set_env("LLAMA_LOG_COLORS"));
+    add_opt(common_arg({ "--log-colors", "[on|off|auto]" },
+                       "Enable colored logging ('on', 'off', or 'auto', default: 'auto')\n"
+                       "'auto' enables colors when output is to a terminal",
+                       [](common_params &, const std::string & value) {
+                           if (is_truthy(value)) {
+                               common_log_set_colors(common_log_main(), LOG_COLORS_ENABLED);
+                           } else if (is_falsey(value)) {
+                               common_log_set_colors(common_log_main(), LOG_COLORS_DISABLED);
+                           } else if (is_autoy(value)) {
+                               common_log_set_colors(common_log_main(), LOG_COLORS_AUTO);
+                           } else {
+                               throw std::invalid_argument(
+                                   string_format("error: unknown value for --log-colors: '%s'\n", value.c_str()));
+                           }
+                       })
+                .set_env("LLAMA_LOG_COLORS"));
     add_opt(common_arg(
         {"-v", "--verbose", "--log-verbose"},
         "Set verbosity level to infinity (i.e. log all messages, useful for debugging)",
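
The diff above boils down to mapping a user-supplied string onto a tri-state setting through the shared is_truthy/is_falsey/is_autoy helpers; `-fa on`, `-fa off`, `-fa auto` (and the same values via the `LLAMA_ARG_FLASH_ATTN` or `LLAMA_LOG_COLORS` environment variables) all route through them. As a rough standalone sketch of that pattern, under the assumption that a hypothetical tri_state enum and parse_tri_state helper stand in for the llama.cpp-specific types, the same logic looks like this:

// Minimal standalone sketch of the tri-state parsing pattern in the diff.
// tri_state and parse_tri_state are illustrative names, not llama.cpp API.
#include <iostream>
#include <stdexcept>
#include <string>

enum class tri_state { enabled, disabled, auto_detect };

static bool is_truthy(const std::string & v) { return v == "on"  || v == "enabled"  || v == "1"; }
static bool is_falsey(const std::string & v) { return v == "off" || v == "disabled" || v == "0"; }
static bool is_autoy (const std::string & v) { return v == "auto" || v == "-1"; }

static tri_state parse_tri_state(const std::string & value) {
    if (is_truthy(value)) { return tri_state::enabled;     }
    if (is_falsey(value)) { return tri_state::disabled;    }
    if (is_autoy(value))  { return tri_state::auto_detect; }
    throw std::invalid_argument("unknown value: '" + value + "'");
}

int main() {
    // e.g. values a user might pass to `-fa` / `--flash-attn` or `--log-colors`
    for (const std::string v : { "on", "0", "auto" }) {
        std::cout << v << " -> " << static_cast<int>(parse_tri_state(v)) << "\n";
    }
    return 0;
}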