@@ -1263,6 +1263,18 @@ static std::string list_builtin_chat_templates() {
     return msg.str();
 }
 
+static bool is_truthy(const std::string & value) {
+    return value == "on" || value == "enabled" || value == "1";
+}
+
+static bool is_falsey(const std::string & value) {
+    return value == "off" || value == "disabled" || value == "0";
+}
+
+static bool is_autoy(const std::string & value) {
+    return value == "auto" || value == "-1";
+}
+
 common_params_context common_params_parser_init(common_params & params, llama_example ex, void(*print_usage)(int, char **)) {
     // load dynamic backends
     ggml_backend_load_all();
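The three helpers centralize the value spellings that the individual option handlers previously matched inline: "on"/"enabled"/"1", "off"/"disabled"/"0", and "auto"/"-1". A rough standalone sketch of the classification they perform, with the helper bodies copied from the hunk above and everything else (main, the sample inputs) purely illustrative:

// Standalone sketch of the tri-state classification added above.
// Only the three helpers come from the diff; the driver code is illustrative.
#include <iostream>
#include <string>

static bool is_truthy(const std::string & value) {
    return value == "on" || value == "enabled" || value == "1";
}

static bool is_falsey(const std::string & value) {
    return value == "off" || value == "disabled" || value == "0";
}

static bool is_autoy(const std::string & value) {
    return value == "auto" || value == "-1";
}

int main() {
    std::cout << std::boolalpha;
    // Anything outside the three groups (e.g. "yes") matches none of the helpers,
    // so the option handlers throw instead of guessing.
    for (const char * s : {"on", "enabled", "1", "off", "0", "auto", "-1", "yes"}) {
        const std::string v = s;
        std::cout << v << ": truthy=" << is_truthy(v)
                  << " falsey=" << is_falsey(v)
                  << " autoy="  << is_autoy(v) << "\n";
    }
    return 0;
}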
@@ -1544,21 +1556,21 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
             params.n_chunks = value;
         }
     ).set_examples({LLAMA_EXAMPLE_IMATRIX, LLAMA_EXAMPLE_PERPLEXITY, LLAMA_EXAMPLE_RETRIEVAL}));
-    add_opt(common_arg(
-        {"-fa", "--flash-attn"}, "FA",
-        string_format("set Flash Attention use ('on', 'off', or 'auto', default: '%s')", llama_flash_attn_type_name(params.flash_attn_type)),
-        [](common_params & params, const std::string & value) {
-            if (value == "on" || value == "enabled" || value == "1") {
-                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_ENABLED;
-            } else if (value == "off" || value == "disabled" || value == "0") {
-                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_DISABLED;
-            } else if (value == "auto" || value == "-1") {
-                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_AUTO;
-            } else {
-                throw std::runtime_error(string_format("error: unkown value for --flash-attn: '%s'\n", value.c_str()));
-            }
-        }
-    ).set_env("LLAMA_ARG_FLASH_ATTN"));
+    add_opt(common_arg({"-fa", "--flash-attn"}, "[on|off|auto]",
+        string_format("set Flash Attention use ('on', 'off', or 'auto', default: '%s')",
+                      llama_flash_attn_type_name(params.flash_attn_type)),
+        [](common_params & params, const std::string & value) {
+            if (is_truthy(value)) {
+                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_ENABLED;
+            } else if (is_falsey(value)) {
+                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_DISABLED;
+            } else if (is_autoy(value)) {
+                params.flash_attn_type = LLAMA_FLASH_ATTN_TYPE_AUTO;
+            } else {
+                throw std::runtime_error(
+                    string_format("error: unknown value for --flash-attn: '%s'\n", value.c_str()));
+            }
+        }).set_env("LLAMA_ARG_FLASH_ATTN"));
     add_opt(common_arg(
         {"-p", "--prompt"}, "PROMPT",
         "prompt to start generation with; for system message, use -sys",
@@ -3134,13 +3146,21 @@ common_params_context common_params_parser_init(common_params & params, llama_ex
             common_log_set_file(common_log_main(), value.c_str());
         }
     ));
-    add_opt(common_arg(
-        {"--log-colors"},
-        "Enable colored logging",
-        [](common_params &) {
-            common_log_set_colors(common_log_main(), true);
-        }
-    ).set_env("LLAMA_LOG_COLORS"));
+    add_opt(common_arg({"--log-colors"}, "[on|off|auto]",
+        "Set colored logging ('on', 'off', or 'auto', default: 'auto')\n"
+        "'auto' enables colors when output is to a terminal",
+        [](common_params &, const std::string & value) {
+            if (is_truthy(value)) {
+                common_log_set_colors(common_log_main(), LOG_COLORS_ENABLED);
+            } else if (is_falsey(value)) {
+                common_log_set_colors(common_log_main(), LOG_COLORS_DISABLED);
+            } else if (is_autoy(value)) {
+                common_log_set_colors(common_log_main(), LOG_COLORS_AUTO);
+            } else {
+                throw std::invalid_argument(
+                    string_format("error: unknown value for --log-colors: '%s'\n", value.c_str()));
+            }
+        }).set_env("LLAMA_LOG_COLORS"));
     add_opt(common_arg(
         {"-v", "--verbose", "--log-verbose"},
         "Set verbosity level to infinity (i.e. log all messages, useful for debugging)",