@@ -1047,60 +1047,36 @@ def get_working_dir(arg):
10471047
10481048
10491049def render_keras_nlp_tags (template ):
from decimal import Decimal

def round_num(n, decimal=2):
    """Round ``n`` to ``decimal`` places; return the bare integer when ``n`` is integral."""
    value = Decimal(n)
    if value == value.to_integral():
        return value.to_integral()
    return round(value.normalize(), decimal)

def numerize(n, decimal=2):
    """Abbreviate a number with a magnitude suffix (e.g. 2000 -> "2K")."""
    # 60 sufixes
    sufixes = [ "" , "K" , "M" , "B" , "T" , "Qa" , "Qu" , "S" , "Oc" , "No" ,
        "D" , "Ud" , "Dd" , "Td" , "Qt" , "Qi" , "Se" , "Od" , "Nd" ,"V" ,
        "Uv" , "Dv" , "Tv" , "Qv" , "Qx" , "Sx" , "Ox" , "Nx" , "Tn" , "Qa" ,
        "Qu" , "S" , "Oc" , "No" , "D" , "Ud" , "Dd" , "Td" , "Qt" , "Qi" ,
        "Se" , "Od" , "Nd" , "V" , "Uv" , "Dv" , "Tv" , "Qv" , "Qx" , "Sx" ,
        "Ox" , "Nx" , "Tn" , "x" , "xx" , "xxx" , "X" , "XX" , "XXX" , "END" ]

    sci_expr = [1e0 , 1e3 , 1e6 , 1e9 , 1e12 , 1e15 , 1e18 , 1e21 , 1e24 , 1e27 ,
        1e30 , 1e33 , 1e36 , 1e39 , 1e42 , 1e45 , 1e48 , 1e51 , 1e54 , 1e57 ,
        1e60 , 1e63 , 1e66 , 1e69 , 1e72 , 1e75 , 1e78 , 1e81 , 1e84 , 1e87 ,
        1e90 , 1e93 , 1e96 , 1e99 , 1e102 , 1e105 , 1e108 , 1e111 , 1e114 , 1e117 ,
        1e120 , 1e123 , 1e126 , 1e129 , 1e132 , 1e135 , 1e138 , 1e141 , 1e144 , 1e147 ,
        1e150 , 1e153 , 1e156 , 1e159 , 1e162 , 1e165 , 1e168 , 1e171 , 1e174 , 1e177 ]
    minus_buff = n
    n = abs(n)
    # Find the magnitude bucket [sci_expr[x], sci_expr[x + 1]) containing n.
    for x, threshold in enumerate(sci_expr):
        try:
            if threshold <= n < sci_expr[x + 1]:
                # Values below 1e3 are printed verbatim, without scaling.
                num = str(round_num(n / threshold, decimal)) if n >= 1e3 else str(n)
                sign = "" if minus_buff > 0 else "-"
                return sign + num + sufixes[x]
        except IndexError:
            print("You've reached the end")
1084- if "{{backbone_presets_table}}" in template :
1085- # Import KerasNLP and do some stuff.
1086-
1087- from keras_nlp .models .bert import bert_presets
1088- from keras_nlp .models .distil_bert import distil_bert_presets
1089- from keras_nlp .models .roberta import roberta_presets
1090- from keras_nlp .models .xlm_roberta import xlm_roberta_presets
10911050
def param_count(count: int) -> str:
    """Format a parameter count as a short human-readable string.

    Truncates toward zero rather than rounding: 1_900_000 -> "1M".

    Args:
        count: Non-negative number of parameters.

    Returns:
        The count abbreviated with a "B"/"M"/"K" suffix, or the bare
        number when below one thousand.
    """
    # Integer thresholds and floor division avoid float round-trips;
    # results are identical to int(count / 1e9) etc. for int inputs.
    if count >= 1_000_000_000:
        return f"{count // 1_000_000_000}B"
    if count >= 1_000_000:
        return f"{count // 1_000_000}M"
    if count >= 1_000:
        return f"{count // 1_000}K"
    return f"{count}"
if "{{backbone_presets_table}}" in template:
    # Import KerasNLP lazily: only needed when this placeholder is present.
    import keras_nlp

    # Table Header
    table = "Preset ID | Model | Parameters | Description\n"

    # Column alignment
    table += "-------|--------|-------|------\n"

    # One row per backbone preset, discovered through the Classifier
    # task symbols exported by keras_nlp.models.
    for name, symbol in keras_nlp.models.__dict__.items():
        if "Classifier" not in name:
            continue
        for preset in symbol.presets:
            # Only presets shared with the backbone belong in this table.
            if preset not in symbol.backbone_cls.presets:
                continue
            # Hoist the repeated metadata lookup.
            metadata = symbol.presets[preset]["metadata"]
            # Generate rows only for presets that have a docs path.
            if "path" not in metadata:
                continue
            table += (
                f"{preset} | "
                f"[{metadata['official_name']}]({metadata['path']}) | "
                f"{param_count(metadata['params'])} | "
                f"{metadata['description']}\n"
            )

    template = template.replace(
        "{{backbone_presets_table}}", table
    )
@@ -1115,12 +1091,13 @@ def numerize(n, decimal=2):
table += "-------|--------|-------|------\n"

# Classifier presets: rows for task presets that are NOT shared with the
# backbone (shared ones are listed in the backbone table instead).
for name, symbol in keras_nlp.models.__dict__.items():
    if "Classifier" not in name:
        continue
    for preset in symbol.presets:
        # `preset not in ...` instead of `not preset in ...` (PEP 8 idiom).
        if preset in symbol.backbone_cls.presets:
            continue
        # Hoist the repeated metadata lookup.
        metadata = symbol.presets[preset]["metadata"]
        # NOTE(review): unlike the backbone table, there is no "path" guard
        # here; a preset missing "path" metadata would raise KeyError —
        # confirm whether the guard should be added here too.
        table += (
            f"{preset} | "
            f"[{metadata['official_name']}]({metadata['path']}) | "
            f"{param_count(metadata['params'])} | "
            f"{metadata['description']}\n"
        )

template = template.replace(
    "{{classifier_presets_table}}", table
)
0 commit comments