This repository was archived by the owner on Jul 7, 2023. It is now read-only.
File tree: 3 files changed (+3, −18 lines)
Original file line number | Diff line number | Diff line change
@@ -368,6 +368,7 @@ def eval_metrics(self):
368368 ]
369369
370370 def eval_metric_fns (self , model_hparams ):
371+ del model_hparams
371372 metric_names = self .eval_metrics ()
372373 if not all ([m in metrics .METRICS_FNS for m in metric_names ]):
373374 error_str = ("Unrecognized metric. Problem %s specified metrics "
Original file line number Diff line number Diff line change @@ -1121,22 +1121,6 @@ def body(self, features):
11211121
11221122 return encoder_output
11231123
1124- @registry .register_model
1125- class TransformerRegressor (TransformerEncoder ):
1126- """Transformer inheriting from Encoder, for the regression problem.
1127- Final res is a tensor that has a shape of (?, 1, 1, 1)
1128- """
1129-
1130- def top (self , body_output , features ):
1131- """Computes single scalar value from body_output
1132- """
1133- with tf .variable_scope ("reg_top_ffn" ):
1134- # scalar = common_layers.dense(body_output,hparams)
1135- x = body_output
1136- x = tf .reduce_mean (x , axis = [1 , 2 ], keepdims = True )
1137- res = tf .layers .dense (x , 1 , name = "model_top" )
1138- return res
1139-
11401124
11411125def features_to_nonpadding (features , inputs_or_targets = "inputs" ):
11421126 key = inputs_or_targets + "_segmentation"
Original file line number Diff line number Diff line change @@ -638,7 +638,7 @@ def create_eager_metrics_for_problem(problem, model_hparams):
638638 metric_fns = problem .eval_metric_fns (model_hparams )
639639 tm = problem .get_hparams (model_hparams ).modality ["targets" ]
640640 return create_eager_metrics_internal (
641- metric_fns , weights_fn = tm .targets_weights_fn )
641+ metric_fns , weights_fn = tm .targets_weights_fn )
642642
643643
644644def create_eager_metrics (metric_names , weights_fn = common_layers .weights_all ):
@@ -664,7 +664,7 @@ def create_eager_metrics_internal(metric_fns,
664664 """Create metrics accumulators and averager for Eager mode.
665665
666666 Args:
667- metric_names : dict<metric name, metric function>
667+ metric_fns : dict<metric name, metric function>
668668 weights_fn: function that takes labels and returns a weights mask. Defaults
669669 to weights of all 1, i.e. common_layers.weights_all. Use
670670 common_layers.weights_nonzero if labels have 0-padding.
You can’t perform that action at this time.
0 commit comments