@@ -123,6 +123,7 @@ def get_target_contribs_of_input_with_filter_ref_func(
123123 ** kwargs )
124124
125125 def _set_scoring_mode_for_target_layer (self , target_layer ):
126+ print ("TARGET LAYER SET TO " + str (target_layer .get_name ()))
126127 if (deeplift .util .is_type (target_layer ,
127128 layers .Activation )):
128129 raise RuntimeError ("You set the target layer to an"
@@ -131,55 +132,22 @@ def _set_scoring_mode_for_target_layer(self, target_layer):
131132 + " to set the target layer to the layer *before*"
132133 + " the activation layer instead? (recommended for "
133134 + " classification)" )
134- if (len (target_layer .get_output_layers ())== 0 ):
135- scoring_mode = ScoringMode .OneAndZeros
136- else :
137- assert len (target_layer .get_output_layers ())== 1 ,\
138- "at most one output was expected for target layer " \
139- + str (target_layer .get_name ())+ " but got: " + \
140- str (target_layer .get_output_layers ())
141- final_activation_layer = target_layer .get_output_layers ()[0 ]
142- if (deeplift .util .is_type (final_activation_layer ,
143- layers .Activation )== False ):
144- raise RuntimeError ("There is a layer after your target"
145- + " layer but it is not an activation layer"
146- + ", which seems odd...if doing regression, make"
147- + " sure to set the target layer to the last layer" )
148- deeplift .util .assert_is_type (final_activation_layer ,
149- layers .Activation ,
150- "final_activation_layer" )
151- final_activation_type = type (final_activation_layer ).__name__
152-
153- if (final_activation_type == "Sigmoid" ):
154- scoring_mode = ScoringMode .OneAndZeros
155- elif (final_activation_type == "Softmax" ):
156- #new_W, new_b =\
157- # deeplift.util.get_mean_normalised_softmax_weights(
158- # target_layer.W, target_layer.b)
159- #The weights need to be mean normalised before they are
160- #passed in because build_fwd_pass_vars() has already
161- #been called before this function is called,
162- #because get_output_layers() (used in this function)
163- #is updated during the build_fwd_pass_vars()
164- #call - that is why I can't simply mean-normalise
165- #the weights right here :-( (It is a pain and a
166- #recipe for bugs to rebuild the forward pass
167- #vars after they have already been built - in
168- #particular for a model that branches because where
169- #the branches unify you need really want them to be
170- #using the same symbolic variables - no use having
171- #needlessly complicated/redundant graphs and if a node
172- #is common to two outputs, so should its symbolic vars
173- #TODO: I should put in a 'reset_fwd_pass' function and use
174- #it to invalidate the _built_fwd_pass_vars cache and recompile
175- #if (np.allclose(target_layer.W, new_W)==False):
176- # print("Consider mean-normalising softmax layer")
177- #assert np.allclose(target_layer.b, new_b),\
178- # "Please mean-normalise weights and biases of softmax layer"
135+ scoring_mode = ScoringMode .OneAndZeros
136+ if (len (target_layer .get_output_layers ())> 0 ):
137+ if (len (target_layer .get_output_layers ())> 1 ):
138+ print ("WARNING: the target layer"
139+ + str (target_layer .get_name ())
140+ + " has multiple output layers"
141+ + str (target_layer .get_output_layers ()))
142+ else :
143+ final_activation_layer = target_layer .get_output_layers ()[0 ]
144+ if (deeplift .util .is_type (final_activation_layer ,
145+ layers .Activation )== False ):
146+ print ("\n \n WARNING!!! There is a layer after your target"
147+ + " layer but it is not an activation layer"
148+ + ", which is unusual; double check you have set"
149+ + " the target layer correctly.\n \n " )
179150 scoring_mode = ScoringMode .OneAndZeros
180- else :
181- raise RuntimeError ("Unsupported final_activation_type: "
182- + final_activation_type )
183151 target_layer .set_scoring_mode (scoring_mode )
184152
185153 def save_to_yaml_only (self , file_name ):