diff --git a/lucid/optvis/objectives.py b/lucid/optvis/objectives.py
index a13d85ab..990ac124 100644
--- a/lucid/optvis/objectives.py
+++ b/lucid/optvis/objectives.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 # Copyright 2018 The Lucid Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -61,19 +63,16 @@ class Objective(object):
 
   def __init__(self, objective_func, name="", description=""):
     self.objective_func = objective_func
-    self.name = name
     self.description = description
+    self.value = None  # This value is populated after a call
 
   def __add__(self, other):
     if isinstance(other, (int, float)):
       objective_func = lambda T: other + self(T)
-      name = self.name
-      description = self.description
     else:
       objective_func = lambda T: self(T) + other(T)
-      name = ", ".join([self.name, other.name])
-      description = "Sum(" + " +\n".join([self.description, other.description]) + ")"
-    return Objective(objective_func, name=name, description=description)
+    description = "(" + " + ".join([str(self), str(other)]) + ")"
+    return Objective(objective_func, description=description)
 
   def __neg__(self):
     return -1 * self
@@ -81,21 +80,13 @@
   def __sub__(self, other):
     return self + (-1 * other)
 
-  @staticmethod
-  def sum(objs):
-    objective_func = lambda T: sum([obj(T) for obj in objs])
-    descriptions = [obj.description for obj in objs]
-    description = "Sum(" + " +\n".join(descriptions) + ")"
-    names = [obj.name for obj in objs]
-    name = ", ".join(names)
-    return Objective(objective_func, name=name, description=description)
-
   def __mul__(self, other):
     if isinstance(other, (int, float)):
       objective_func = lambda T: other * self(T)
     else:
       objective_func = lambda T: self(T) * other(T)
-    return Objective(objective_func, name=self.name, description=self.description)
+    description = str(self) + "·" + str(other)
+    return Objective(objective_func, description=description)
 
   def __rmul__(self, other):
     return self.__mul__(other)
@@ -104,7 +95,14 @@
   def __radd__(self, other):
     return self.__add__(other)
 
   def __call__(self, T):
-    return self.objective_func(T)
+    self.value = self.objective_func(T)
+    return self.value
+
+  def __str__(self):
+    return self.description
+
+  def __repr__(self):
+    return self.description
 
 def _make_arg_str(arg):
@@ -124,7 +122,7 @@
   """
   objective_func = f(*args, **kwds)
   objective_name = f.__name__
-  args_str = " [" + ", ".join([_make_arg_str(arg) for arg in args]) + "]"
+  args_str = "(" + ", ".join([_make_arg_str(arg) for arg in args]) + ")"
   description = objective_name.title() + args_str
   return Objective(objective_func, objective_name, description)
 
@@ -180,7 +178,7 @@
   if cossim_pow == 0: return tf.reduce_mean(xy_dot)
   x_mags = tf.sqrt(_dot(x,x))
   y_mags = tf.sqrt(_dot(y,y))
-  cossims = xy_dot / (eps + x_mags ) / (eps + y_mags)
+  cossims = xy_dot / (eps + x_mags) / (eps + y_mags)
   floored_cossims = tf.maximum(0.1, cossims)
   return tf.reduce_mean(xy_dot * floored_cossims**cossim_pow)
 
@@ -190,10 +188,10 @@
 def direction(layer, vec, batch=None, cossim_pow=0):
   """Visualize a direction"""
   if batch is None:
     vec = vec[None, None, None]
-    return lambda T: _dot_cossim(T(layer), vec)
+    return lambda T: _dot_cossim(T(layer), vec, cossim_pow=cossim_pow)
   else:
     vec = vec[None, None]
-    return lambda T: _dot_cossim(T(layer)[batch], vec)
+    return lambda T: _dot_cossim(T(layer)[batch], vec, cossim_pow=cossim_pow)
 
 @wrap_objective
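To illustrate the reworked operator overloading and description logic above, here is a minimal sketch; the `constant` objective is hypothetical, mirroring the `f` helper used in the tests below:

```python
from lucid.optvis.objectives import wrap_objective

@wrap_objective
def constant(value):
  # Toy objective: ignores the network tensors and just returns a fixed value.
  return lambda T: value

obj = constant(1) + 2 * constant(3)
print(obj)          # "(Constant(1) + Constant(3)·2)" via the new __str__/__repr__
result = obj(None)  # 7; __call__ evaluates the composed objective_func
print(obj.value)    # 7, cached by __call__ on the last evaluation
```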
diff --git a/tests/optvis/test_objectives.py b/tests/optvis/test_objectives.py
index 1408dd15..649e58cc 100644
--- a/tests/optvis/test_objectives.py
+++ b/tests/optvis/test_objectives.py
@@ -5,6 +5,8 @@
 import tensorflow as tf
 import numpy as np
 from lucid.optvis import objectives, param, render, transform
+from lucid.optvis.objectives import wrap_objective
+
 
 np.random.seed(42)
 
@@ -41,6 +43,39 @@ def test_neuron(inceptionv1):
   objective = objectives.neuron("mixed4a_pre_relu", 42)
   assert_gradient_ascent(objective, inceptionv1)
 
+
+def test_composition():
+  @wrap_objective
+  def f(a):
+    return lambda T: a
+
+  a = f(1)
+  b = f(2)
+  c = f(3)
+  ab = a - 2*b
+  cab = c*(ab - 1)
+
+  assert str(cab) == "F(3)·((F(1) + F(2)·2·-1) + -1)"
+  assert cab(None) == 3*(1 - 2*2 - 1)
+  assert a.value == 1
+  assert b.value == 2
+  assert c.value == 3
+  assert ab.value == (a.value - 2*b.value)
+  assert cab.value == c.value*(ab.value - 1)
+
+
+@pytest.mark.parametrize("cossim_pow", [0, 1, 2])
+def test_cossim(cossim_pow):
+  true_values = [1.0, 2**0.5/2, 0.5]
+  x = np.array([1, 1], dtype=np.float32)
+  y = np.array([1, 0], dtype=np.float32)
+  T = lambda _: tf.constant(x[None, None, None, :])
+  objective = objectives.direction("dummy", y, cossim_pow=cossim_pow)
+  objective_t = objective(T)
+  with tf.Session() as sess:
+    objective_value = sess.run(objective_t)
+  assert abs(objective_value - true_values[cossim_pow]) < 1e-3
+
 
 def test_channel(inceptionv1):
   objective = objectives.channel("mixed4a_pre_relu", 42)
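The expected values in `test_cossim` follow directly from `_dot_cossim`: with x = (1, 1) and y = (1, 0), x·y = 1 and cos(x, y) = 1/√2, so the objective is (x·y)·cos^pow, i.e. 1.0, √2/2, and 0.5 for pow = 0, 1, 2. A quick plain-NumPy sanity check (no TensorFlow session needed):

```python
import numpy as np

x = np.array([1.0, 1.0])
y = np.array([1.0, 0.0])
cossim = np.dot(x, y) / (np.linalg.norm(x) * np.linalg.norm(y))  # 1/sqrt(2)
for cossim_pow in (0, 1, 2):
  # Matches true_values in test_cossim: 1.0, 0.7071..., 0.5
  print(cossim_pow, np.dot(x, y) * cossim ** cossim_pow)
```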