This repository has been archived by the owner on Apr 10, 2024. It is now read-only.

WIP: bugfixes and enhancements to objectives #137

Draft · wants to merge 5 commits into master
46 changes: 25 additions & 21 deletions lucid/optvis/objectives.py
@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-

# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -61,41 +63,30 @@ class Objective(object):

def __init__(self, objective_func, name="", description=""):
self.objective_func = objective_func
self.name = name
self.description = description
self.value = None # This value is populated after a call

def __add__(self, other):
if isinstance(other, (int, float)):
objective_func = lambda T: other + self(T)
name = self.name
description = self.description
else:
objective_func = lambda T: self(T) + other(T)
name = ", ".join([self.name, other.name])
description = "Sum(" + " +\n".join([self.description, other.description]) + ")"
return Objective(objective_func, name=name, description=description)
description = "(" + " + ".join([str(self), str(other)]) + ")"
return Objective(objective_func, description=description)

def __neg__(self):
return -1 * self

def __sub__(self, other):
return self + (-1 * other)

@staticmethod
def sum(objs):
objective_func = lambda T: sum([obj(T) for obj in objs])
descriptions = [obj.description for obj in objs]
description = "Sum(" + " +\n".join(descriptions) + ")"
names = [obj.name for obj in objs]
name = ", ".join(names)
return Objective(objective_func, name=name, description=description)

def __mul__(self, other):
if isinstance(other, (int, float)):
objective_func = lambda T: other * self(T)
else:
objective_func = lambda T: self(T) * other(T)
return Objective(objective_func, name=self.name, description=self.description)
description = str(self) + "·" + str(other)
return Objective(objective_func, description=description)

def __rmul__(self, other):
return self.__mul__(other)
@@ -104,7 +95,14 @@ def __radd__(self, other):
return self.__add__(other)

def __call__(self, T):
return self.objective_func(T)
self.value = self.objective_func(T)
return self.value

def __str__(self):
return self.description

def __repr__(self):
return self.description


def _make_arg_str(arg):
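
For orientation (not part of the diff), a minimal sketch of the composed-description and value-caching behavior these changes introduce; the constant objectives and their names below are purely illustrative:

from lucid.optvis.objectives import Objective

a = Objective(lambda T: 1.0, name="a", description="A")
b = Objective(lambda T: 2.0, name="b", description="B")

combined = a + 2 * b        # __rmul__ builds "B·2", then __add__ wraps the sum
print(str(combined))        # "(A + B·2)" under the description rules in this diff
print(combined(None))       # 5.0; the result is also cached on combined.value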
@@ -124,7 +122,7 @@ def wrap_objective(f, *args, **kwds):
"""
objective_func = f(*args, **kwds)
objective_name = f.__name__
args_str = " [" + ", ".join([_make_arg_str(arg) for arg in args]) + "]"
args_str = "(" + ", ".join([_make_arg_str(arg) for arg in args]) + ")"
description = objective_name.title() + args_str
return Objective(objective_func, objective_name, description)

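As a reading aid (not part of the PR), a small sketch of how wrap_objective now renders descriptions with parentheses instead of brackets; the constant objective below is hypothetical and mirrors the test added further down:

from lucid.optvis.objectives import wrap_objective

@wrap_objective
def const(a):
    return lambda T: a

obj = const(7)
print(str(obj))    # "Const(7)" with the parenthesized args_str from this change
print(obj(None))   # 7, also stored on obj.value
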
@@ -180,7 +178,13 @@ def _dot_cossim(x, y, cossim_pow=0):
if cossim_pow == 0: return tf.reduce_mean(xy_dot)
x_mags = tf.sqrt(_dot(x,x))
y_mags = tf.sqrt(_dot(y,y))
cossims = xy_dot / (eps + x_mags ) / (eps + y_mags)
a = xy_dot
b = (eps + x_mags )
c = (eps + y_mags)
d = a / b
e = d / c
print(a,b,c,d,e)
cossims = e
floored_cossims = tf.maximum(0.1, cossims)
return tf.reduce_mean(xy_dot * floored_cossims**cossim_pow)

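For reference (not part of the PR), a NumPy restatement of what _dot_cossim computes for a single pair of vectors, assuming eps is a small stabilizing constant (1e-4 here) and ignoring the temporary debug prints above:

import numpy as np

def dot_cossim_reference(x, y, cossim_pow=0, eps=1e-4):
    # Dot product reweighted by a floored, powered cosine similarity.
    xy_dot = float(np.dot(x, y))
    if cossim_pow == 0:
        return xy_dot
    cossim = xy_dot / ((eps + np.linalg.norm(x)) * (eps + np.linalg.norm(y)))
    return xy_dot * max(0.1, cossim) ** cossim_pow

x = np.array([1.0, 1.0])
y = np.array([1.0, 0.0])
print([round(dot_cossim_reference(x, y, p), 3) for p in (0, 1, 2)])  # [1.0, 0.707, 0.5]
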
@@ -190,10 +194,10 @@ def direction(layer, vec, batch=None, cossim_pow=0):
"""Visualize a direction"""
if batch is None:
vec = vec[None, None, None]
return lambda T: _dot_cossim(T(layer), vec)
return lambda T: _dot_cossim(T(layer), vec, cossim_pow = cossim_pow)
else:
vec = vec[None, None]
return lambda T: _dot_cossim(T(layer)[batch], vec)
return lambda T: _dot_cossim(T(layer)[batch], vec, cossim_pow = cossim_pow)


@wrap_objective
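A usage note (not part of the diff): with this fix, direction actually forwards cossim_pow to _dot_cossim instead of silently dropping it. The layer name and vector length below are assumptions for illustration only:

import numpy as np
from lucid.optvis import objectives

# Illustrative only: layer name and channel count (508 assumed) are not taken from the diff.
vec = np.random.randn(508).astype(np.float32)
obj = objectives.direction("mixed4a_pre_relu", vec, cossim_pow=2)
# Before this change, the cossim_pow argument was accepted but never reached _dot_cossim.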
36 changes: 36 additions & 0 deletions tests/optvis/test_objectives.py
@@ -5,6 +5,8 @@
import tensorflow as tf
import numpy as np
from lucid.optvis import objectives, param, render, transform
from lucid.optvis.objectives import wrap_objective


np.random.seed(42)

@@ -41,6 +43,40 @@ def test_neuron(inceptionv1):
objective = objectives.neuron("mixed4a_pre_relu", 42)
assert_gradient_ascent(objective, inceptionv1)

def test_composition():
@wrap_objective
def f(a):
return lambda T: a

a = f(1)
b = f(2)
c = f(3)
ab = a - 2*b
cab = c*(ab - 1)

assert str(cab) == "F(3)·((F(1) + F(2)·2·-1) + -1)"
assert cab(None) == 3*(1 - 2*2 - 1)
assert a.value == 1
assert b.value == 2
assert c.value == 3
assert ab.value == (a.value - 2*b.value)
assert cab.value == c.value*(ab.value - 1)


@pytest.mark.parametrize("cossim_pow", [0, 1, 2])
def test_cossim(cossim_pow):
true_values = [1.0, 2**(0.5)/2, 0.5]
x = np.array([1,1], dtype = np.float32)
y = np.array([1,0], dtype = np.float32)
T = lambda _: tf.constant(x[None, None, None, :])
objective = objectives.direction("dummy", y, cossim_pow=cossim_pow)
objective_t = objective(T)
with tf.Session() as sess:
trueval = np.dot(x,y)*(np.dot(x,y)/(np.linalg.norm(x)*np.linalg.norm(y)))**cossim_pow
print(cossim_pow, trueval)
objective = sess.run(objective_t)
assert abs(objective - true_values[cossim_pow]) < 1e-3


def test_channel(inceptionv1):
objective = objectives.channel("mixed4a_pre_relu", 42)