# TensorFlow 1.x graph-mode code: it mixes tf.variable_scope with tf.keras
# layers, so it targets the tf.compat.v1-era API.
import tensorflow as tf
def crossdomainidloss(feats0, feats1, labels, labels_, var_list0, var_list1):
    """Cross-domain identity loss.

    A single softmax classifier is shared across the visible and thermal
    streams, so features from both domains are trained toward the same
    identity space. Optionally adds L2 weight decay on the classifier.

    Args:
        feats0: Feature tensor from the visible (domain 0) stream.
        feats1: Feature tensor from the thermal (domain 1) stream.
        labels: Identity labels, a 1-D tensor of shape [batch_size].
        labels_: Domain labels (unused here; kept for a uniform signature).
        var_list0: Trainable variables of the visible stream.
        var_list1: Trainable variables of the thermal stream.

    Returns:
        losses, logits, var_lists, feats0_flat, feats1_flat.
    """
print("Cross domain identity loss")
feats0 = tf.keras.layers.Flatten()(feats0)
feats1 = tf.keras.layers.Flatten()(feats1)
feats0_flat = feats0
feats1_flat = feats1
NC = 96
wd = None
#feats0 = tf.keras.layers.Dropout(0.3)(feats0) # dropout
labels = tf.cast(labels, tf.int32)
dim0 = feats0.get_shape()[1].value
    with tf.variable_scope('softmax_linear'):
        dense = tf.keras.layers.Dense(NC, activation=None, name='softmax_linear')
        logits00 = dense(feats0)  # visible (0) -> visible (0)
        # Use the same classifier on thermal features.
        logits10 = dense(feats1)  # thermal (1) -> visible (0)
        if wd is not None:
            # dense.kernel exists once the layer has been called above.
            weight_decay = tf.multiply(tf.nn.l2_loss(dense.kernel), wd,
                                       name='weight_loss')
    var_list0 = var_list0 + dense.trainable_variables  # only update the shared classifier here
    var_list1 = var_list1
    cross_entropy00 = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits00, name='cross_entropy_per_example00')
    cross_entropy_mean00 = tf.reduce_mean(cross_entropy00, name='cross_entropy00')
    cross_entropy_mean00 = tf.multiply(7.5e-1, cross_entropy_mean00,
                                       name='weighted_cross_entropy00')
    cross_entropy10 = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits10, name='cross_entropy_per_example10')
    cross_entropy_mean10 = tf.reduce_mean(cross_entropy10, name='cross_entropy10')
    cross_entropy_mean10 = tf.multiply(7.5e-1, cross_entropy_mean10,
                                       name='weighted_cross_entropy10')
    if wd is not None:
        losses = [cross_entropy_mean00 + weight_decay, cross_entropy_mean10]
    else:
        losses = [cross_entropy_mean00, cross_entropy_mean10]
    logits = [logits00, logits10]
    var_lists = [var_list0, var_list1]
    return losses, logits, var_lists, feats0_flat, feats1_flat
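

# Hedged usage sketch (not part of the original file): one way the returned
# pieces might be wired into a two-stream trainer. `net0_vars`, `net1_vars`,
# and the optimizer choice are illustrative assumptions, not the original
# training setup.
#
#   losses, logits, var_lists, f0, f1 = crossdomainidloss(
#       feats0, feats1, labels, labels_, net0_vars, net1_vars)
#   opt = tf.train.MomentumOptimizer(1e-2, 0.9)
#   train_vis = opt.minimize(losses[0], var_list=var_lists[0])  # net0 + classifier
#   train_thm = opt.minimize(losses[1], var_list=var_lists[1])  # net1 only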
def domaininvarianceloss(feats0, feats1, labels, labels_, var_list0, var_list1):
    """Domain invariance loss.

    Extends crossdomainidloss with a two-way domain classifier: its
    cross-entropy against the domain labels `labels_` (weighted 0.25) is
    added to each stream's identity loss. Arguments and return values
    mirror crossdomainidloss.
    """
    print("Domain invariance loss")
    feats0 = tf.keras.layers.Flatten()(feats0)
    feats1 = tf.keras.layers.Flatten()(feats1)
    feats0_flat = feats0
    feats1_flat = feats1
    NC = 96    # number of identity classes
    wd = None  # optional L2 weight-decay coefficient; None disables it
    labels = tf.cast(labels, tf.int32)
    with tf.variable_scope('softmax_linear'):
        dense = tf.keras.layers.Dense(NC, activation=None, name='softmax_linear')
        logits00 = dense(feats0)  # visible (0) -> visible (0)
        # Use the same identity classifier on thermal features.
        logits10 = dense(feats1)  # thermal (1) -> visible (0)
        if wd is not None:
            # dense.kernel exists once the layer has been called above.
            weight_decay = tf.multiply(tf.nn.l2_loss(dense.kernel), wd,
                                       name='weight_loss')
    with tf.variable_scope('softmax_linear1'):
        dense_ = tf.keras.layers.Dense(2, activation=None, name='softmax_linear1')
        logits00_ = dense_(feats0)  # domain logits for visible features
        # Use the same domain classifier on thermal features.
        logits10_ = dense_(feats1)  # domain logits for thermal features
    var_list0 = var_list0 + dense.trainable_variables + dense_.trainable_variables  # only update the classifiers here
    var_list1 = var_list1 + dense_.trainable_variables
    cross_entropy00 = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits00, name='cross_entropy_per_example00')
    cross_entropy_mean00 = tf.reduce_mean(cross_entropy00, name='cross_entropy00')
    cross_entropy_mean00 = tf.multiply(7.5e-1, cross_entropy_mean00,
                                       name='weighted_cross_entropy00')
    cross_entropy10 = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits10, name='cross_entropy_per_example10')
    cross_entropy_mean10 = tf.reduce_mean(cross_entropy10, name='cross_entropy10')
    cross_entropy_mean10 = tf.multiply(7.5e-1, cross_entropy_mean10,
                                       name='weighted_cross_entropy10')
    cross_entropy00_ = tf.nn.softmax_cross_entropy_with_logits(
        labels=labels_, logits=logits00_, name='cross_entropy_per_example00_')
    cross_entropy_mean00_ = .25 * tf.reduce_mean(cross_entropy00_, name='cross_entropy00_')
    cross_entropy10_ = tf.nn.softmax_cross_entropy_with_logits(
        labels=labels_, logits=logits10_, name='cross_entropy_per_example10_')
    cross_entropy_mean10_ = .25 * tf.reduce_mean(cross_entropy10_, name='cross_entropy10_')
    if wd is not None:
        losses = [cross_entropy_mean00 + cross_entropy_mean00_ + weight_decay,
                  cross_entropy_mean10 + cross_entropy_mean10_]
    else:
        losses = [cross_entropy_mean00 + cross_entropy_mean00_,
                  cross_entropy_mean10 + cross_entropy_mean10_]
    logits = [logits00, logits10]
    var_lists = [var_list0, var_list1]
    return losses, logits, var_lists, feats0_flat, feats1_flat
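

if __name__ == '__main__':
    # Minimal smoke test (an addition, not from the original training code):
    # build each loss on random "features" in its own graph and evaluate it
    # once. Batch size, feature shape, and label values are assumptions
    # chosen only to exercise the graph.
    import numpy as np
    for loss_fn in (crossdomainidloss, domaininvarianceloss):
        with tf.Graph().as_default():
            feats0 = tf.constant(np.random.randn(8, 4, 4, 32), tf.float32)  # fake visible feats
            feats1 = tf.constant(np.random.randn(8, 4, 4, 32), tf.float32)  # fake thermal feats
            labels = tf.constant(np.arange(8), tf.int32)                    # identity ids
            labels_ = tf.one_hot([0, 0, 0, 0, 1, 1, 1, 1], 2)               # one-hot domain labels
            losses, logits, var_lists, f0, f1 = loss_fn(
                feats0, feats1, labels, labels_, [], [])
            with tf.Session() as sess:
                sess.run(tf.global_variables_initializer())
                print(loss_fn.__name__, sess.run(losses))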