loss.py
# -*- coding: utf-8 -*-
"""Loss.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1GdZf6XcAK7K3OyqE2AYFZAflWsDoExIx
"""
import tensorflow as tf
from tensorflow.keras.losses import Loss


class Discriminator_loss(Loss):
    """WGAN-style critic loss with a gradient penalty term."""

    def __init__(self, lamb, batch_size=4, epsilon=0.1):
        super(Discriminator_loss, self).__init__()
        self.lamb = lamb              # weight of the gradient penalty term
        self.batch_size = batch_size  # kept for configuration; not used in the computation
        self.epsilon = epsilon        # fixed interpolation coefficient between real and fake

    # Note: this signature differs from the standard Keras Loss.call(y_true, y_pred),
    # so invoke .call(...) directly rather than through model.compile.
    def call(self, y_true, y_fake, x_true, x_fake, discriminator):
        # Wasserstein critic objective: mean score on fakes minus mean score on reals.
        loss = tf.reduce_mean(y_fake - y_true)
        with tf.GradientTape() as tape:
            # Evaluate the critic at a point interpolated between real and fake samples.
            x_cap = self.epsilon * x_true + (1 - self.epsilon) * x_fake
            tape.watch(x_cap)  # x_cap is a plain tensor, so it must be watched explicitly
            discriminator_output = discriminator(x_cap)
        grad = tape.gradient(discriminator_output, x_cap)
        # Per-sample gradient magnitude (root mean square over height, width and channels).
        grad = tf.math.pow(grad, 2)
        grad_norm = tf.math.sqrt(tf.reduce_mean(grad, [1, 2, 3]))
        grad_penalty = self.lamb * grad_norm
        loss += tf.reduce_mean(grad_penalty)
        return loss
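

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original notebook). The toy critic,
# the 32x32x3 image shape, and the lambda value below are all assumptions
# made purely for illustration; random tensors stand in for real images and
# generator output. It shows how Discriminator_loss.call is meant to be
# invoked directly, since its signature departs from the Keras Loss API.
if __name__ == "__main__":
    discriminator = tf.keras.Sequential([
        tf.keras.layers.Conv2D(8, 3, strides=2, padding="same",
                               activation="relu", input_shape=(32, 32, 3)),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(1),  # unbounded scalar critic score, as in WGAN
    ])

    x_true = tf.random.normal((4, 32, 32, 3))  # stand-in for a batch of real images
    x_fake = tf.random.normal((4, 32, 32, 3))  # stand-in for generator output
    y_true = discriminator(x_true)
    y_fake = discriminator(x_fake)

    loss_fn = Discriminator_loss(lamb=10.0)
    loss = loss_fn.call(y_true, y_fake, x_true, x_fake, discriminator)
    print("discriminator loss:", float(loss))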