Unverified commit 796e56e7, authored by dlagul, committed by GitHub

Update tester.py

Parent 20305675
No related merge requests found
@@ -82,6 +82,9 @@ class Tester(object):
    def loss_fn(self, original_seq, recon_seq_mu, recon_seq_logvar, s_mean, s_logvar, d_post_mean, d_post_logvar, d_prior_mean, d_prior_logvar):
        batch_size = original_seq.size(0)
        # See https://arxiv.org/pdf/1606.05908.pdf, Page 9, Section 2.2 for details.
        # log N(x|mu,sigma^2)
        # = log{1/(sqrt(2*pi)*sigma)*exp{-(x-mu)^2/(2*sigma^2)}}
        # = -0.5*{log(2*pi)+2*log(sigma)+[(x-mu)/exp{log(sigma)}]^2}, where log(sigma) is stored in recon_seq_logvar
        loglikelihood = -0.5 * torch.sum(torch.pow(((original_seq.float()-recon_seq_mu.float())/torch.exp(recon_seq_logvar.float())), 2)
                                         + 2 * recon_seq_logvar.float()
                                         + np.log(np.pi*2))
@@ -98,6 +101,9 @@ class Tester(object):
    def loglikelihood_last_timestamp(self, x, recon_x_mu, recon_x_logvar):
        # See https://arxiv.org/pdf/1606.05908.pdf, Page 9, Section 2.2 for details.
        # log N(x|mu,sigma^2)
        # = log{1/(sqrt(2*pi)*sigma)*exp{-(x-mu)^2/(2*sigma^2)}}
        # = -0.5*{log(2*pi)+2*log(sigma)+[(x-mu)/exp{log(sigma)}]^2}, where log(sigma) is stored in recon_x_logvar
        llh = -0.5 * torch.sum(torch.pow(((x.float()-recon_x_mu.float())/torch.exp(recon_x_logvar.float())), 2)
                               + 2 * recon_x_logvar.float()
                               + np.log(np.pi*2))
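
For reference, the following standalone sketch (not part of the commit) checks the identity the comments describe: when the logvar tensor is interpreted as log(sigma), the hand-written term agrees with torch.distributions.Normal(mu, sigma).log_prob(x) summed over all elements. The names x, mu, and logvar below are illustrative placeholders, not identifiers from tester.py.

# Illustrative check only; assumes logvar stores log(sigma).
import numpy as np
import torch

torch.manual_seed(0)
x = torch.randn(4, 3)
mu = torch.randn(4, 3)
logvar = torch.randn(4, 3)  # interpreted as log(sigma), so sigma = exp(logvar)

# Hand-coded Gaussian log-likelihood, summed over all elements (same form as the diff above)
llh_manual = -0.5 * torch.sum(torch.pow((x - mu) / torch.exp(logvar), 2)
                              + 2 * logvar
                              + np.log(np.pi * 2))

# Reference value from torch.distributions (scale argument is the standard deviation)
llh_ref = torch.distributions.Normal(mu, torch.exp(logvar)).log_prob(x).sum()

assert torch.allclose(llh_manual, llh_ref, atol=1e-5)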