Tag
#VAE
#Deeplearning
#Deep Learning
#reparameterization trick
#gaussian mixture
#variational inference
#exponential family
#probabilistic graphical model
#GNN
#Generative Model
#Paper Reading
#CS231n
#deep learning
#GCN
#mutual information
#gan
#gaussian
#Bernoulli
#Attention
#Paper
#rl as inference
#amortized inference
#metropolis-hastings
#rejection sampling
#markov chain monte carlo
#marginal gaussian
#conditional gaussian
#multivariate gaussian
#probability distribution
#reverse kl
#forward kl
#latent variable model
#em algorithm
#graphical model
#restricted boltzmann machine
#bayesian model
#energy-based model
#conditional random fields
#markov random fields
#Pearson Correlation
#MCMC
#generalized linear model
#SpectralGCN
#GNN models
#ChebNet
#GraphSAGE
#Spectral Graph Theory
#Graph Fourier Transform
#Graph Laplacian
#MASN
#ACL2021
#multi-modal
#VideoQA
#Importance sampling
#Language model
#lower bound
#Cross-Entropy
#Maximum Likelihood Estimation
#Bayesian Networks
#diffusion
#Policy Gradient
#Paper Review
#Attention is all you need
#Memory Network
#soft attention
#hard attention
#Wasserstein
#Wasserstein GAN
#WGAN
#GAN loss
#AutoEncoder
#Logistic Regression
#multinomial
#Gibbs Sampling
#pgm
#GAT
#MLE
#Monte Carlo