# ldm/modules/distributions/distributions.py
import torch
import numpy as np


class AbstractDistribution:
    def sample(self):
        raise NotImplementedError()

    def mode(self):
        raise NotImplementedError()


class DiracDistribution(AbstractDistribution):
    def __init__(self, value):
        self.value = value

    def sample(self):
        return self.value

    def mode(self):
        return self.value


class DiagonalGaussianDistribution(object):
    def __init__(self, parameters, deterministic=False):
        # `parameters` holds mean and log-variance concatenated along dim=1.
        self.parameters = parameters
        self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)
        # Clamp log-variance for numerical stability.
        self.logvar = torch.clamp(self.logvar, -30.0, 20.0)
        self.deterministic = deterministic
        self.std = torch.exp(0.5 * self.logvar)
        self.var = torch.exp(self.logvar)
        if self.deterministic:
            # Deterministic mode: zero variance, so sample() returns the mean.
            self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device)

    def sample(self):
        # Reparameterized sample: mean + std * eps, eps ~ N(0, I).
        x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device)
        return x

    def kl(self, other=None):
        if self.deterministic:
            return torch.Tensor([0.])
        else:
            if other is None:
                # KL divergence to a standard normal N(0, I).
                return 0.5 * torch.sum(torch.pow(self.mean, 2)
                                       + self.var - 1.0 - self.logvar,
                                       dim=[1, 2, 3])
            else:
                # KL divergence to another diagonal Gaussian.
                return 0.5 * torch.sum(
                    torch.pow(self.mean - other.mean, 2) / other.var
                    + self.var / other.var - 1.0 - self.logvar + other.logvar,
                    dim=[1, 2, 3])

    def nll(self, sample, dims=[1, 2, 3]):
        # Negative log-likelihood of `sample` under this Gaussian.
        if self.deterministic:
            return torch.Tensor([0.])
        logtwopi = np.log(2.0 * np.pi)
        return 0.5 * torch.sum(
            logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var,
            dim=dims)

    def mode(self):
        return self.mean


def normal_kl(mean1, logvar1, mean2, logvar2):
    """
    source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12
    Compute the KL divergence between two gaussians.

    Shapes are automatically broadcasted, so batches can be compared to
    scalars, among other use cases.
    """
    tensor = None
    for obj in (mean1, logvar1, mean2, logvar2):
        if isinstance(obj, torch.Tensor):
            tensor = obj
            break
    assert tensor is not None, "at least one argument must be a Tensor"

    # Force log-variances to be Tensors; broadcasting handles scalars for the
    # arithmetic below, but torch.exp() requires Tensor inputs.
    logvar1, logvar2 = [
        x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)
        for x in (logvar1, logvar2)
    ]

    return 0.5 * (
        -1.0
        + logvar2
        - logvar1
        + torch.exp(logvar1 - logvar2)
        + ((mean1 - mean2) ** 2) * torch.exp(-logvar2)
    )
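
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It shows how a
# VAE-style posterior built from this file is typically consumed. The tensor
# shapes and channel counts below are assumptions for demonstration only.
if __name__ == "__main__":
    # Hypothetical encoder output: mean and logvar concatenated along dim=1,
    # e.g. 4 latent channels -> 8 parameter channels.
    params = torch.randn(2, 8, 16, 16)
    posterior = DiagonalGaussianDistribution(params)

    z = posterior.sample()      # stochastic latent, shape [2, 4, 16, 16]
    z_det = posterior.mode()    # deterministic latent (the mean)
    kl_loss = posterior.kl()    # KL to a standard normal, one value per batch element
    rec_nll = posterior.nll(z)  # negative log-likelihood of a sample

    # normal_kl computes the same quantity elementwise between two diagonal
    # Gaussians (broadcasting scalars); summing it over the non-batch dims
    # matches posterior.kl() against N(0, I).
    elementwise = normal_kl(posterior.mean, posterior.logvar, 0.0, 0.0)
    print(z.shape, kl_loss.shape, rec_nll.shape, elementwise.shape)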