
Source Code for Module BIP.Bayes.like

# -*- coding:utf-8 -*-
#-----------------------------------------------------------------------------
# Name:        like.py
# Project:     BayesianInference
# Purpose:     log-likelihood functions
#
# Author:      Flávio Codeço Coelho <fccoelho@gmail.com>
#
# Created:     2008-11-26
# Copyright:   (c) 2008 by the Author
# Licence:     GPL v3
#-----------------------------------------------------------------------------
__docformat__ = "restructuredtext en"
import scipy
from scipy.special import gammaln
from numpy import array, searchsorted, log, random, pi, sum, inf, arange

def Categor(x, hist):
    """
    Categorical log-likelihood:
    generalization of a Bernoulli process for variables with any constant
    number of discrete values.

    :Parameters:
        - `x`: data vector (list)
        - `hist`: tuple (prob, classes); classes contains the upper limit of each histogram class

    >>> Categor([1],([.3,.7],[0,1]))
    -0.356674943939
    """
    like = 0.0
    x = array(x)
    prob = array(hist[0])
    sup = array(hist[1])
    ind = searchsorted(sup, x)  # class index of each observation
    like += sum(log(prob[ind]))
    return like

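A quick illustration of the `hist` argument (not part of the original module): searchsorted maps each observation to the first class whose upper limit is greater than or equal to it, and the corresponding probability is looked up. The three-class histogram below is made up for the example.

    hist = ([.2, .5, .3], [0, 1, 2])   # class probabilities and their upper limits
    ll = Categor([0, 1, 2], hist)      # one observation per class:
                                       # log(.2) + log(.5) + log(.3), about -3.51
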
def Uniform(x, min, max):
    """
    Uniform log-likelihood

    :Parameters:
        - `x`: data vector (list)
        - `min`: lower limit of the distribution
        - `max`: upper limit of the distribution

    >>> Uniform([1.1,2.3,3.4,4],0,5)
    -6.4377516497364011
    """
    assert max > min
    like = 0.0
    p = 1. / (max - min)  # density of the uniform distribution
    for i in x:
        if i > min and i <= max:
            like += log(p)
        else:
            like = -inf  # a point outside the support makes the likelihood zero
            return like
    return like

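When every observation falls inside (min, max], the result is simply n*log(1/(max - min)); for the doctest above, 4*log(1/5) = 4*(-1.6094...) ≈ -6.43775, which is the value shown.
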
def Normal(x, mu, tau):
    """
    Normal log-likelihood

    :Parameters:
        - `x`: data vector (list)
        - `mu`: mean
        - `tau`: precision (1/variance)

    >>> Normal([0],0,1)
    -0.918938533205
    """
    x = array(x)
    n = x.size
    like = sum(-0.5 * tau * (x - mu)**2)
    like += n * 0.5 * log(0.5 * tau / pi)  # normalizing constant: (n/2)*log(tau/(2*pi))
    return like

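As a sanity check (a sketch, not part of the module; the data values below are arbitrary), the result can be compared with the sum of log-densities from scipy.stats once the precision is converted back to a standard deviation:

    from scipy import stats
    from numpy import sqrt
    tau = 2.0
    sigma = 1.0 / sqrt(tau)            # tau is the precision, 1/variance
    data = [0.1, -0.3, 0.7]
    ll = Normal(data, 0.0, tau)
    ll_check = sum(log(stats.norm.pdf(array(data), 0.0, sigma)))  # should agree up to rounding
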
def find_best_tau(x, mu):
    """
    Returns the value of tau which maximizes the Normal log-likelihood
    for fixed data `x` and mean `mu`, using a simple random search.
    """
    if mu == 0:
        tau = 1. / (mu + 1)
    else:
        tau = 1. / mu  # starting point
    ll = Normal(x, mu, tau)
    i = 0; j = 0
    # i counts accepted improvements, j counts total proposals
    while i < 1000 and j < 100000:
        taun = tau + random.normal()
        if taun > 0:  # the precision must be positive
            l = Normal(x, mu, taun)
            if l > ll:
                tau = taun
                ll = l
                i += 1
        j += 1
    return tau

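A usage sketch (the sample below is synthetic, drawn with numpy's random as imported at the top of the module, and the search is stochastic, so results vary between runs): draw data with a known precision and let the random search recover it approximately.

    data = random.normal(0.0, 2.0, 50)   # sd = 2, so the true precision is 0.25
    tau_hat = find_best_tau(data, 0.0)   # should land in the neighbourhood of 0.25
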
def Lognormal(x, mu, tau):
    """
    Lognormal log-likelihood

    :Parameters:
        - `mu`: mean
        - `tau`: precision (1/sd)

    >>> Lognormal([0.5,1,1.2],0,0.5)
    -3.15728720569
    """
    x = array(x)
    n = x.size
    like = n * 0.5 * (log(tau) - log(2.0*pi)) + sum(0.5*tau*(log(x)-mu)**2 - log(x))
    return like

def Poisson(x, mu):
    """
    Poisson log-likelihood

    >>> Poisson([2],2)
    -1.30685281944
    """
    x = array(x)
    sumx = sum(x * log(mu) - mu)
    sumfact = sum(log(scipy.factorial(x)))
    like = sumx - sumfact
    return like

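A numerical aside (illustrative only): log(x!) can equivalently be obtained from gammaln, already imported above, which stays in log space and so does not overflow for large counts the way the factorial itself can.

    counts = array([2, 10, 150])
    a = log(scipy.factorial(counts))   # log of the factorials, computed directly
    b = gammaln(counts + 1)            # the same values via the log-gamma function
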
def Negbin(x, r, p):
    """
    Negative binomial log-likelihood

    >>> Negbin([2,3],6,0.3)
    -9.16117424315
    """
    x = array(x)
    like = sum(r*log(p) + x*log(1-p) + log(scipy.factorial(x+r-1))
               - log(scipy.factorial(x)) - log(scipy.factorial(r-1)))
    return like

def Binomial(x, n, p):
    """
    Binomial log-likelihood

    >>> Binomial([2,3],6,0.3)
    -2.81280615454
    """
    x = array(x)
    like = sum(x*log(p) + (n-x)*log(1.-p) + log(scipy.factorial(n))
               - log(scipy.factorial(x)) - log(scipy.factorial(n-x)))
    return like

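One common use of these functions is comparing candidate parameter values for the same data; a small sketch using the doctest data above (the two values of p are arbitrary):

    ll_a = Binomial([2, 3], 6, 0.3)
    ll_b = Binomial([2, 3], 6, 0.5)
    # ll_b is the larger of the two: the sample proportion is 5/12,
    # which is closer to 0.5 than to 0.3
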
def Weibull(x, alpha, beta):
    """
    Weibull log-likelihood

    >>> Weibull([2,1,0.3,.5,1.7],1.5,3)
    -7.811955373
    """
    x = array(x)
    beta = float(beta)
    n = x.size
    # normalizing constant
    like = n * (log(alpha) - alpha * log(beta))
    # kernel of the distribution
    like += sum((alpha - 1) * log(x) - (x / beta)**alpha)
    return like

def Bernoulli(x, p):
    """
    Bernoulli log-likelihood

    >>> Bernoulli([0,1,1,1,0,0,1,1],0.5)
    -5.54517744448
    """
    x = array(x)
    like = sum(x * log(p) + (1 - x) * log(1. - p))
    return like

def Gamma(x, alpha, beta):
    """
    Gamma log-likelihood (beta is a scale parameter)

    >>> Gamma([2,3,7,6,4],2,2)
    -11.015748357
    """
    x = array(x)
    beta = float(beta)
    n = x.size
    # normalizing constant
    like = -n * (gammaln(alpha) + alpha * log(beta))
    # kernel of the distribution
    like += sum((alpha - 1.0) * log(x) - x / beta)
    return like

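Since the kernel uses x/beta, beta enters as a scale parameter; a cross-check sketch against scipy.stats (illustrative, reusing the doctest data):

    from scipy import stats
    data = array([2, 3, 7, 6, 4])
    ll = Gamma(data, 2, 2)
    ll_check = sum(log(stats.gamma.pdf(data, 2, scale=2.0)))  # should agree up to rounding
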
def Beta(x, a, b):
    """
    Beta log-likelihood

    >>> Beta([.2,.3,.7,.6,.4],2,5)
    -0.434845728904
    """
    x = array(x)
    n = x.size
    # normalizing constant
    like = n * (gammaln(a + b) - gammaln(a) - gammaln(b))
    # kernel of the distribution
    like += sum((a - 1.0) * log(x) + (b - 1.0) * log(1.0 - x))
    return like

def Simple(x, w, a, start=0):
    """
    Undocumented likelihood (original author's note: "find out what it is").
    """
    x = array(x)
    n = x.size
    like = 0.0
    s = sum(a * (x / w)**(2 * arange(n)))  # requires len(a) == len(x)
    like += log(1 + s)
    return like

if __name__ == "__main__":
    import doctest
    doctest.testmod(verbose=True)
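When executed directly the module runs its doctests; the same check can be done from an interactive session (a minimal sketch, assuming the package is importable as BIP.Bayes.like):

    import doctest
    from BIP.Bayes import like
    doctest.testmod(like, verbose=True)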