"""This is a helper module for multivariance.py"""
2
3
4 import multivariance_classes as M
5 import Num
6 import random
7 import gpkmisc
8
9 HUGE = 1e30
10
12 __doc__ = """This describes a quadratic model of a known size."""
13
14 - def __init__(self, dataset=None, ndim=None):
19
20
# NOTE(review): body of the full quadratic model's modeldim(); the enclosing
# `def` line is missing from this listing.  Returns the number of free
# parameters: m mean components plus m*(m+1)/2 entries for the upper
# triangle (diagonal included) of the symmetric inverse-covariance matrix.
m = self.ndim()
return m + (m*(m+1))/2
24
25
26
# NOTE(review): body of the parameter-unpacking method; the enclosing `def`
# line is missing from this listing (`prms` is its parameter).  Inverse of
# the packing used by start(): prms[:m] is the mean, the remainder holds
# the upper triangle of the symmetric inverse-covariance matrix, row by row.
m = self.ndim()
assert len(prms) == self.modeldim()
mu = prms[:m]
invsigma = Num.zeros((m, m), Num.Float)
j = m   # index of the first packed entry for row i of the triangle
for i in range(m):
    tmp = prms[j:j+(m-i)]
    invsigma[i,i:] = tmp   # fill row i from the diagonal rightwards...
    invsigma[i:,i] = tmp   # ...and mirror into column i to keep symmetry
    j += m-i
return self.new(mu, invsigma)
39
40 - def new(self, mu, invsigma, bias=0.0):
42
"""Returns (starting position vector, covariance matrix)"""
# NOTE(review): the enclosing `def` line is missing from this listing;
# `data` is its parameter -- presumably a sequence of sample vectors
# (TODO confirm against callers).
n = self.ndim()
if len(data) > 1:
    iv = M.diag_inv_variance(data)
else:
    # A single datum carries no variance information: fall back to identity.
    iv = Num.identity(n)

ivd = Num.diagonal(iv)
isd = Num.sqrt(ivd)   # per-axis inverse standard deviations
size = []
tmp = []
# Pack the upper triangle of iv row by row, and build a matching vector of
# scale estimates isd[i]*isd[j], one per packed matrix entry.
for i in range(n):
    for j in range(i,n):
        size.append(isd[i]*isd[j])
    tmp.append(iv[i,i:])
packedivar = Num.concatenate( tmp )

packedvarivar = Num.array( size, copy=True )

# Start at a randomly chosen datum followed by the packed inverse
# covariance; the second return value is a diagonal matrix of step-size
# scales, one per packed parameter.
packedpos = Num.concatenate( ( random.choice(data), packedivar ) )
packedsize = Num.concatenate( (1.0/Num.diagonal(iv), packedvarivar) )

return ( packedpos, gpkmisc.make_diag(packedsize) )
68
69
70
71
def __init__(self, mu, invsigma, details, bias=0.0, offset=None):
    """Build a concrete quadratic model from its numeric parameters.

    self.mu, self.invsigma, and self._offset should be considered
    read-only for all users of this class.
    """
    assert isinstance(details, quadratic)
    M.model_with_numbers.__init__(self, details, bias)
    dim = self.ndim()
    # Defensive copies: callers may mutate their arrays afterwards.
    self.invsigma = Num.array(invsigma, copy=True)
    assert self.invsigma.shape == (dim, dim)
    self.mu = Num.array(mu, copy=True)
    assert self.mu.shape == (dim,)
    self._offset = offset
84
# NOTE(review): body of __str__; the enclosing `def __str__(self):` line is
# missing from this listing.  Debug-friendly rendering of the parameters.
return '<quadratic: mu=%s; invsigma=%s >' % (str(self.mu), str(self.invsigma))

__repr__ = __str__
89
# Method alias: offset computation delegated to multivariance_classes.
addoff = M._q_addoff
91
92
93
def logp(self, datum):
    """Log-probability of *datum* under the full quadratic model.

    Computes -0.5 * (datum-mu)' . invsigma . (datum-mu), plus the stored
    offset and bias.  Raises M.QuadraticNotNormalizable when the quadratic
    form comes out negative (or NaN), i.e. invsigma is not positive
    semi-definite.
    """
    delta = datum - self.mu
    # Num.dot is equivalent to the old Numeric Num.matrixmultiply here and,
    # unlike matrixmultiply, also exists in modern numpy.
    parab = Num.dot(delta, Num.dot(self.invsigma, delta))
    # `not parab >= 0.0` (rather than `parab < 0.0`) also catches NaN.
    if not parab >= 0.0:
        # Call-form raise: the old `raise E, msg` statement is a syntax
        # error under Python 3 and equivalent to this under Python 2.
        raise M.QuadraticNotNormalizable("Not positive-definite")
    return -parab/2.0 + self.offset() + self.bias
101
102
103
104
105
107 - def __init__(self, dataset=None, ndim=None):
109
110
# NOTE(review): body of the diagonal model's modeldim(); the enclosing
# `def` line is missing from this listing.  Parameter count is m mean
# components plus m per-axis inverse variances.
m = self.ndim()
return 2*m
114
115
# NOTE(review): body of the diagonal model's parameter-unpacking method;
# the enclosing `def` line is missing from this listing (`prms` is its
# parameter).  First half of prms is the mean, second half the per-axis
# inverse variances.
m = self.ndim()
assert len(prms) == self.modeldim()
mu = prms[:m]
invsigma = prms[m:]
return self.new(mu, invsigma)
122
123 - def new(self, mu, invsigma, bias=0.0):
125
# NOTE(review): body of the diagonal model's start method; the enclosing
# `def` line (and probably a docstring) is missing from this listing;
# `data` is its parameter -- presumably a sequence of sample vectors
# (TODO confirm against callers).
if len(data) > 1:
    iv = M.vec_inv_variance(data)
else:
    # One datum gives no variance estimate; fall back to unit inverse
    # variances.
    iv = Num.ones(self.ndim(), Num.Float)

# Start position: a randomly chosen datum followed by the inverse
# variances.  Step-size scales are 1/iv for the mean part and iv**2 for
# the inverse-variance part.
packeddata = Num.concatenate((random.choice(data), iv))
packedsize = Num.concatenate((1.0/iv, iv**2))

return ( packeddata,
    gpkmisc.make_diag(packedsize)
    )
138
139
140
142 - def __init__(self, mu, invsigma, details, bias=0.0, offset=None):
151
# NOTE(review): body of __str__; the enclosing `def __str__(self):` line is
# missing from this listing.  Debug-friendly rendering of the parameters.
return '<diag-quadratic: mu=%s; invsigma=%s >' % (str(self.mu), str(self.invsigma))

__repr__ = __str__
156
# Method alias: diagonal-model offset computation from multivariance_classes.
addoff = M._d_addoff
158
def logp(self, datum):
    """Log-probability of *datum* under the diagonal quadratic model.

    Computes -0.5 * sum(invsigma * (datum-mu)**2), plus the stored offset
    and bias.  Raises M.QuadraticNotNormalizable when the quadratic form
    comes out negative (or NaN), i.e. some inverse variance is negative.
    """
    delta = datum - self.mu
    parab = Num.sum(self.invsigma * delta**2)
    # `not parab >= 0.0` (rather than `parab < 0.0`) also catches NaN.
    if not parab >= 0.0:
        # Call-form raise: the old `raise E, msg` statement is a syntax
        # error under Python 3 and equivalent to this under Python 2.
        raise M.QuadraticNotNormalizable("Not positive-definite")
    return -parab/2.0 + self.offset() + self.bias
165