9 """Unit tests for PyMVPA serial feature inclusion algorithm"""

import unittest

import numpy as N

from mvpa.misc.support import *
from mvpa.datasets.splitters import NFoldSplitter
from mvpa.clfs.transerror import TransferError
from tests_warehouse import *
from tests_warehouse import getMVPattern
from tests_warehouse_clfs import *
from mvpa.clfs.distance import oneMinusCorrelation

from mvpa.support.copy import deepcopy


class SupportFxTests(unittest.TestCase):
    # NB: class and test-method names are reconstructed (the original
    # definition lines were lost)

    def testEvent(self):
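        # Event containers: construction requires at least one attribute,
        # stored items are returned as-is, and asDescreteTime() bins
        # onset/duration into units of dt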
        self.failUnlessRaises(ValueError, Event)
        ev = Event(onset=2.5)

        self.failUnless(ev.items() == [('onset', 2.5)])

        self.failUnless(ev.asDescreteTime(dt=2).items() == [('onset', 1)])
        evc = ev.asDescreteTime(dt=2, storeoffset=True)
        self.failUnless(evc.has_key('features'))
        self.failUnless(evc['features'] == [0.5])

        evc = Event(onset=2.5, duration=3.55).asDescreteTime(dt=2)
        self.failUnless(evc['duration'] == 3)


    def testUniqueLengthNCombinations(self):
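        # all unique length-N combinations of the input, in sorted order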
        self.failUnlessEqual(
            getUniqueLengthNCombinations(range(3), 1),
            [[0], [1], [2]])
        self.failUnlessEqual(
            getUniqueLengthNCombinations(range(4), 2),
            [[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]])
        self.failUnlessEqual(
            getUniqueLengthNCombinations(range(4), 3),
            [[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])


    def testBreakPoints(self):
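        # getBreakPoints() yields the index of each block onset; revisiting
        # an earlier value is an error unless contiguous=False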
        items_cont = [0, 0, 0, 1, 1, 1, 3, 3, 2]
        items_noncont = [0, 0, 1, 1, 0, 3, 2]
        self.failUnlessRaises(ValueError, getBreakPoints, items_noncont)
        self.failUnlessEqual(getBreakPoints(items_noncont, contiguous=False),
                             [0, 2, 4, 5, 6])
        self.failUnlessEqual(getBreakPoints(items_cont), [0, 3, 6, 8])
        self.failUnlessEqual(getBreakPoints(items_cont, contiguous=False),
                             [0, 3, 6, 8])


    def testMapOverlap(self):
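        # MapOverlap summarizes feature selections across binary maps:
        # total overlap fraction, per-feature overlap/spread maps and
        # per-feature selection frequencies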
        mo = MapOverlap()

        maps = [[1, 0, 1, 0],
                [1, 0, 0, 1],
                [1, 0, 1, 0]]

        overlap = mo(maps)

        self.failUnlessEqual(overlap, 1./len(maps[0]))
        self.failUnless((mo.overlap_map == [1, 0, 0, 0]).all())
        self.failUnless((mo.spread_map == [0, 0, 1, 1]).all())
        self.failUnless((mo.ovstats_map == [1, 0, 2./3, 1./3]).all())

        mo = MapOverlap(overlap_threshold=0.5)
        overlap = mo(maps)
        self.failUnlessEqual(overlap, 2./len(maps[0]))
        self.failUnless((mo.overlap_map == [1, 0, 1, 0]).all())
        self.failUnless((mo.spread_map == [0, 0, 0, 1]).all())
        self.failUnless((mo.ovstats_map == [1, 0, 2./3, 1./3]).all())


    def testHarvesting(self):
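        # simplest case: harvest the results of a single call over xrange()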
        self.failUnlessEqual(
            [(-1)*i for i in range(5)],
            Harvester(xrange,
                      [HarvesterCall(lambda x: (-1)*x,
                                     expand_args=False)])(5))

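        # harvest transfer errors of a sample classifier across NFold splits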
        cv = Harvester(NFoldSplitter(cvtype=1),
                       [HarvesterCall(TransferError(sample_clf_nl),
                                      argfilter=[1, 0])])
        data = getMVPattern(10)
        err = N.array(cv(data))

        self.failUnless((err < 0.1).all())
        self.failUnlessEqual(err.shape, (len(data.uniquechunks),))

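        # two calls per split should yield two stacked rows of results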
        cv = Harvester(NFoldSplitter(cvtype=1),
                       [HarvesterCall(TransferError(sample_clf_nl),
                                      argfilter=[1, 0]),
                        HarvesterCall(TransferError(sample_clf_nl),
                                      argfilter=[1, 0])])
        err = N.array(cv(data))
        self.failUnlessEqual(err.shape, (2, len(data.uniquechunks)))

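        # collecting a state variable via attribs returns a dict with both
        # the attribute values and the plain results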
        cv = Harvester(NFoldSplitter(cvtype=1),
                       [HarvesterCall(TransferError(sample_clf_nl,
                                                    enable_states=['confusion']),
                                      argfilter=[1, 0],
                                      attribs=['confusion'])])
        res = cv(data)

        self.failUnless(isinstance(res, dict))
        self.failUnless(res.has_key('confusion') and res.has_key('result'))
        self.failUnless(len(res['result']) == len(data.uniquechunks))


    @sweepargs(pair=[(N.random.normal(size=(10, 20)),
                      N.random.normal(size=(10, 20))),
                     ([1, 2, 3, 0], [1, 3, 2, 0]),
                     ((1, 2, 3, 1), (1, 3, 2, 1))])
    def testIdHash(self, pair):
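        # idhash() must be reproducible for unchanged data, but differ for
        # other objects, for copies of mutables, after in-place modification
        # and after slicing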
        a, b = pair
        a1 = deepcopy(a)
        a_1 = idhash(a)
        self.failUnless(a_1 == idhash(a), msg="Must be of the same idhash")
        self.failUnless(a_1 != idhash(b), msg="Must be of different idhash")
        if isinstance(a, N.ndarray):
            self.failUnless(a_1 != idhash(a.T),
                            msg=".T must be of different idhash")
        if not isinstance(a, tuple):
            self.failUnless(a_1 != idhash(a1), msg="Must be of different idhash")
            a[2] += 1
            a_2 = idhash(a)
            self.failUnless(a_1 != a_2, msg="Idhash must change")
        else:
            a_2 = a_1
        a = a[2:]
        a_3 = idhash(a)
        self.failUnless(a_2 != a_3, msg="Idhash must change after slicing")


    def testCorrelation(self):
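        # oneMinusCorrelation() returns 1 - r; sanity-check shapes,
        # unit self-correlation and agreement with N.corrcoef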
        X = N.random.rand(20, 80)

        C = 1 - oneMinusCorrelation(X, X)

        self.failUnless(C.shape == (20, 20))
        self.failUnless((N.abs(N.diag(C) - 1).mean() < 0.00001).all())

        Y = N.random.rand(5, 80)
        C2 = 1 - oneMinusCorrelation(X, Y)

        self.failUnless(C2.shape == (20, 5))
        # use abs() so deviations in either direction are caught
        self.failUnless(N.abs(C2[10, 2] - N.corrcoef(X[10], Y[2])[0, 1])
                        < 0.000001)

    def testVersionToTuple(self):
204 """Test conversion of versions from strings
205 """
206
207 self.failUnless(version_to_tuple('0.0.01') == (0, 0, 1))
208 self.failUnless(version_to_tuple('0.7.1rc3') == (0, 7, 1, 'rc', 3))


    def testSmartVersion(self):
212 """Test our ad-hoc SmartVersion
213 """
214 SV = SmartVersion
215
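        # each pair is ordered (smaller, larger); comparisons must hold
        # against SmartVersion, plain string and tuple representations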
        for v1, v2 in (
                ('0.0.1', '0.0.2'),
                ('0.0.1', '0.1'),
                ('0.0.1', '0.1.0'),
                ('0.0.1', '0.0.1a'),
                ('0.0.1', '0.0.1+svn234'),
                ('0.0.1+svn234', '0.0.1+svn235'),
                ('0.0.1dev1', '0.0.1'),
                ('0.0.1dev1', '0.0.1rc3'),
                ('0.7.1rc3', '0.7.1'),
                ('0.0.1-dev1', '0.0.1'),
                ('0.0.1-svn1', '0.0.1'),
                ('0.0.1~p', '0.0.1'),
                ('0.0.1~prior.1.2', '0.0.1'),
                ):
231 self.failUnless(SV(v1) < SV(v2),
232 msg="Failed to compare %s to %s" % (v1, v2))
233 self.failUnless(SV(v2) > SV(v1),
234 msg="Failed to reverse compare %s to %s" % (v2, v1))
235
236 self.failUnless(SV(v1) < v2,
237 msg="Failed to compare %s to string %s" % (v1, v2))
238 self.failUnless(v1 < SV(v2),
239 msg="Failed to compare string %s to %s" % (v1, v2))
240
241 self.failUnless(SV(v1) < version_to_tuple(v2),
242 msg="Failed to compare %s to tuple of %s"
243 % (v1, v2))
244 self.failUnless(version_to_tuple(v1) < SV(v2),
245 msg="Failed to compare tuple of %s to %s"
246 % (v1, v2))


def suite():
    return unittest.makeSuite(SupportFxTests)


if __name__ == '__main__':
    import runner