1
|
|
|
# |
2
|
|
|
# Copyright 2015 Quantopian, Inc. |
3
|
|
|
# |
4
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
5
|
|
|
# you may not use this file except in compliance with the License. |
6
|
|
|
# You may obtain a copy of the License at |
7
|
|
|
# |
8
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
9
|
|
|
# |
10
|
|
|
# Unless required by applicable law or agreed to in writing, software |
11
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
12
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13
|
|
|
# See the License for the specific language governing permissions and |
14
|
|
|
# limitations under the License. |
15
|
|
|
|
16
|
|
|
""" |
17
|
|
|
Tests for the zipline.assets package |
18
|
|
|
""" |
19
|
|
|
from contextlib import contextmanager |
20
|
|
|
from datetime import datetime, timedelta |
21
|
|
|
import pickle |
22
|
|
|
import sys |
23
|
|
|
from unittest import TestCase |
24
|
|
|
import uuid |
25
|
|
|
import warnings |
26
|
|
|
|
27
|
|
|
import pandas as pd |
28
|
|
|
from pandas.tseries.tools import normalize_date |
29
|
|
|
from pandas.util.testing import assert_frame_equal |
30
|
|
|
|
31
|
|
|
from nose_parameterized import parameterized |
32
|
|
|
from numpy import full |
33
|
|
|
import sqlalchemy as sa |
34
|
|
|
|
35
|
|
|
from zipline.assets import ( |
36
|
|
|
Asset, |
37
|
|
|
Equity, |
38
|
|
|
Future, |
39
|
|
|
AssetFinder, |
40
|
|
|
AssetFinderCachedEquities, |
41
|
|
|
) |
42
|
|
|
from six import itervalues |
43
|
|
|
from toolz import valmap |
44
|
|
|
|
45
|
|
|
from zipline.assets.futures import ( |
46
|
|
|
cme_code_to_month, |
47
|
|
|
FutureChain, |
48
|
|
|
month_to_cme_code |
49
|
|
|
) |
50
|
|
|
from zipline.assets.asset_writer import ( |
51
|
|
|
check_version_info, |
52
|
|
|
write_version_info, |
53
|
|
|
) |
54
|
|
|
from zipline.assets.asset_db_schema import ( |
55
|
|
|
ASSET_DB_VERSION, |
56
|
|
|
_version_table_schema, |
57
|
|
|
) |
58
|
|
|
from zipline.assets.asset_db_migrations import ( |
59
|
|
|
downgrade |
60
|
|
|
) |
61
|
|
|
from zipline.errors import ( |
62
|
|
|
EquitiesNotFound, |
63
|
|
|
FutureContractsNotFound, |
64
|
|
|
MultipleSymbolsFound, |
65
|
|
|
RootSymbolNotFound, |
66
|
|
|
AssetDBVersionError, |
67
|
|
|
SidAssignmentError, |
68
|
|
|
SidsNotFound, |
69
|
|
|
SymbolNotFound, |
70
|
|
|
AssetDBImpossibleDowngrade, |
71
|
|
|
) |
72
|
|
|
from zipline.finance.trading import TradingEnvironment, noop_load |
73
|
|
|
from zipline.utils.test_utils import ( |
74
|
|
|
all_subindices, |
75
|
|
|
make_commodity_future_info, |
76
|
|
|
make_rotating_equity_info, |
77
|
|
|
make_simple_equity_info, |
78
|
|
|
tmp_assets_db, |
79
|
|
|
tmp_asset_finder, |
80
|
|
|
) |
81
|
|
|
|
82
|
|
|
|
83
|
|
|
@contextmanager
def build_lookup_generic_cases(asset_finder_type):
    """
    Generate test cases for the type of asset finder specified by
    asset_finder_type for test_lookup_generic.

    Yields a tuple of (finder, input, as_of_date, expected_output) cases
    covering scalar lookups (Asset, int, symbol) and iterable lookups
    (lists/iterators of Assets, ints, symbols, and mixed types).
    """

    unique_start = pd.Timestamp('2013-01-01', tz='UTC')
    unique_end = pd.Timestamp('2014-01-01', tz='UTC')

    # Two sids share the symbol 'duplicated' over disjoint one-day ranges,
    # so lookups must be disambiguated by an as-of date.
    dupe_0_start = pd.Timestamp('2013-01-01', tz='UTC')
    dupe_0_end = dupe_0_start + timedelta(days=1)

    dupe_1_start = pd.Timestamp('2013-01-03', tz='UTC')
    dupe_1_end = dupe_1_start + timedelta(days=1)

    frame = pd.DataFrame.from_records(
        [
            {
                'sid': 0,
                'symbol': 'duplicated',
                'start_date': dupe_0_start.value,
                'end_date': dupe_0_end.value,
                'exchange': '',
            },
            {
                'sid': 1,
                'symbol': 'duplicated',
                'start_date': dupe_1_start.value,
                'end_date': dupe_1_end.value,
                'exchange': '',
            },
            {
                'sid': 2,
                'symbol': 'unique',
                'start_date': unique_start.value,
                'end_date': unique_end.value,
                'exchange': '',
            },
        ],
        index='sid')
    with tmp_assets_db(equities=frame) as assets_db:
        finder = asset_finder_type(assets_db)
        dupe_0, dupe_1, unique = assets = [
            finder.retrieve_asset(i)
            for i in range(3)
        ]

        # Re-read the start dates from the retrieved assets so expected
        # values match exactly what the finder stored.
        dupe_0_start = dupe_0.start_date
        dupe_1_start = dupe_1.start_date
        yield (
            ##
            # Scalars

            # Asset object
            (finder, assets[0], None, assets[0]),
            (finder, assets[1], None, assets[1]),
            (finder, assets[2], None, assets[2]),
            # int
            (finder, 0, None, assets[0]),
            (finder, 1, None, assets[1]),
            (finder, 2, None, assets[2]),
            # Duplicated symbol with resolution date
            (finder, 'DUPLICATED', dupe_0_start, dupe_0),
            (finder, 'DUPLICATED', dupe_1_start, dupe_1),
            # Unique symbol, with or without resolution date.
            (finder, 'UNIQUE', unique_start, unique),
            (finder, 'UNIQUE', None, unique),

            ##
            # Iterables

            # Iterables of Asset objects.
            (finder, assets, None, assets),
            (finder, iter(assets), None, assets),
            # Iterables of ints
            (finder, (0, 1), None, assets[:-1]),
            (finder, iter((0, 1)), None, assets[:-1]),
            # Iterables of symbols.
            (finder, ('DUPLICATED', 'UNIQUE'), dupe_0_start, [dupe_0, unique]),
            (finder, ('DUPLICATED', 'UNIQUE'), dupe_1_start, [dupe_1, unique]),
            # Mixed types
            (finder,
             ('DUPLICATED', 2, 'UNIQUE', 1, dupe_1),
             dupe_0_start,
             [dupe_0, assets[2], unique, assets[1], dupe_1]),
        )
170
|
|
|
|
171
|
|
|
|
172
|
|
|
class AssetTestCase(TestCase):
    """
    Tests for the integer-like equality, ordering, hashing, and pickling
    behavior of ``Asset`` objects.
    """

    def test_asset_object(self):
        # An Asset hashes and compares equal to its integer sid, in both
        # directions. Note: assertEquals is a deprecated alias; use
        # assertEqual.
        self.assertEqual({5061: 'foo'}[Asset(5061)], 'foo')
        self.assertEqual(Asset(5061), 5061)
        self.assertEqual(5061, Asset(5061))

        self.assertEqual(Asset(5061), Asset(5061))
        self.assertEqual(int(Asset(5061)), 5061)

        self.assertEqual(str(Asset(5061)), 'Asset(5061)')

    def test_asset_is_pickleable(self):

        # Very wow
        s = Asset(
            1337,
            symbol="DOGE",
            asset_name="DOGECOIN",
            start_date=pd.Timestamp('2013-12-08 9:31AM', tz='UTC'),
            end_date=pd.Timestamp('2014-06-25 11:21AM', tz='UTC'),
            first_traded=pd.Timestamp('2013-12-08 9:31AM', tz='UTC'),
            exchange='THE MOON',
        )
        s_unpickled = pickle.loads(pickle.dumps(s))

        # De-duplicated relative to the original list, which repeated
        # 'end_date' and 'start_date'.
        attrs_to_check = ['end_date',
                          'exchange',
                          'first_traded',
                          'asset_name',
                          'start_date',
                          'sid',
                          'symbol']

        for attr in attrs_to_check:
            self.assertEqual(getattr(s, attr), getattr(s_unpickled, attr))

    def test_asset_comparisons(self):

        s_23 = Asset(23)
        s_24 = Asset(24)

        # Equality against self and against the raw sid.
        self.assertEqual(s_23, s_23)
        self.assertEqual(s_23, 23)
        self.assertEqual(23, s_23)

        # Inequality against other sids and non-int types.
        self.assertNotEqual(s_23, s_24)
        self.assertNotEqual(s_23, 24)
        self.assertNotEqual(s_23, "23")
        self.assertNotEqual(s_23, 23.5)
        self.assertNotEqual(s_23, [])
        self.assertNotEqual(s_23, None)

        # Ordering follows sid ordering, mixed with raw ints.
        self.assertLess(s_23, s_24)
        self.assertLess(s_23, 24)
        self.assertGreater(24, s_23)
        self.assertGreater(s_24, s_23)

    def test_lt(self):
        self.assertTrue(Asset(3) < Asset(4))
        self.assertFalse(Asset(4) < Asset(4))
        self.assertFalse(Asset(5) < Asset(4))

    def test_le(self):
        self.assertTrue(Asset(3) <= Asset(4))
        self.assertTrue(Asset(4) <= Asset(4))
        self.assertFalse(Asset(5) <= Asset(4))

    def test_eq(self):
        self.assertFalse(Asset(3) == Asset(4))
        self.assertTrue(Asset(4) == Asset(4))
        self.assertFalse(Asset(5) == Asset(4))

    def test_ge(self):
        self.assertFalse(Asset(3) >= Asset(4))
        self.assertTrue(Asset(4) >= Asset(4))
        self.assertTrue(Asset(5) >= Asset(4))

    def test_gt(self):
        self.assertFalse(Asset(3) > Asset(4))
        self.assertFalse(Asset(4) > Asset(4))
        self.assertTrue(Asset(5) > Asset(4))

    def test_type_mismatch(self):
        # Python 2 permits ordering comparisons between unrelated types;
        # Python 3 raises TypeError.
        if sys.version_info.major < 3:
            self.assertIsNotNone(Asset(3) < 'a')
            self.assertIsNotNone('a' < Asset(3))
        else:
            with self.assertRaises(TypeError):
                Asset(3) < 'a'
            with self.assertRaises(TypeError):
                'a' < Asset(3)
266
|
|
|
|
267
|
|
|
|
268
|
|
|
class TestFuture(TestCase):
    """
    Tests for the ``Future`` asset type: repr/str, dict round-tripping,
    reduction for pickling, and symbol lookup through an asset finder.
    """

    @classmethod
    def setUpClass(cls):
        cls.future = Future(
            2468,
            symbol='OMH15',
            root_symbol='OM',
            notice_date=pd.Timestamp('2014-01-20', tz='UTC'),
            expiration_date=pd.Timestamp('2014-02-20', tz='UTC'),
            auto_close_date=pd.Timestamp('2014-01-18', tz='UTC'),
            tick_size=.01,
            multiplier=500
        )
        cls.future2 = Future(
            0,
            symbol='CLG06',
            root_symbol='CL',
            start_date=pd.Timestamp('2005-12-01', tz='UTC'),
            notice_date=pd.Timestamp('2005-12-20', tz='UTC'),
            expiration_date=pd.Timestamp('2006-01-20', tz='UTC')
        )
        env = TradingEnvironment(load=noop_load)
        # Refer to the class via ``cls`` rather than naming TestFuture
        # explicitly, so the fixture remains correct under subclassing.
        env.write_data(futures_identifiers=[cls.future, cls.future2])
        cls.asset_finder = env.asset_finder

    def test_str(self):
        # Use the str() builtin rather than calling __str__ directly.
        self.assertEqual("Future(2468 [OMH15])", str(self.future))

    def test_repr(self):
        # Use the repr() builtin rather than calling __repr__ directly.
        reprd = repr(self.future)
        self.assertTrue("Future" in reprd)
        self.assertTrue("2468" in reprd)
        self.assertTrue("OMH15" in reprd)
        self.assertTrue("root_symbol='OM'" in reprd)
        self.assertTrue(("notice_date=Timestamp('2014-01-20 00:00:00+0000', "
                         "tz='UTC')") in reprd)
        self.assertTrue("expiration_date=Timestamp('2014-02-20 00:00:00+0000'"
                        in reprd)
        self.assertTrue("auto_close_date=Timestamp('2014-01-18 00:00:00+0000'"
                        in reprd)
        self.assertTrue("tick_size=0.01" in reprd)
        self.assertTrue("multiplier=500" in reprd)

    def test_reduce(self):
        # __reduce__ should name the Future class so pickling round-trips.
        reduced = self.future.__reduce__()
        self.assertEqual(Future, reduced[0])

    def test_to_and_from_dict(self):
        dictd = self.future.to_dict()
        # All Future-specific fields must survive the dict conversion.
        for field in ('root_symbol',
                      'notice_date',
                      'expiration_date',
                      'auto_close_date',
                      'tick_size',
                      'multiplier'):
            self.assertTrue(field in dictd)

        from_dict = Future.from_dict(dictd)
        self.assertTrue(isinstance(from_dict, Future))
        self.assertEqual(self.future, from_dict)

    def test_root_symbol(self):
        self.assertEqual('OM', self.future.root_symbol)

    def test_lookup_future_symbol(self):
        """
        Test the lookup_future_symbol method.
        """
        om = self.asset_finder.lookup_future_symbol('OMH15')
        self.assertEqual(om.sid, 2468)
        self.assertEqual(om.symbol, 'OMH15')
        self.assertEqual(om.root_symbol, 'OM')
        self.assertEqual(om.notice_date, pd.Timestamp('2014-01-20', tz='UTC'))
        self.assertEqual(om.expiration_date,
                         pd.Timestamp('2014-02-20', tz='UTC'))
        self.assertEqual(om.auto_close_date,
                         pd.Timestamp('2014-01-18', tz='UTC'))

        cl = self.asset_finder.lookup_future_symbol('CLG06')
        self.assertEqual(cl.sid, 0)
        self.assertEqual(cl.symbol, 'CLG06')
        self.assertEqual(cl.root_symbol, 'CL')
        self.assertEqual(cl.start_date, pd.Timestamp('2005-12-01', tz='UTC'))
        self.assertEqual(cl.notice_date, pd.Timestamp('2005-12-20', tz='UTC'))
        self.assertEqual(cl.expiration_date,
                         pd.Timestamp('2006-01-20', tz='UTC'))

        # Empty, malformed, and unknown symbols all raise SymbolNotFound.
        for bad_symbol in ('', '#&?!', 'FOOBAR', 'XXX99'):
            with self.assertRaises(SymbolNotFound):
                self.asset_finder.lookup_future_symbol(bad_symbol)
368
|
|
|
|
369
|
|
|
|
370
|
|
|
class AssetFinderTestCase(TestCase): |
371
|
|
|
|
372
|
|
|
    def setUp(self):
        # Fresh TradingEnvironment per test. Subclasses may override
        # asset_finder_type to run this suite against alternate finder
        # implementations (e.g. AssetFinderCachedEquities).
        self.env = TradingEnvironment(load=noop_load)
        self.asset_finder_type = AssetFinder
375
|
|
|
|
376
|
|
|
    def test_lookup_symbol_delimited(self):
        """Symbols stored with a delimiter ('TEST.1') should not resolve
        from a bare prefix, concatenated form, or an unsupported delimiter,
        but should resolve through any supported fuzzy delimiter."""
        as_of = pd.Timestamp('2013-01-01', tz='UTC')
        frame = pd.DataFrame.from_records(
            [
                {
                    'sid': i,
                    'symbol': 'TEST.%d' % i,
                    'company_name': "company%d" % i,
                    'start_date': as_of.value,
                    'end_date': as_of.value,
                    'exchange': uuid.uuid4().hex
                }
                for i in range(3)
            ]
        )
        self.env.write_data(equities_df=frame)
        finder = self.asset_finder_type(self.env.engine)
        asset_0, asset_1, asset_2 = (
            finder.retrieve_asset(i) for i in range(3)
        )

        # we do it twice to catch caching bugs
        for i in range(2):
            with self.assertRaises(SymbolNotFound):
                finder.lookup_symbol('TEST', as_of)
            with self.assertRaises(SymbolNotFound):
                finder.lookup_symbol('TEST1', as_of)
            # '@' is not a supported delimiter
            with self.assertRaises(SymbolNotFound):
                finder.lookup_symbol('TEST@1', as_of)

            # Adding an unnecessary fuzzy shouldn't matter.
            for fuzzy_char in ['-', '/', '_', '.']:
                self.assertEqual(
                    asset_1,
                    finder.lookup_symbol('TEST%s1' % fuzzy_char, as_of)
                )
413
|
|
|
|
414
|
|
|
    def test_lookup_symbol_fuzzy(self):
        """Fuzzy lookup should match delimiter-stripped forms of a symbol,
        while exact lookup requires the stored form."""
        metadata = {
            0: {'symbol': 'PRTY_HRD'},
            1: {'symbol': 'BRKA'},
            2: {'symbol': 'BRK_A'},
        }
        self.env.write_data(equities_data=metadata)
        finder = self.env.asset_finder
        dt = pd.Timestamp('2013-01-01', tz='UTC')

        # Try combos of looking up PRTYHRD with and without a time or fuzzy
        # Both non-fuzzys get no result
        with self.assertRaises(SymbolNotFound):
            finder.lookup_symbol('PRTYHRD', None)
        with self.assertRaises(SymbolNotFound):
            finder.lookup_symbol('PRTYHRD', dt)
        # Both fuzzys work
        self.assertEqual(0, finder.lookup_symbol('PRTYHRD', None, fuzzy=True))
        self.assertEqual(0, finder.lookup_symbol('PRTYHRD', dt, fuzzy=True))

        # Try combos of looking up PRTY_HRD, all returning sid 0
        self.assertEqual(0, finder.lookup_symbol('PRTY_HRD', None))
        self.assertEqual(0, finder.lookup_symbol('PRTY_HRD', dt))
        self.assertEqual(0, finder.lookup_symbol('PRTY_HRD', None, fuzzy=True))
        self.assertEqual(0, finder.lookup_symbol('PRTY_HRD', dt, fuzzy=True))

        # Try combos of looking up BRKA, all returning sid 1
        self.assertEqual(1, finder.lookup_symbol('BRKA', None))
        self.assertEqual(1, finder.lookup_symbol('BRKA', dt))
        self.assertEqual(1, finder.lookup_symbol('BRKA', None, fuzzy=True))
        self.assertEqual(1, finder.lookup_symbol('BRKA', dt, fuzzy=True))

        # Try combos of looking up BRK_A, all returning sid 2
        self.assertEqual(2, finder.lookup_symbol('BRK_A', None))
        self.assertEqual(2, finder.lookup_symbol('BRK_A', dt))
        self.assertEqual(2, finder.lookup_symbol('BRK_A', None, fuzzy=True))
        self.assertEqual(2, finder.lookup_symbol('BRK_A', dt, fuzzy=True))
451
|
|
|
|
452
|
|
|
    def test_lookup_symbol(self):
        """A symbol reused by several sids over non-overlapping date ranges
        should resolve to the sid live on the supplied as-of date, and
        raise MultipleSymbolsFound when no date is given."""

        # Incrementing by two so that start and end dates for each
        # generated Asset don't overlap (each Asset's end_date is the
        # day after its start date.)
        dates = pd.date_range('2013-01-01', freq='2D', periods=5, tz='UTC')
        df = pd.DataFrame.from_records(
            [
                {
                    'sid': i,
                    'symbol': 'existing',
                    'start_date': date.value,
                    'end_date': (date + timedelta(days=1)).value,
                    'exchange': 'NYSE',
                }
                for i, date in enumerate(dates)
            ]
        )
        self.env.write_data(equities_df=df)
        finder = self.asset_finder_type(self.env.engine)
        for _ in range(2):  # Run checks twice to test for caching bugs.
            with self.assertRaises(SymbolNotFound):
                finder.lookup_symbol('NON_EXISTING', dates[0])

            with self.assertRaises(MultipleSymbolsFound):
                finder.lookup_symbol('EXISTING', None)

            for i, date in enumerate(dates):
                # Verify that we correctly resolve multiple symbols using
                # the supplied date
                result = finder.lookup_symbol('EXISTING', date)
                self.assertEqual(result.symbol, 'EXISTING')
                self.assertEqual(result.sid, i)
485
|
|
|
|
486
|
|
|
    def test_lookup_symbol_from_multiple_valid(self):
        # This test asserts that we resolve conflicts in accordance with the
        # following rules when we have multiple assets holding the same symbol
        # at the same time:

        # If multiple SIDs exist for symbol S at time T, return the candidate
        # SID whose start_date is highest. (200 cases)

        # If multiple SIDs exist for symbol S at time T, the best candidate
        # SIDs share the highest start_date, return the SID with the highest
        # end_date. (34 cases)

        # It is the opinion of the author (ssanderson) that we should consider
        # this malformed input and fail here. But this is the current intended
        # behavior of the code, and I accidentally broke it while refactoring.
        # These will serve as regression tests until the time comes that we
        # decide to enforce this as an error.

        # See https://github.com/quantopian/zipline/issues/837 for more
        # details.

        df = pd.DataFrame.from_records(
            [
                {
                    'sid': 1,
                    'symbol': 'multiple',
                    'start_date': pd.Timestamp('2010-01-01'),
                    'end_date': pd.Timestamp('2012-01-01'),
                    'exchange': 'NYSE'
                },
                # Same as asset 1, but with a later end date.
                {
                    'sid': 2,
                    'symbol': 'multiple',
                    'start_date': pd.Timestamp('2010-01-01'),
                    'end_date': pd.Timestamp('2013-01-01'),
                    'exchange': 'NYSE'
                },
                # Same as asset 1, but with a later start_date
                {
                    'sid': 3,
                    'symbol': 'multiple',
                    'start_date': pd.Timestamp('2011-01-01'),
                    'end_date': pd.Timestamp('2012-01-01'),
                    'exchange': 'NYSE'
                },
            ]
        )

        def check(expected_sid, date):
            # Helper: resolve 'MULTIPLE' as of `date` and assert the winner.
            result = finder.lookup_symbol(
                'MULTIPLE', date,
            )
            self.assertEqual(result.symbol, 'MULTIPLE')
            self.assertEqual(result.sid, expected_sid)

        with tmp_asset_finder(finder_cls=self.asset_finder_type,
                              equities=df) as finder:
            self.assertIsInstance(finder, self.asset_finder_type)

            # Sids 1 and 2 are eligible here. We should get asset 2 because it
            # has the later end_date.
            check(2, pd.Timestamp('2010-12-31'))

            # Sids 1, 2, and 3 are eligible here. We should get sid 3 because
            # it has a later start_date
            check(3, pd.Timestamp('2011-01-01'))
553
|
|
|
|
554
|
|
|
    def test_lookup_generic(self):
        """
        Ensure that lookup_generic works with various permutations of inputs.

        Cases come from build_lookup_generic_cases; each case is a
        (finder, input, as_of_date, expected) tuple and must resolve fully
        with no missing entries.
        """
        with build_lookup_generic_cases(self.asset_finder_type) as cases:
            for finder, symbols, reference_date, expected in cases:
                results, missing = finder.lookup_generic(symbols,
                                                         reference_date)
                self.assertEqual(results, expected)
                self.assertEqual(missing, [])
564
|
|
|
|
565
|
|
|
    def test_lookup_generic_handle_missing(self):
        """lookup_generic should partition its input into resolved assets
        and missing identifiers rather than raising on the first miss."""
        data = pd.DataFrame.from_records(
            [
                {
                    'sid': 0,
                    'symbol': 'real',
                    'start_date': pd.Timestamp('2013-1-1', tz='UTC'),
                    'end_date': pd.Timestamp('2014-1-1', tz='UTC'),
                    'exchange': '',
                },
                {
                    'sid': 1,
                    'symbol': 'also_real',
                    'start_date': pd.Timestamp('2013-1-1', tz='UTC'),
                    'end_date': pd.Timestamp('2014-1-1', tz='UTC'),
                    'exchange': '',
                },
                # Sid whose end date is before our query date. We should
                # still correctly find it.
                {
                    'sid': 2,
                    'symbol': 'real_but_old',
                    'start_date': pd.Timestamp('2002-1-1', tz='UTC'),
                    'end_date': pd.Timestamp('2003-1-1', tz='UTC'),
                    'exchange': '',
                },
                # Sid whose start_date is **after** our query date. We should
                # **not** find it.
                {
                    'sid': 3,
                    'symbol': 'real_but_in_the_future',
                    'start_date': pd.Timestamp('2014-1-1', tz='UTC'),
                    'end_date': pd.Timestamp('2020-1-1', tz='UTC'),
                    'exchange': 'THE FUTURE',
                },
            ]
        )
        self.env.write_data(equities_df=data)
        finder = self.asset_finder_type(self.env.engine)
        results, missing = finder.lookup_generic(
            ['REAL', 1, 'FAKE', 'REAL_BUT_OLD', 'REAL_BUT_IN_THE_FUTURE'],
            pd.Timestamp('2013-02-01', tz='UTC'),
        )

        self.assertEqual(len(results), 3)
        self.assertEqual(results[0].symbol, 'REAL')
        self.assertEqual(results[0].sid, 0)
        self.assertEqual(results[1].symbol, 'ALSO_REAL')
        self.assertEqual(results[1].sid, 1)
        self.assertEqual(results[2].symbol, 'REAL_BUT_OLD')
        self.assertEqual(results[2].sid, 2)

        self.assertEqual(len(missing), 2)
        self.assertEqual(missing[0], 'FAKE')
        self.assertEqual(missing[1], 'REAL_BUT_IN_THE_FUTURE')
620
|
|
|
|
621
|
|
|
    def test_insert_metadata(self):
        """Metadata written through write_data should round-trip into an
        Equity; unknown extra fields ('foo_data') must not become
        attributes of the retrieved asset."""
        data = {0: {'start_date': '2014-01-01',
                    'end_date': '2015-01-01',
                    'symbol': "PLAY",
                    'foo_data': "FOO"}}
        self.env.write_data(equities_data=data)
        finder = self.asset_finder_type(self.env.engine)
        # Test proper insertion
        equity = finder.retrieve_asset(0)
        self.assertIsInstance(equity, Equity)
        self.assertEqual('PLAY', equity.symbol)
        self.assertEqual(pd.Timestamp('2015-01-01', tz='UTC'),
                         equity.end_date)

        # Test invalid field
        with self.assertRaises(AttributeError):
            equity.foo_data
638
|
|
|
|
639
|
|
|
def test_consume_metadata(self): |
640
|
|
|
|
641
|
|
|
# Test dict consumption |
642
|
|
|
dict_to_consume = {0: {'symbol': 'PLAY'}, |
643
|
|
|
1: {'symbol': 'MSFT'}} |
644
|
|
|
self.env.write_data(equities_data=dict_to_consume) |
645
|
|
|
finder = self.asset_finder_type(self.env.engine) |
646
|
|
|
|
647
|
|
|
equity = finder.retrieve_asset(0) |
648
|
|
|
self.assertIsInstance(equity, Equity) |
649
|
|
|
self.assertEqual('PLAY', equity.symbol) |
650
|
|
|
|
651
|
|
|
# Test dataframe consumption |
652
|
|
|
df = pd.DataFrame(columns=['asset_name', 'exchange'], index=[0, 1]) |
653
|
|
|
df['asset_name'][0] = "Dave'N'Busters" |
654
|
|
|
df['exchange'][0] = "NASDAQ" |
655
|
|
|
df['asset_name'][1] = "Microsoft" |
656
|
|
|
df['exchange'][1] = "NYSE" |
657
|
|
|
self.env = TradingEnvironment(load=noop_load) |
658
|
|
|
self.env.write_data(equities_df=df) |
659
|
|
|
finder = self.asset_finder_type(self.env.engine) |
660
|
|
|
self.assertEqual('NASDAQ', finder.retrieve_asset(0).exchange) |
661
|
|
|
self.assertEqual('Microsoft', finder.retrieve_asset(1).asset_name) |
662
|
|
|
|
663
|
|
|
    def test_consume_asset_as_identifier(self):
        """Fully-built Asset objects should themselves be accepted as
        identifiers and round-trip through write_data/retrieve_asset."""
        # Build some end dates
        eq_end = pd.Timestamp('2012-01-01', tz='UTC')
        fut_end = pd.Timestamp('2008-01-01', tz='UTC')

        # Build some simple Assets
        equity_asset = Equity(1, symbol="TESTEQ", end_date=eq_end)
        future_asset = Future(200, symbol="TESTFUT", end_date=fut_end)

        # Consume the Assets
        self.env.write_data(equities_identifiers=[equity_asset],
                            futures_identifiers=[future_asset])
        finder = self.asset_finder_type(self.env.engine)

        # Test equality with newly built Assets
        self.assertEqual(equity_asset, finder.retrieve_asset(1))
        self.assertEqual(future_asset, finder.retrieve_asset(200))
        self.assertEqual(eq_end, finder.retrieve_asset(1).end_date)
        self.assertEqual(fut_end, finder.retrieve_asset(200).end_date)
682
|
|
|
|
683
|
|
|
    def test_sid_assignment(self):
        """Writing bare symbol identifiers with allow_sid_assignment=True
        should auto-assign distinct sids."""

        # This metadata does not contain SIDs
        metadata = ['PLAY', 'MSFT']

        today = normalize_date(pd.Timestamp('2015-07-09', tz='UTC'))

        # Write data with sid assignment
        self.env.write_data(equities_identifiers=metadata,
                            allow_sid_assignment=True)

        # Verify that Assets were built and different sids were assigned
        finder = self.asset_finder_type(self.env.engine)
        play = finder.lookup_symbol('PLAY', today)
        msft = finder.lookup_symbol('MSFT', today)
        self.assertEqual('PLAY', play.symbol)
        self.assertIsNotNone(play.sid)
        self.assertNotEqual(play.sid, msft.sid)
701
|
|
|
|
702
|
|
|
    def test_sid_assignment_failure(self):
        """Writing bare symbol identifiers with allow_sid_assignment=False
        should raise SidAssignmentError."""

        # This metadata does not contain SIDs
        metadata = ['PLAY', 'MSFT']

        # Write data without sid assignment, asserting failure
        with self.assertRaises(SidAssignmentError):
            self.env.write_data(equities_identifiers=metadata,
                                allow_sid_assignment=False)
711
|
|
|
|
712
|
|
|
    def test_security_dates_warning(self):
        """Accessing the legacy security_* attributes should emit one
        DeprecationWarning per access."""

        # Build an asset with an end_date
        eq_end = pd.Timestamp('2012-01-01', tz='UTC')
        equity_asset = Equity(1, symbol="TESTEQ", end_date=eq_end)

        # Catch all warnings
        with warnings.catch_warnings(record=True) as w:
            # Cause all warnings to always be triggered
            warnings.simplefilter("always")
            equity_asset.security_start_date
            equity_asset.security_end_date
            equity_asset.security_name
            # Verify the warning
            self.assertEqual(3, len(w))
            for warning in w:
                self.assertTrue(issubclass(warning.category,
                                           DeprecationWarning))
730
|
|
|
|
731
|
|
|
    def test_lookup_future_chain(self):
        """lookup_future_chain should return contracts ordered by date,
        filtered by start/notice/expiration dates relative to the as-of
        date, and return the whole chain for pd.NaT."""
        metadata = {
            # Notice day is today, so should be valid.
            0: {
                'symbol': 'ADN15',
                'root_symbol': 'AD',
                'notice_date': pd.Timestamp('2015-05-14', tz='UTC'),
                'expiration_date': pd.Timestamp('2015-06-14', tz='UTC'),
                'start_date': pd.Timestamp('2015-01-01', tz='UTC')
            },
            1: {
                'symbol': 'ADV15',
                'root_symbol': 'AD',
                'notice_date': pd.Timestamp('2015-08-14', tz='UTC'),
                'expiration_date': pd.Timestamp('2015-09-14', tz='UTC'),
                'start_date': pd.Timestamp('2015-01-01', tz='UTC')
            },
            # Starts trading today, so should be valid.
            2: {
                'symbol': 'ADF16',
                'root_symbol': 'AD',
                'notice_date': pd.Timestamp('2015-11-16', tz='UTC'),
                'expiration_date': pd.Timestamp('2015-12-16', tz='UTC'),
                'start_date': pd.Timestamp('2015-05-14', tz='UTC')
            },
            # Starts trading in August, so not valid.
            3: {
                'symbol': 'ADX16',
                'root_symbol': 'AD',
                'notice_date': pd.Timestamp('2015-11-16', tz='UTC'),
                'expiration_date': pd.Timestamp('2015-12-16', tz='UTC'),
                'start_date': pd.Timestamp('2015-08-01', tz='UTC')
            },
            # Notice date comes after expiration
            4: {
                'symbol': 'ADZ16',
                'root_symbol': 'AD',
                'notice_date': pd.Timestamp('2016-11-25', tz='UTC'),
                'expiration_date': pd.Timestamp('2016-11-16', tz='UTC'),
                'start_date': pd.Timestamp('2015-08-01', tz='UTC')
            },
            # This contract has no start date and also this contract should be
            # last in all chains
            5: {
                'symbol': 'ADZ20',
                'root_symbol': 'AD',
                'notice_date': pd.Timestamp('2020-11-25', tz='UTC'),
                'expiration_date': pd.Timestamp('2020-11-16', tz='UTC')
            },
        }
        self.env.write_data(futures_data=metadata)
        finder = self.asset_finder_type(self.env.engine)
        dt = pd.Timestamp('2015-05-14', tz='UTC')
        dt_2 = pd.Timestamp('2015-10-14', tz='UTC')
        dt_3 = pd.Timestamp('2016-11-17', tz='UTC')

        # Check that we get the expected number of contracts, in the
        # right order
        ad_contracts = finder.lookup_future_chain('AD', dt)
        self.assertEqual(len(ad_contracts), 6)
        self.assertEqual(ad_contracts[0].sid, 0)
        self.assertEqual(ad_contracts[1].sid, 1)
        self.assertEqual(ad_contracts[5].sid, 5)

        # Check that, when some contracts have expired, the chain has advanced
        # properly to the next contracts
        ad_contracts = finder.lookup_future_chain('AD', dt_2)
        self.assertEqual(len(ad_contracts), 4)
        self.assertEqual(ad_contracts[0].sid, 2)
        self.assertEqual(ad_contracts[3].sid, 5)

        # Check that when the expiration_date has passed but the
        # notice_date hasn't, contract is still considered invalid.
        ad_contracts = finder.lookup_future_chain('AD', dt_3)
        self.assertEqual(len(ad_contracts), 1)
        self.assertEqual(ad_contracts[0].sid, 5)

        # Check that pd.NaT for as_of_date gives the whole chain
        ad_contracts = finder.lookup_future_chain('AD', pd.NaT)
        self.assertEqual(len(ad_contracts), 6)
        self.assertEqual(ad_contracts[5].sid, 5)
812
|
|
|
|
813
|
|
|
def test_map_identifier_index_to_sids(self): |
814
|
|
|
# Build an empty finder and some Assets |
815
|
|
|
dt = pd.Timestamp('2014-01-01', tz='UTC') |
816
|
|
|
finder = self.asset_finder_type(self.env.engine) |
817
|
|
|
asset1 = Equity(1, symbol="AAPL") |
818
|
|
|
asset2 = Equity(2, symbol="GOOG") |
819
|
|
|
asset200 = Future(200, symbol="CLK15") |
820
|
|
|
asset201 = Future(201, symbol="CLM15") |
821
|
|
|
|
822
|
|
|
# Check for correct mapping and types |
823
|
|
|
pre_map = [asset1, asset2, asset200, asset201] |
824
|
|
|
post_map = finder.map_identifier_index_to_sids(pre_map, dt) |
825
|
|
|
self.assertListEqual([1, 2, 200, 201], post_map) |
826
|
|
|
for sid in post_map: |
827
|
|
|
self.assertIsInstance(sid, int) |
828
|
|
|
|
829
|
|
|
# Change order and check mapping again |
830
|
|
|
pre_map = [asset201, asset2, asset200, asset1] |
831
|
|
|
post_map = finder.map_identifier_index_to_sids(pre_map, dt) |
832
|
|
|
self.assertListEqual([201, 2, 200, 1], post_map) |
833
|
|
|
|
834
|
|
|
    def test_compute_lifetimes(self):
        """
        Brute-force check of AssetFinder.lifetimes: for every contiguous
        subindex of the full trading-day range, recompute the expected
        liveness matrix by hand and compare against the finder's output,
        both including and excluding each asset's start date.
        """
        num_assets = 4
        trading_day = self.env.trading_day
        first_start = pd.Timestamp('2015-04-01', tz='UTC')

        # Assets whose start dates are staggered 3 periods apart, each
        # alive for 5 periods, so the lifetimes overlap partially.
        frame = make_rotating_equity_info(
            num_assets=num_assets,
            first_start=first_start,
            frequency=self.env.trading_day,
            periods_between_starts=3,
            asset_lifetime=5
        )

        self.env.write_data(equities_df=frame)
        finder = self.env.asset_finder

        # Full range of trading days spanned by all assets.
        all_dates = pd.date_range(
            start=first_start,
            end=frame.end_date.max(),
            freq=trading_day,
        )

        for dates in all_subindices(all_dates):
            # Boolean (dates x assets) matrices, recomputed from scratch
            # for each subindex.
            expected_with_start_raw = full(
                shape=(len(dates), num_assets),
                fill_value=False,
                dtype=bool,
            )
            expected_no_start_raw = full(
                shape=(len(dates), num_assets),
                fill_value=False,
                dtype=bool,
            )

            for i, date in enumerate(dates):
                it = frame[['start_date', 'end_date']].itertuples()
                for j, start, end in it:
                    # This way of doing the checks is redundant, but very
                    # clear.
                    if start <= date <= end:
                        expected_with_start_raw[i, j] = True
                        if start < date:
                            expected_no_start_raw[i, j] = True

            expected_with_start = pd.DataFrame(
                data=expected_with_start_raw,
                index=dates,
                columns=frame.index.values,
            )
            result = finder.lifetimes(dates, include_start_date=True)
            assert_frame_equal(result, expected_with_start)

            expected_no_start = pd.DataFrame(
                data=expected_no_start_raw,
                index=dates,
                columns=frame.index.values,
            )
            result = finder.lifetimes(dates, include_start_date=False)
            assert_frame_equal(result, expected_no_start)
894
|
|
|
def test_sids(self): |
895
|
|
|
# Ensure that the sids property of the AssetFinder is functioning |
896
|
|
|
self.env.write_data(equities_identifiers=[1, 2, 3]) |
897
|
|
|
sids = self.env.asset_finder.sids |
898
|
|
|
self.assertEqual(3, len(sids)) |
899
|
|
|
self.assertTrue(1 in sids) |
900
|
|
|
self.assertTrue(2 in sids) |
901
|
|
|
self.assertTrue(3 in sids) |
902
|
|
|
|
903
|
|
|
def test_group_by_type(self): |
904
|
|
|
equities = make_simple_equity_info( |
905
|
|
|
range(5), |
906
|
|
|
start_date=pd.Timestamp('2014-01-01'), |
907
|
|
|
end_date=pd.Timestamp('2015-01-01'), |
908
|
|
|
) |
909
|
|
|
futures = make_commodity_future_info( |
910
|
|
|
first_sid=6, |
911
|
|
|
root_symbols=['CL'], |
912
|
|
|
years=[2014], |
913
|
|
|
) |
914
|
|
|
# Intersecting sid queries, to exercise loading of partially-cached |
915
|
|
|
# results. |
916
|
|
|
queries = [ |
917
|
|
|
([0, 1, 3], [6, 7]), |
918
|
|
|
([0, 2, 3], [7, 10]), |
919
|
|
|
(list(equities.index), list(futures.index)), |
920
|
|
|
] |
921
|
|
|
with tmp_asset_finder(equities=equities, futures=futures) as finder: |
922
|
|
|
for equity_sids, future_sids in queries: |
923
|
|
|
results = finder.group_by_type(equity_sids + future_sids) |
924
|
|
|
self.assertEqual( |
925
|
|
|
results, |
926
|
|
|
{'equity': set(equity_sids), 'future': set(future_sids)}, |
927
|
|
|
) |
928
|
|
|
|
929
|
|
|
@parameterized.expand([ |
930
|
|
|
(Equity, 'retrieve_equities', EquitiesNotFound), |
931
|
|
|
(Future, 'retrieve_futures_contracts', FutureContractsNotFound), |
932
|
|
|
]) |
933
|
|
|
def test_retrieve_specific_type(self, type_, lookup_name, failure_type): |
934
|
|
|
equities = make_simple_equity_info( |
935
|
|
|
range(5), |
936
|
|
|
start_date=pd.Timestamp('2014-01-01'), |
937
|
|
|
end_date=pd.Timestamp('2015-01-01'), |
938
|
|
|
) |
939
|
|
|
max_equity = equities.index.max() |
940
|
|
|
futures = make_commodity_future_info( |
941
|
|
|
first_sid=max_equity + 1, |
942
|
|
|
root_symbols=['CL'], |
943
|
|
|
years=[2014], |
944
|
|
|
) |
945
|
|
|
equity_sids = [0, 1] |
946
|
|
|
future_sids = [max_equity + 1, max_equity + 2, max_equity + 3] |
947
|
|
|
if type_ == Equity: |
948
|
|
|
success_sids = equity_sids |
949
|
|
|
fail_sids = future_sids |
950
|
|
|
else: |
951
|
|
|
fail_sids = equity_sids |
952
|
|
|
success_sids = future_sids |
953
|
|
|
|
954
|
|
|
with tmp_asset_finder(equities=equities, futures=futures) as finder: |
955
|
|
|
# Run twice to exercise caching. |
956
|
|
|
lookup = getattr(finder, lookup_name) |
957
|
|
|
for _ in range(2): |
958
|
|
|
results = lookup(success_sids) |
959
|
|
|
self.assertIsInstance(results, dict) |
960
|
|
|
self.assertEqual(set(results.keys()), set(success_sids)) |
961
|
|
|
self.assertEqual( |
962
|
|
|
valmap(int, results), |
963
|
|
|
dict(zip(success_sids, success_sids)), |
964
|
|
|
) |
965
|
|
|
self.assertEqual( |
966
|
|
|
{type_}, |
967
|
|
|
{type(asset) for asset in itervalues(results)}, |
968
|
|
|
) |
969
|
|
|
with self.assertRaises(failure_type): |
970
|
|
|
lookup(fail_sids) |
971
|
|
|
with self.assertRaises(failure_type): |
972
|
|
|
# Should fail if **any** of the assets are bad. |
973
|
|
|
lookup([success_sids[0], fail_sids[0]]) |
974
|
|
|
|
975
|
|
|
    def test_retrieve_all(self):
        """
        retrieve_all should return assets in request order, with correct
        concrete types and symbols, across empty, single-type, mixed,
        and repeated (cache-hitting) queries.
        """
        equities = make_simple_equity_info(
            range(5),
            start_date=pd.Timestamp('2014-01-01'),
            end_date=pd.Timestamp('2015-01-01'),
        )
        max_equity = equities.index.max()
        # Futures sids start immediately after the highest equity sid, so a
        # simple comparison against max_equity separates the two types below.
        futures = make_commodity_future_info(
            first_sid=max_equity + 1,
            root_symbols=['CL'],
            years=[2014],
        )

        with tmp_asset_finder(equities=equities, futures=futures) as finder:
            all_sids = finder.sids
            self.assertEqual(len(all_sids), len(equities) + len(futures))
            # NOTE: some queries are intentionally repeated back-to-back so
            # the second run exercises the finder's asset cache.
            queries = [
                # Empty Query.
                (),
                # Only Equities.
                tuple(equities.index[:2]),
                # Only Futures.
                tuple(futures.index[:3]),
                # Mixed, all cache misses.
                tuple(equities.index[2:]) + tuple(futures.index[3:]),
                # Mixed, all cache hits.
                tuple(equities.index[2:]) + tuple(futures.index[3:]),
                # Everything.
                all_sids,
                all_sids,
            ]
            for sids in queries:
                equity_sids = [i for i in sids if i <= max_equity]
                future_sids = [i for i in sids if i > max_equity]
                results = finder.retrieve_all(sids)
                # Results come back in request order as int-convertible sids.
                self.assertEqual(sids, tuple(map(int, results)))

                # Each asset must have the concrete type matching its sid.
                self.assertEqual(
                    [Equity for _ in equity_sids] +
                    [Future for _ in future_sids],
                    list(map(type, results)),
                )
                # Symbols must match the source frames, equities first.
                self.assertEqual(
                    (
                        list(equities.symbol.loc[equity_sids]) +
                        list(futures.symbol.loc[future_sids])
                    ),
                    list(asset.symbol for asset in results),
                )
1024
|
|
|
|
1025
|
|
|
@parameterized.expand([ |
1026
|
|
|
(EquitiesNotFound, 'equity', 'equities'), |
1027
|
|
|
(FutureContractsNotFound, 'future contract', 'future contracts'), |
1028
|
|
|
(SidsNotFound, 'asset', 'assets'), |
1029
|
|
|
]) |
1030
|
|
|
def test_error_message_plurality(self, |
1031
|
|
|
error_type, |
1032
|
|
|
singular, |
1033
|
|
|
plural): |
1034
|
|
|
try: |
1035
|
|
|
raise error_type(sids=[1]) |
1036
|
|
|
except error_type as e: |
1037
|
|
|
self.assertEqual( |
1038
|
|
|
str(e), |
1039
|
|
|
"No {singular} found for sid: 1.".format(singular=singular) |
1040
|
|
|
) |
1041
|
|
|
try: |
1042
|
|
|
raise error_type(sids=[1, 2]) |
1043
|
|
|
except error_type as e: |
1044
|
|
|
self.assertEqual( |
1045
|
|
|
str(e), |
1046
|
|
|
"No {plural} found for sids: [1, 2].".format(plural=plural) |
1047
|
|
|
) |
1048
|
|
|
|
1049
|
|
|
|
1050
|
|
|
class AssetFinderCachedEquitiesTestCase(AssetFinderTestCase):
    """Re-run the entire AssetFinderTestCase suite against the
    equity-caching finder implementation.
    """

    def setUp(self):
        self.asset_finder_type = AssetFinderCachedEquities
        self.env = TradingEnvironment(load=noop_load)
1055
|
|
|
|
1056
|
|
|
|
1057
|
|
|
class TestFutureChain(TestCase):
    """Tests for FutureChain length, indexing, iteration, repr, as-of
    semantics, and date offsets, backed by a small synthetic chain of
    four CL contracts.
    """

    @classmethod
    def setUpClass(cls):
        # Four CL contracts with staggered notice/expiration dates.  Sid 3
        # starts later than the others so it can be absent from early chains.
        metadata = {
            0: {
                'symbol': 'CLG06',
                'root_symbol': 'CL',
                'start_date': pd.Timestamp('2005-12-01', tz='UTC'),
                'notice_date': pd.Timestamp('2005-12-20', tz='UTC'),
                'expiration_date': pd.Timestamp('2006-01-20', tz='UTC')},
            1: {
                'root_symbol': 'CL',
                'symbol': 'CLK06',
                'start_date': pd.Timestamp('2005-12-01', tz='UTC'),
                'notice_date': pd.Timestamp('2006-03-20', tz='UTC'),
                'expiration_date': pd.Timestamp('2006-04-20', tz='UTC')},
            2: {
                'symbol': 'CLQ06',
                'root_symbol': 'CL',
                'start_date': pd.Timestamp('2005-12-01', tz='UTC'),
                'notice_date': pd.Timestamp('2006-06-20', tz='UTC'),
                'expiration_date': pd.Timestamp('2006-07-20', tz='UTC')},
            3: {
                'symbol': 'CLX06',
                'root_symbol': 'CL',
                'start_date': pd.Timestamp('2006-02-01', tz='UTC'),
                'notice_date': pd.Timestamp('2006-09-20', tz='UTC'),
                'expiration_date': pd.Timestamp('2006-10-20', tz='UTC')}
        }

        env = TradingEnvironment(load=noop_load)
        env.write_data(futures_data=metadata)
        cls.asset_finder = env.asset_finder

    @classmethod
    def tearDownClass(cls):
        del cls.asset_finder

    def test_len(self):
        """ Test the __len__ method of FutureChain.
        """
        # Sids 0, 1, & 2 have started, 3 has not yet started, but all are in
        # the chain
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')
        self.assertEqual(len(cl), 4)

        # Sid 0 is still valid on its notice date.
        cl = FutureChain(self.asset_finder, lambda: '2005-12-20', 'CL')
        self.assertEqual(len(cl), 4)

        # Sid 0 is now invalid, leaving Sids 1 & 2 valid (and 3 not started).
        cl = FutureChain(self.asset_finder, lambda: '2005-12-21', 'CL')
        self.assertEqual(len(cl), 3)

        # Sid 3 has started, so 1, 2, & 3 are now valid.
        cl = FutureChain(self.asset_finder, lambda: '2006-02-01', 'CL')
        self.assertEqual(len(cl), 3)

        # All contracts are no longer valid.
        cl = FutureChain(self.asset_finder, lambda: '2006-09-21', 'CL')
        self.assertEqual(len(cl), 0)

    def test_getitem(self):
        """ Test the __getitem__ method of FutureChain.
        """
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')
        self.assertEqual(cl[0], 0)
        self.assertEqual(cl[1], 1)
        self.assertEqual(cl[2], 2)

        cl = FutureChain(self.asset_finder, lambda: '2005-12-20', 'CL')
        self.assertEqual(cl[0], 0)

        cl = FutureChain(self.asset_finder, lambda: '2005-12-21', 'CL')
        self.assertEqual(cl[0], 1)

        cl = FutureChain(self.asset_finder, lambda: '2006-02-01', 'CL')
        self.assertEqual(cl[-1], 3)

    def test_iter(self):
        """ Test the __iter__ method of FutureChain.
        """
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')
        for i, contract in enumerate(cl):
            self.assertEqual(contract, i)

        # First contract is now invalid, so sids will be offset by one
        cl = FutureChain(self.asset_finder, lambda: '2005-12-21', 'CL')
        for i, contract in enumerate(cl):
            self.assertEqual(contract, i + 1)

    def test_root_symbols(self):
        """ Test that different variations on root symbols are handled
        as expected.
        """
        # Make sure this successfully gets the chain for CL.
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')
        self.assertEqual(cl.root_symbol, 'CL')

        # These root symbols don't exist, so RootSymbolNotFound should
        # be raised immediately.
        with self.assertRaises(RootSymbolNotFound):
            FutureChain(self.asset_finder, lambda: '2005-12-01', 'CLZ')

        with self.assertRaises(RootSymbolNotFound):
            FutureChain(self.asset_finder, lambda: '2005-12-01', '')

    def test_repr(self):
        """ Test the __repr__ method of FutureChain.
        """
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')
        cl_feb = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL',
                             as_of_date=pd.Timestamp('2006-02-01', tz='UTC'))

        # The default chain should not include the as of date.
        self.assertEqual(repr(cl), "FutureChain(root_symbol='CL')")

        # An explicit as of date should show up in the repr.
        self.assertEqual(
            repr(cl_feb),
            ("FutureChain(root_symbol='CL', "
             "as_of_date='2006-02-01 00:00:00+00:00')")
        )

    def test_as_of(self):
        """ Test the as_of method of FutureChain.
        """
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')

        # Test that the as_of_date is set correctly to the future
        feb = pd.Timestamp('2006-02-01', tz='UTC')
        cl_feb = cl.as_of(feb)
        self.assertEqual(
            cl_feb.as_of_date,
            pd.Timestamp(feb, tz='UTC')
        )

        # Test that the as_of_date is set correctly to the past, with
        # args of str, datetime.datetime, and pd.Timestamp.
        feb_prev = pd.Timestamp('2005-02-01', tz='UTC')
        cl_feb_prev = cl.as_of(feb_prev)
        self.assertEqual(
            cl_feb_prev.as_of_date,
            pd.Timestamp(feb_prev, tz='UTC')
        )

        feb_prev = pd.Timestamp(datetime(year=2005, month=2, day=1), tz='UTC')
        cl_feb_prev = cl.as_of(feb_prev)
        self.assertEqual(
            cl_feb_prev.as_of_date,
            pd.Timestamp(feb_prev, tz='UTC')
        )

        # Previously this third case repeated the pd.Timestamp variant; pass
        # a plain string so all three arg types promised above are covered.
        cl_feb_prev = cl.as_of('2005-02-01')
        self.assertEqual(
            cl_feb_prev.as_of_date,
            pd.Timestamp('2005-02-01', tz='UTC')
        )

        # Test that the as_of() method works with str args
        feb_str = '2006-02-01'
        cl_feb = cl.as_of(feb_str)
        self.assertEqual(
            cl_feb.as_of_date,
            pd.Timestamp(feb, tz='UTC')
        )

        # The chain as of the current dt should always be the same as
        # the default chain.
        self.assertEqual(cl[0], cl.as_of(pd.Timestamp('2005-12-01'))[0])

    def test_offset(self):
        """ Test the offset method of FutureChain.
        """
        cl = FutureChain(self.asset_finder, lambda: '2005-12-01', 'CL')

        # Test that an offset forward sets as_of_date as expected
        self.assertEqual(
            cl.offset('3 days').as_of_date,
            cl.as_of_date + pd.Timedelta(days=3)
        )

        # Test that an offset backward sets as_of_date as expected, with
        # time delta given as str, datetime.timedelta, and pd.Timedelta.
        self.assertEqual(
            cl.offset('-1000 days').as_of_date,
            cl.as_of_date + pd.Timedelta(days=-1000)
        )
        self.assertEqual(
            cl.offset(timedelta(days=-1000)).as_of_date,
            cl.as_of_date + pd.Timedelta(days=-1000)
        )
        self.assertEqual(
            cl.offset(pd.Timedelta('-1000 days')).as_of_date,
            cl.as_of_date + pd.Timedelta(days=-1000)
        )

        # An offset of zero should give the original chain.
        self.assertEqual(cl[0], cl.offset(0)[0])
        self.assertEqual(cl[0], cl.offset("0 days")[0])

        # A string that doesn't represent a time delta should raise a
        # ValueError.
        with self.assertRaises(ValueError):
            cl.offset("blah")

    def test_cme_code_to_month(self):
        """cme_code_to_month should map every CME letter to its month."""
        codes = {
            'F': 1,   # January
            'G': 2,   # February
            'H': 3,   # March
            'J': 4,   # April
            'K': 5,   # May
            'M': 6,   # June
            'N': 7,   # July
            'Q': 8,   # August
            'U': 9,   # September
            'V': 10,  # October
            'X': 11,  # November
            'Z': 12   # December
        }
        for key in codes:
            self.assertEqual(codes[key], cme_code_to_month(key))

    def test_month_to_cme_code(self):
        """month_to_cme_code should map every month number to its letter."""
        codes = {
            1: 'F',   # January
            2: 'G',   # February
            3: 'H',   # March
            4: 'J',   # April
            5: 'K',   # May
            6: 'M',   # June
            7: 'N',   # July
            8: 'Q',   # August
            9: 'U',   # September
            10: 'V',  # October
            11: 'X',  # November
            12: 'Z',  # December
        }
        for key in codes:
            self.assertEqual(codes[key], month_to_cme_code(key))
1301
|
|
|
|
1302
|
|
|
class TestAssetDBVersioning(TestCase):
    """Tests for asset-db version checking, writing, and downgrading."""

    def test_check_version(self):
        """check_version_info accepts only an exact version match."""
        env = TradingEnvironment(load=noop_load)
        version_table = env.asset_finder.version_info

        # This should not raise an error
        check_version_info(version_table, ASSET_DB_VERSION)

        # This should fail because the version is too low
        with self.assertRaises(AssetDBVersionError):
            check_version_info(version_table, ASSET_DB_VERSION - 1)

        # This should fail because the version is too high
        with self.assertRaises(AssetDBVersionError):
            check_version_info(version_table, ASSET_DB_VERSION + 1)

    def test_write_version(self):
        """write_version_info records a version exactly once."""
        env = TradingEnvironment(load=noop_load)
        metadata = sa.MetaData(bind=env.engine)
        version_table = _version_table_schema(metadata)
        version_table.delete().execute()

        # Assert that the version is not present in the table
        self.assertIsNone(sa.select((version_table.c.version,)).scalar())

        # This should fail because the table has no version info and is,
        # therefore, considered v0
        with self.assertRaises(AssetDBVersionError):
            check_version_info(version_table, -2)

        # This should not raise an error because the version has been written
        write_version_info(version_table, -2)
        check_version_info(version_table, -2)

        # Assert that the version is in the table and correct
        self.assertEqual(sa.select((version_table.c.version,)).scalar(), -2)

        # Assert that trying to overwrite the version fails
        with self.assertRaises(sa.exc.IntegrityError):
            write_version_info(version_table, -3)

    def test_finder_checks_version(self):
        """AssetFinder construction must validate the db version."""
        # Create an env and give it a bogus version number
        env = TradingEnvironment(load=noop_load)
        metadata = sa.MetaData(bind=env.engine)
        version_table = _version_table_schema(metadata)
        version_table.delete().execute()
        write_version_info(version_table, -2)
        check_version_info(version_table, -2)

        # Assert that trying to build a finder with a bad db raises an error
        with self.assertRaises(AssetDBVersionError):
            AssetFinder(engine=env.engine)

        # Change the version number of the db to the correct version
        version_table.delete().execute()
        write_version_info(version_table, ASSET_DB_VERSION)
        check_version_info(version_table, ASSET_DB_VERSION)

        # Now that the versions match, this Finder should succeed
        AssetFinder(engine=env.engine)

    def test_downgrade(self):
        """Downgrading to v0 rewrites the schema and version row."""
        # Attempt to downgrade a current assets db all the way down to v0
        env = TradingEnvironment(load=noop_load)
        conn = env.engine.connect()
        downgrade(env.engine, 0)

        # Verify that the db version is now 0
        metadata = sa.MetaData(conn)
        metadata.reflect(bind=env.engine)
        version_table = metadata.tables['version_info']
        check_version_info(version_table, 0)

        # Check some of the v1-to-v0 downgrades.  Note: `has_key` is a
        # Python-2-only dict API; membership tests work on both 2 and 3.
        self.assertTrue('futures_contracts' in metadata.tables)
        self.assertTrue('version_info' in metadata.tables)
        self.assertNotIn(
            'tick_size',
            metadata.tables['futures_contracts'].columns,
        )
        self.assertIn(
            'contract_multiplier',
            metadata.tables['futures_contracts'].columns,
        )

    def test_impossible_downgrade(self):
        """Downgrading to a higher-than-current version must fail."""
        # Attempt to downgrade a current assets db to a
        # higher-than-current version
        env = TradingEnvironment(load=noop_load)
        with self.assertRaises(AssetDBImpossibleDowngrade):
            downgrade(env.engine, ASSET_DB_VERSION + 5)
1391
|
|
|
|