#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import division

from datetime import (
    datetime,
    timedelta,
)
import logging

from testfixtures import TempDirectory
import unittest
import nose.tools as nt
import pytz

import pandas as pd
import numpy as np
from six.moves import range, zip

from zipline.data.us_equity_pricing import (
    SQLiteAdjustmentWriter,
    SQLiteAdjustmentReader,
)
import zipline.utils.factory as factory
import zipline.finance.performance as perf
from zipline.finance.transaction import create_transaction
import zipline.utils.math_utils as zp_math

from zipline.finance.blotter import Order
from zipline.finance.commission import PerShare, PerTrade, PerDollar
from zipline.finance.trading import TradingEnvironment
from zipline.pipeline.loaders.synthetic import NullAdjustmentReader
from zipline.utils.factory import create_simulation_parameters
from zipline.utils.serialization_utils import (
    loads_with_persistent_ids, dumps_with_persistent_ids
)
import zipline.protocol as zp
from zipline.protocol import Event
from zipline.utils.test_utils import create_data_portal_from_trade_history

logger = logging.getLogger('Test Perf Tracking')

onesec = timedelta(seconds=1)
oneday = timedelta(days=1)
tradingday = timedelta(hours=6, minutes=30)

# nose.tools changed name in python 3
if not hasattr(nt, 'assert_count_equal'):
    nt.assert_count_equal = nt.assert_items_equal


def check_perf_period(pp,
                      gross_leverage,
                      net_leverage,
                      long_exposure,
                      longs_count,
                      short_exposure,
                      shorts_count):

    perf_data = pp.to_dict()
    np.testing.assert_allclose(
        gross_leverage, perf_data['gross_leverage'], rtol=1e-3)
    np.testing.assert_allclose(
        net_leverage, perf_data['net_leverage'], rtol=1e-3)
    np.testing.assert_allclose(
        long_exposure, perf_data['long_exposure'], rtol=1e-3)
    np.testing.assert_allclose(
        longs_count, perf_data['longs_count'], rtol=1e-3)
    np.testing.assert_allclose(
        short_exposure, perf_data['short_exposure'], rtol=1e-3)
    np.testing.assert_allclose(
        shorts_count, perf_data['shorts_count'], rtol=1e-3)


def check_account(account,
                  settled_cash,
                  equity_with_loan,
                  total_positions_value,
                  regt_equity,
                  available_funds,
                  excess_liquidity,
                  cushion,
                  leverage,
                  net_leverage,
                  net_liquidation):
    # this is a long only portfolio that is only partially invested
    # so net and gross leverage are equal.

    np.testing.assert_allclose(settled_cash,
                               account['settled_cash'], rtol=1e-3)
    np.testing.assert_allclose(equity_with_loan,
                               account['equity_with_loan'], rtol=1e-3)
    np.testing.assert_allclose(total_positions_value,
                               account['total_positions_value'], rtol=1e-3)
    np.testing.assert_allclose(regt_equity,
                               account['regt_equity'], rtol=1e-3)
    np.testing.assert_allclose(available_funds,
                               account['available_funds'], rtol=1e-3)
    np.testing.assert_allclose(excess_liquidity,
                               account['excess_liquidity'], rtol=1e-3)
    np.testing.assert_allclose(cushion,
                               account['cushion'], rtol=1e-3)
    np.testing.assert_allclose(leverage, account['leverage'], rtol=1e-3)
    np.testing.assert_allclose(net_leverage,
                               account['net_leverage'], rtol=1e-3)
    np.testing.assert_allclose(net_liquidation,
                               account['net_liquidation'], rtol=1e-3)


def create_txn(sid, dt, price, amount):
    """
    Create a fake transaction to be filled and processed prior to the execution
    of a given trade event.
    """
    mock_order = Order(dt, sid, amount, id=None)
    return create_transaction(sid, dt, mock_order, price, amount)


def benchmark_events_in_range(sim_params, env):
    return [
        Event({'dt': dt,
               'returns': ret,
               'type': zp.DATASOURCE_TYPE.BENCHMARK,
               # We explicitly rely on the behavior that benchmarks sort before
               # any other events.
               'source_id': '1Abenchmarks'})
        for dt, ret in env.benchmark_returns.iteritems()
        if dt.date() >= sim_params.period_start.date() and
        dt.date() <= sim_params.period_end.date()
    ]


def calculate_results(sim_params,
                      env,
                      tempdir,
                      benchmark_events,
                      trade_events,
                      adjustment_reader,
                      splits=None,
                      txns=None,
                      commissions=None):
    """
    Run the given events through a stripped down version of the loop in
    AlgorithmSimulator.transform.

    IMPORTANT NOTE FOR TEST WRITERS/READERS:

    This loop has some wonky logic for the order of event processing for
    datasource types. This exists mostly to accommodate legacy tests that were
    making assumptions about how events would be sorted.

    In particular:

        - Dividends passed for a given date are processed PRIOR to any events
          for that date.
        - Splits passed for a given date are processed AFTER any events for
          that date.

    Tests that use this helper should not be considered useful guarantees of
    the behavior of AlgorithmSimulator on a stream containing the same events
    unless the subgroups have been explicitly re-sorted in this way.
    """

    txns = txns or []
    splits = splits or {}
    commissions = commissions or {}

    adjustment_reader = adjustment_reader or NullAdjustmentReader()

    data_portal = create_data_portal_from_trade_history(
        env,
        tempdir,
        sim_params,
        trade_events,
    )
    data_portal._adjustment_reader = adjustment_reader

    perf_tracker = perf.PerformanceTracker(sim_params, env, data_portal)

    results = []

    for date in sim_params.trading_days:

        for txn in filter(lambda txn: txn.dt == date, txns):
            # Process txns for this date.
            perf_tracker.process_transaction(txn)

        try:
            commissions_for_date = commissions[date]
            for comm in commissions_for_date:
                perf_tracker.process_commission(comm)
        except KeyError:
            pass

        try:
            splits_for_date = splits[date]
            perf_tracker.handle_splits(splits_for_date)
        except KeyError:
            pass

        msg = perf_tracker.handle_market_close_daily(date)
        msg['account'] = perf_tracker.get_account(True, date)
        results.append(msg)
    return results


def check_perf_tracker_serialization(perf_tracker):
    scalar_keys = [
        'emission_rate',
        'txn_count',
        'market_open',
        'last_close',
        'period_start',
        'day_count',
        'capital_base',
        'market_close',
        'saved_dt',
        'period_end',
        'total_days',
    ]
    p_string = dumps_with_persistent_ids(perf_tracker)

    test = loads_with_persistent_ids(p_string, env=perf_tracker.env)

    for k in scalar_keys:
        nt.assert_equal(getattr(test, k), getattr(perf_tracker, k), k)

    perf_periods = (
        test.cumulative_performance,
        test.todays_performance
    )
    for period in perf_periods:
        nt.assert_true(hasattr(period, '_position_tracker'))


def setup_env_data(env, sim_params, sids):
    data = {}
    for sid in sids:
        data[sid] = {
            "start_date": sim_params.trading_days[0],
            "end_date": sim_params.trading_days[-1]
        }

    env.write_data(equities_data=data)


class TestSplitPerformance(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(num_days=2,
                                                      capital_base=10e3)

        setup_env_data(cls.env, cls.sim_params, [1])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)
        cls.tempdir = TempDirectory()

    @classmethod
    def tearDownClass(cls):
        cls.tempdir.cleanup()

    def test_split_long_position(self):
        events = factory.create_trade_history(
            1,
            # TODO: Should we provide adjusted prices in the tests, or provide
            # raw prices and adjust via DataPortal?
            [20, 60],
            [100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # set up a long position in sid 1
        # 100 shares at $20 apiece = $2000 position
        txns = [create_txn(events[0].sid, events[0].dt, 20, 100)]

        # set up a split with ratio 3 occurring at the start of the second
        # day.
        splits = {
            events[1].dt: [(1, 3)]
        }
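        # With the ratio of 3, the 100-share position becomes 100 / 3 = 33
        # whole shares at the adjusted $60 price; the leftover fractional
        # share (~0.33 shares * $60, about $20) comes back as cash, which is
        # what the position and ending-cash assertions below expect.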

        results = calculate_results(self.sim_params, self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: events},
                                    NullAdjustmentReader(),
                                    txns=txns, splits=splits)

        # should have 33 shares (at $60 apiece) and $20 in cash
        self.assertEqual(2, len(results))

        latest_positions = results[1]['daily_perf']['positions']
        self.assertEqual(1, len(latest_positions))

        # check the last position to make sure it's been updated
        position = latest_positions[0]

        self.assertEqual(1, position['sid'])
        self.assertEqual(33, position['amount'])
        self.assertEqual(60, position['cost_basis'])
        self.assertEqual(60, position['last_sale_price'])

        # since we started with $10000, and we spent $2000 on the
        # position, but then got $20 back, we should have $8020
        # (or close to it) in cash.

        # we won't get exactly 8020 because sometimes a split is
        # denoted as a ratio like 0.3333, and we lose some digits
        # of precision.  thus, make sure we're pretty close.
        daily_perf = results[1]['daily_perf']

        self.assertTrue(
            zp_math.tolerant_equals(8020,
                                    daily_perf['ending_cash'], 1),
            "ending_cash was {0}".format(daily_perf['ending_cash']))

        # Validate that the account attributes were updated.
        account = results[1]['account']
        self.assertEqual(float('inf'), account['day_trades_remaining'])
        # this is a long only portfolio that is only partially invested
        # so net and gross leverage are equal.
        np.testing.assert_allclose(0.198, account['leverage'], rtol=1e-3)
        np.testing.assert_allclose(0.198, account['net_leverage'], rtol=1e-3)
        np.testing.assert_allclose(8020, account['regt_equity'], rtol=1e-3)
        self.assertEqual(float('inf'), account['regt_margin'])
        np.testing.assert_allclose(8020, account['available_funds'], rtol=1e-3)
        self.assertEqual(0, account['maintenance_margin_requirement'])
        np.testing.assert_allclose(10000,
                                   account['equity_with_loan'], rtol=1e-3)
        self.assertEqual(float('inf'), account['buying_power'])
        self.assertEqual(0, account['initial_margin_requirement'])
        np.testing.assert_allclose(8020, account['excess_liquidity'],
                                   rtol=1e-3)
        np.testing.assert_allclose(8020, account['settled_cash'], rtol=1e-3)
        np.testing.assert_allclose(10000, account['net_liquidation'],
                                   rtol=1e-3)
        np.testing.assert_allclose(0.802, account['cushion'], rtol=1e-3)
        np.testing.assert_allclose(1980, account['total_positions_value'],
                                   rtol=1e-3)
        self.assertEqual(0, account['accrued_interest'])

        for i, result in enumerate(results):
            for perf_kind in ('daily_perf', 'cumulative_perf'):
                perf_result = result[perf_kind]
                # prices aren't changing, so pnl and returns should be 0.0
                self.assertEqual(0.0, perf_result['pnl'],
                                 "day %s %s pnl %s instead of 0.0" %
                                 (i, perf_kind, perf_result['pnl']))
                self.assertEqual(0.0, perf_result['returns'],
                                 "day %s %s returns %s instead of 0.0" %
                                 (i, perf_kind, perf_result['returns']))


class TestCommissionEvents(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(num_days=5,
                                                      capital_base=10e3)
        setup_env_data(cls.env, cls.sim_params, [0, 1, 133])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)
        cls.tempdir = TempDirectory()

    @classmethod
    def tearDownClass(cls):
        cls.tempdir.cleanup()

    def test_commission_event(self):
        trade_events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # Test commission models and validate result
        # Expected commission amounts:
        # PerShare commission:  1.00, 1.00, 1.50 = $3.50
        # PerTrade commission:  5.00, 5.00, 5.00 = $15.00
        # PerDollar commission: 1.50, 3.00, 4.50 = $9.00
        # Total commission = $3.50 + $15.00 + $9.00 = $27.50
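        # (PerShare: max(0.01 * 50, 1.00) + 0.01 * 100 + 0.01 * 150;
        #  PerTrade: flat 5.00 per fill; PerDollar: 0.0015 * $20 * shares.)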

        # Create 3 transactions:  50, 100, 150 shares traded @ $20
        first_trade = trade_events[0]
        transactions = [create_txn(first_trade.sid, first_trade.dt, 20, i)
                        for i in [50, 100, 150]]

        # Create commission models and validate that they produce the
        # expected commissions.
        models = [PerShare(cost=0.01, min_trade_cost=1.00),
                  PerTrade(cost=5.00),
                  PerDollar(cost=0.0015)]
        expected_results = [3.50, 15.0, 9.0]

        for model, expected in zip(models, expected_results):
            total_commission = 0
            for trade in transactions:
                total_commission += model.calculate(trade)[1]
            self.assertEqual(total_commission, expected)

        # Verify that commission events are handled correctly by
        # PerformanceTracker.
        commissions = {}
        cash_adj_dt = trade_events[0].dt
        cash_adjustment = factory.create_commission(1, 300.0, cash_adj_dt)
        commissions[cash_adj_dt] = [cash_adjustment]

        # Insert a purchase order.
        txns = [create_txn(1, cash_adj_dt, 20, 1)]
        results = calculate_results(self.sim_params,
                                    self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: trade_events},
                                    NullAdjustmentReader(),
                                    txns=txns,
                                    commissions=commissions)

        # Validate that we lost 320 dollars from our cash pool.
        self.assertEqual(results[-1]['cumulative_perf']['ending_cash'],
                         9680, "Should have lost 320 from cash pool.")
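        # The $320 is the $20 purchase (one share at $20) plus the $300
        # commission; with a single-share position the whole commission is
        # absorbed into its cost basis, so cost_basis becomes 20 + 300 = 320.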
        # Validate that the cost basis of our position changed.
        self.assertEqual(results[-1]['daily_perf']['positions']
                         [0]['cost_basis'], 320.0)
        # Validate that the account attributes were updated.
        account = results[1]['account']
        self.assertEqual(float('inf'), account['day_trades_remaining'])
        np.testing.assert_allclose(0.001, account['leverage'], rtol=1e-3,
                                   atol=1e-4)
        np.testing.assert_allclose(9680, account['regt_equity'], rtol=1e-3)
        self.assertEqual(float('inf'), account['regt_margin'])
        np.testing.assert_allclose(9680, account['available_funds'],
                                   rtol=1e-3)
        self.assertEqual(0, account['maintenance_margin_requirement'])
        np.testing.assert_allclose(9690,
                                   account['equity_with_loan'], rtol=1e-3)
        self.assertEqual(float('inf'), account['buying_power'])
        self.assertEqual(0, account['initial_margin_requirement'])
        np.testing.assert_allclose(9680, account['excess_liquidity'],
                                   rtol=1e-3)
        np.testing.assert_allclose(9680, account['settled_cash'],
                                   rtol=1e-3)
        np.testing.assert_allclose(9690, account['net_liquidation'],
                                   rtol=1e-3)
        np.testing.assert_allclose(0.999, account['cushion'], rtol=1e-3)
        np.testing.assert_allclose(10, account['total_positions_value'],
                                   rtol=1e-3)
        self.assertEqual(0, account['accrued_interest'])

    def test_commission_zero_position(self):
        """
        Ensure no div-by-zero errors.
        """
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # Buy and sell the same sid so that we have a zero position by the
        # time of events[3].
        txns = [
            create_txn(events[0].sid, events[0].dt, 20, 1),
            create_txn(events[1].sid, events[1].dt, 20, -1),
        ]

        # Add a cash adjustment at the time of event[3].
        cash_adj_dt = events[3].dt
        commissions = {}
        cash_adjustment = factory.create_commission(1, 300.0, cash_adj_dt)
        commissions[cash_adj_dt] = [cash_adjustment]

        results = calculate_results(self.sim_params,
                                    self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: events},
                                    NullAdjustmentReader(),
                                    txns=txns,
                                    commissions=commissions)
        # Validate that we lost 300 dollars from our cash pool.
        self.assertEqual(results[-1]['cumulative_perf']['ending_cash'],
                         9700)

    def test_commission_no_position(self):
        """
        Ensure no position-not-found or sid-not-found errors.
        """
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # Add a cash adjustment at the time of event[3].
        cash_adj_dt = events[3].dt
        commissions = {}
        cash_adjustment = factory.create_commission(1, 300.0, cash_adj_dt)
        commissions[cash_adj_dt] = [cash_adjustment]

        results = calculate_results(self.sim_params,
                                    self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: events},
                                    NullAdjustmentReader(),
                                    commissions=commissions)
        # Validate that we lost 300 dollars from our cash pool.
        self.assertEqual(results[-1]['cumulative_perf']['ending_cash'],
                         9700)


class MockDailyBarSpotReader(object):

    def spot_price(self, sid, day, colname):
        return 100.0


class TestDividendPerformance(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(num_days=6,
                                                      capital_base=10e3)

        setup_env_data(cls.env, cls.sim_params, [1, 2])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)

    @classmethod
    def tearDownClass(cls):
        del cls.env

    def setUp(self):
        self.tempdir = TempDirectory()

    def tearDown(self):
        self.tempdir.cleanup()

    def test_market_hours_calculations(self):
        # DST in US/Eastern began on Sunday March 14, 2010
        before = datetime(2010, 3, 12, 14, 31, tzinfo=pytz.utc)
        after = factory.get_next_trading_dt(
            before,
            timedelta(days=1),
            self.env,
        )
        self.assertEqual(after.hour, 13)

    def test_long_position_receives_dividend(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[2].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Simulate a transaction being filled prior to the ex_date.
        txns = [create_txn(events[0].sid, events[0].dt, 10.0, 100)]
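        # Buying 100 shares at $10 uses $1,000 of cash; the $10-per-share
        # dividend then returns $1,000 on the pay date, which drives the 0.1
        # return and the cash-flow assertions below.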
        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.1, 0.1, 0.1, 0.1])
        daily_returns = [event['daily_perf']['returns']
                         for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.10, 0.0, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used']
                      for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 1000, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [-1000, -1000, 0, 0, 0, 0])
        cash_pos = \
            [event['cumulative_perf']['ending_cash'] for event in results]
        self.assertEqual(cash_pos, [9000, 9000, 10000, 10000, 10000, 10000])

    def test_long_position_receives_stock_dividend(self):
        # post some trades in the market
        events = {}
        for sid in (1, 2):
            events[sid] = factory.create_trade_history(
                sid,
                [10, 10, 10, 10, 10, 10],
                [100, 100, 100, 100, 100, 100],
                oneday,
                self.sim_params,
                env=self.env
            )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([], dtype=np.uint32),
            'amount': np.array([], dtype=np.float64),
            'declared_date': np.array([], dtype='datetime64[ns]'),
            'ex_date': np.array([], dtype='datetime64[ns]'),
            'pay_date': np.array([], dtype='datetime64[ns]'),
            'record_date': np.array([], dtype='datetime64[ns]'),
        })
        sid_1 = events[1]
        stock_dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'payment_sid': np.array([2], dtype=np.uint32),
            'ratio': np.array([2], dtype=np.float64),
            'declared_date': np.array([sid_1[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([sid_1[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([sid_1[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([sid_1[2].dt], dtype='datetime64[ns]'),
        })
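        # With a payment ratio of 2, the 100-share holder of sid 1 receives
        # 200 shares of sid 2 (about $2,000 at $10) on the pay date, hence the
        # 0.2 return asserted below with no accompanying cash flow.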
        writer.write(splits, mergers, dividends, stock_dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        txns = [create_txn(events[1][0].sid, events[1][0].dt, 10.0, 100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            events,
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.2, 0.2, 0.2, 0.2])
        daily_returns = [event['daily_perf']['returns']
                         for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.2, 0.0, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used']
                      for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [-1000] * 6)
        cash_pos = \
            [event['cumulative_perf']['ending_cash'] for event in results]
        self.assertEqual(cash_pos, [9000] * 6)

    def test_long_position_purchased_on_ex_date_receives_no_dividend(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[2].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Simulate a transaction being filled on the ex_date.
        txns = [create_txn(events[1].sid, events[1].dt, 10.0, 100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0, 0, 0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, -1000, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows,
                         [0, -1000, -1000, -1000, -1000, -1000])

    def test_selling_before_dividend_payment_still_gets_paid(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[3].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        buy_txn = create_txn(events[0].sid, events[0].dt, 10.0, 100)
        sell_txn = create_txn(events[2].sid, events[2].dt, 10.0, -100)
        txns = [buy_txn, sell_txn]
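        # The position is opened before the ex date and closed before the pay
        # date; the $10-per-share dividend on 100 shares should still arrive
        # as $1,000 on the pay date, per the cash-flow assertions below.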

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0.1, 0.1, 0.1])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0.1, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 1000, 1000, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows,
                         [-1000, -1000, 0, 1000, 1000, 1000])

    def test_buy_and_sell_before_ex(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )
        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )

        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.0], dtype=np.float64),
            'declared_date': np.array([events[3].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[4].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[5].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[4].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        buy_txn = create_txn(events[1].sid, events[1].dt, 10.0, 100)
        sell_txn = create_txn(events[2].sid, events[2].dt, 10.0, -100)
        txns = [buy_txn, sell_txn]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            txns=txns,
            adjustment_reader=adjustment_reader,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0, 0, 0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, -1000, 1000, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [0, -1000, 0, 0, 0, 0])

    def test_ending_before_pay_date(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        pay_date = self.sim_params.first_open
        # find pay date that is much later.
        for i in range(30):
            pay_date = factory.get_next_trading_dt(pay_date, oneday, self.env)

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([pay_date], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        txns = [create_txn(events[1].sid, events[1].dt, 10.0, 100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            txns=txns,
            adjustment_reader=adjustment_reader,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0.0, 0.0, 0.0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, -1000, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(
            cumulative_cash_flows,
            [0, -1000, -1000, -1000, -1000, -1000]
        )

    def test_short_position_pays_dividend(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[2].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[2].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[3].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        txns = [create_txn(events[1].sid, events[1].dt, 10.0, -100)]
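        # A 100-share short held over the ex date owes the $10-per-share
        # dividend, so $1,000 leaves the portfolio on the pay date and the
        # cumulative return drops by 0.1 in the assertions below.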

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.0, -0.1, -0.1, -0.1])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.0, -0.1, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, 1000, 0, -1000, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [0, 1000, 1000, 0, 0, 0])

    def test_no_position_receives_no_dividend(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[2].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[2].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
        )

        self.assertEqual(len(results), 6)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, 0, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [0, 0, 0, 0, 0, 0])

    def test_no_dividend_at_simulation_end(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[-3].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[-2].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([self.env.next_trading_day(events[-1].dt)],
                                 dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Set the last day to be the last event
        sim_params = create_simulation_parameters(
            num_days=6,
            capital_base=10e3,
            start=self.sim_params.period_start,
            end=self.sim_params.period_end
        )

        sim_params.period_end = events[-1].dt
        sim_params.update_internal_from_env(self.env)

        # Simulate a transaction being filled prior to the ex_date.
        txns = [create_txn(events[0].sid, events[0].dt, 10.0, 100)]
        results = calculate_results(
            sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader=adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 5)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.0, 0.0, 0.0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.0, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows,
                         [-1000, -1000, -1000, -1000, -1000])


class TestDividendPerformanceHolidayStyle(TestDividendPerformance):

    # The holiday tests begin the simulation on the day
    # before Thanksgiving, so that the next trading day is
    # two days ahead. Any tests that hard code events
    # to be start + oneday will fail, since those events will
    # be skipped by the simulation.

    @classmethod
    def setUpClass(cls):
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(
            num_days=6,
            capital_base=10e3,
            start=pd.Timestamp("2003-11-30", tz='UTC'),
            end=pd.Timestamp("2003-12-08", tz='UTC')
        )

        setup_env_data(cls.env, cls.sim_params, [1, 2])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)


class TestPositionPerformance(unittest.TestCase):

    def setUp(self):
        self.tempdir = TempDirectory()

    def create_environment_stuff(self, num_days=4, sids=[1, 2]):
        self.env = TradingEnvironment()
        self.sim_params = create_simulation_parameters(num_days=num_days)

        setup_env_data(self.env, self.sim_params, [1, 2])

        self.finder = self.env.asset_finder

        self.benchmark_events = benchmark_events_in_range(self.sim_params,
                                                          self.env)

    def tearDown(self):
        self.tempdir.cleanup()
        del self.env

    def test_long_short_positions(self):
        """
        start with $1000
        buy 100 stock1 shares at $10
        sell short 100 stock2 shares at $10
        stock1 then goes down to $9
        stock2 goes to $11
        """
        self.create_environment_stuff()

        trades_1 = factory.create_trade_history(
            1,
            [10, 10, 10, 9],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        trades_2 = factory.create_trade_history(
            2,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        txn1 = create_txn(trades_1[1].sid, trades_1[1].dt, 10.0, 100)
        txn2 = create_txn(trades_2[1].sid, trades_1[1].dt, 10.0, -100)

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades_1, 2: trades_2}
        )

        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal)
        pp.position_tracker = pt
        pt.execute_transaction(txn1)
        pp.handle_execution(txn1)
        pt.execute_transaction(txn2)
        pp.handle_execution(txn2)

        dt = trades_1[-2].dt
        pt.sync_last_sale_prices(dt)

        pp.calculate_performance()

        check_perf_period(
            pp,
            gross_leverage=2.0,
            net_leverage=0.0,
            long_exposure=1000.0,
            longs_count=1,
            short_exposure=-1000.0,
            shorts_count=1)
        # Validate that the account attributes were updated.
        account = pp.as_account()
        check_account(account,
                      settled_cash=1000.0,
                      equity_with_loan=1000.0,
                      total_positions_value=0.0,
                      regt_equity=1000.0,
                      available_funds=1000.0,
                      excess_liquidity=1000.0,
                      cushion=1.0,
                      leverage=2.0,
                      net_leverage=0.0,
                      net_liquidation=1000.0)

        dt = trades_1[-1].dt
        pt.sync_last_sale_prices(dt)

        pp.calculate_performance()
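        # At the final prices the long is worth $900 (100 * $9) and the short
        # is -$1,100 (100 * $11): equity is 1000 + 900 - 1100 = 800, so gross
        # leverage is (900 + 1100) / 800 = 2.5 and net leverage is
        # (900 - 1100) / 800 = -0.25, matching the checks below.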

        # Validate that the account attributes were updated.
        account = pp.as_account()

        check_perf_period(
            pp,
            gross_leverage=2.5,
            net_leverage=-0.25,
            long_exposure=900.0,
            longs_count=1,
            short_exposure=-1100.0,
            shorts_count=1)

        check_account(account,
                      settled_cash=1000.0,
                      equity_with_loan=800.0,
                      total_positions_value=-200.0,
                      regt_equity=1000.0,
                      available_funds=1000.0,
                      excess_liquidity=1000.0,
                      cushion=1.25,
                      leverage=2.5,
                      net_leverage=-0.25,
                      net_liquidation=800.0)

    def test_levered_long_position(self):
        """
        start with $1,000, then buy 1000 shares at $10.
        price goes to $11
        """
        # post some trades in the market

        self.create_environment_stuff()

        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        txn = create_txn(trades[1].sid, trades[1].dt, 10.0, 1000)
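        # Buying 1,000 shares at $10 with only $1,000 of capital leaves
        # -$9,000 of cash against a $10,000 position (leverage 10); once the
        # price reaches $11 the position is worth $11,000 against $2,000 of
        # equity (leverage 5.5), as checked below.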
1342
|
|
|
pt = perf.PositionTracker(self.env.asset_finder, data_portal, |
1343
|
|
|
self.sim_params.data_frequency) |
1344
|
|
|
pp = perf.PerformancePeriod(1000.0, self.env.asset_finder, |
1345
|
|
|
self.sim_params.data_frequency, |
1346
|
|
|
data_portal) |
1347
|
|
|
pp.position_tracker = pt |
1348
|
|
|
|
1349
|
|
|
pt.execute_transaction(txn) |
1350
|
|
|
pp.handle_execution(txn) |
1351
|
|
|
|
1352
|
|
|
pp.calculate_performance() |
1353
|
|
|
|
1354
|
|
|
check_perf_period( |
1355
|
|
|
pp, |
1356
|
|
|
gross_leverage=10.0, |
1357
|
|
|
net_leverage=10.0, |
1358
|
|
|
long_exposure=10000.0, |
1359
|
|
|
longs_count=1, |
1360
|
|
|
short_exposure=0.0, |
1361
|
|
|
shorts_count=0) |
1362
|
|
|
|
1363
|
|
|
# Validate that the account attributes were updated. |
1364
|
|
|
pt.sync_last_sale_prices(trades[-2].dt) |
1365
|
|
|
|
1366
|
|
|
# Validate that the account attributes were updated. |
1367
|
|
|
account = pp.as_account() |
1368
|
|
|
check_account(account, |
1369
|
|
|
settled_cash=-9000.0, |
1370
|
|
|
equity_with_loan=1000.0, |
1371
|
|
|
total_positions_value=10000.0, |
1372
|
|
|
regt_equity=-9000.0, |
1373
|
|
|
available_funds=-9000.0, |
1374
|
|
|
excess_liquidity=-9000.0, |
1375
|
|
|
cushion=-9.0, |
1376
|
|
|
leverage=10.0, |
1377
|
|
|
net_leverage=10.0, |
1378
|
|
|
net_liquidation=1000.0) |
1379
|
|
|
|
1380
|
|
|
# now simulate a price jump to $11 |
1381
|
|
|
pt.sync_last_sale_prices(trades[-1].dt) |
1382
|
|
|
|
1383
|
|
|
pp.calculate_performance() |
        check_perf_period(
            pp,
            gross_leverage=5.5,
            net_leverage=5.5,
            long_exposure=11000.0,
            longs_count=1,
            short_exposure=0.0,
            shorts_count=0)

        # Validate that the account attributes were updated.
        account = pp.as_account()

        check_account(account,
                      settled_cash=-9000.0,
                      equity_with_loan=2000.0,
                      total_positions_value=11000.0,
                      regt_equity=-9000.0,
                      available_funds=-9000.0,
                      excess_liquidity=-9000.0,
                      cushion=-4.5,
                      leverage=5.5,
                      net_leverage=5.5,
                      net_liquidation=2000.0)

    def test_long_position(self):
        """
        verify that the performance period calculates properly for a
        single buy transaction
        """
        self.create_environment_stuff()

        # post some trades in the market
        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        txn = create_txn(trades[1].sid, trades[1].dt, 10.0, 100)
        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal,
                                    period_open=self.sim_params.period_start,
                                    period_close=self.sim_params.period_end)
        pp.position_tracker = pt

        pt.execute_transaction(txn)
        pp.handle_execution(txn)

        # This verifies that the last sale price is being correctly
        # set in the positions. If this is not the case then returns can
        # incorrectly show as sharply dipping if a transaction arrives
        # before a trade. This is caused by returns being based on holding
        # stocks with a last sale price of 0.
        self.assertEqual(pp.positions[1].last_sale_price, 10.0)

        pt.sync_last_sale_prices(trades[-1].dt)

        pp.calculate_performance()
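
        # Expected bookkeeping for this case: buying 100 shares at $10
        # consumes $1,000 of cash, the last trade at $11 marks the position
        # at $1,100, and the unrealized gain is therefore $100.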
1455
|
|
|
|
1456
|
|
|
self.assertEqual( |
1457
|
|
|
pp.period_cash_flow, |
1458
|
|
|
-1 * txn.price * txn.amount, |
1459
|
|
|
"capital used should be equal to the opposite of the transaction \ |
1460
|
|
|
cost of sole txn in test" |
1461
|
|
|
) |
1462
|
|
|
|
1463
|
|
|
self.assertEqual( |
1464
|
|
|
len(pp.positions), |
1465
|
|
|
1, |
1466
|
|
|
"should be just one position") |
1467
|
|
|
|
1468
|
|
|
self.assertEqual( |
1469
|
|
|
pp.positions[1].sid, |
1470
|
|
|
txn.sid, |
1471
|
|
|
"position should be in security with id 1") |
1472
|
|
|
|
1473
|
|
|
self.assertEqual( |
1474
|
|
|
pp.positions[1].amount, |
1475
|
|
|
txn.amount, |
1476
|
|
|
"should have a position of {sharecount} shares".format( |
1477
|
|
|
sharecount=txn.amount |
1478
|
|
|
) |
1479
|
|
|
) |
1480
|
|
|
|
1481
|
|
|
self.assertEqual( |
1482
|
|
|
pp.positions[1].cost_basis, |
1483
|
|
|
txn.price, |
1484
|
|
|
"should have a cost basis of 10" |
1485
|
|
|
) |
1486
|
|
|
|
1487
|
|
|
self.assertEqual( |
1488
|
|
|
pp.positions[1].last_sale_price, |
1489
|
|
|
trades[-1]['price'], |
1490
|
|
|
"last sale should be same as last trade. \ |
1491
|
|
|
expected {exp} actual {act}".format( |
1492
|
|
|
exp=trades[-1]['price'], |
1493
|
|
|
act=pp.positions[1].last_sale_price) |
1494
|
|
|
) |
1495
|
|
|
|
1496
|
|
|
self.assertEqual( |
1497
|
|
|
pp.ending_value, |
1498
|
|
|
1100, |
1499
|
|
|
"ending value should be price of last trade times number of \ |
1500
|
|
|
shares in position" |
1501
|
|
|
) |
1502
|
|
|
|
1503
|
|
|
self.assertEqual(pp.pnl, 100, "gain of 1 on 100 shares should be 100") |
1504
|
|
|
|
1505
|
|
|
check_perf_period( |
1506
|
|
|
pp, |
1507
|
|
|
gross_leverage=1.0, |
1508
|
|
|
net_leverage=1.0, |
1509
|
|
|
long_exposure=1100.0, |
1510
|
|
|
longs_count=1, |
1511
|
|
|
short_exposure=0.0, |
1512
|
|
|
shorts_count=0) |
1513
|
|
|
|
1514
|
|
|
# Validate that the account attributes were updated. |
1515
|
|
|
account = pp.as_account() |
1516
|
|
|
check_account(account, |
1517
|
|
|
settled_cash=0.0, |
1518
|
|
|
equity_with_loan=1100.0, |
1519
|
|
|
total_positions_value=1100.0, |
1520
|
|
|
regt_equity=0.0, |
1521
|
|
|
available_funds=0.0, |
1522
|
|
|
excess_liquidity=0.0, |
1523
|
|
|
cushion=0.0, |
1524
|
|
|
leverage=1.0, |
1525
|
|
|
net_leverage=1.0, |
1526
|
|
|
net_liquidation=1100.0) |
    def test_short_position(self):
        """verify that the performance period calculates properly for a \
single short-sale transaction"""
        self.create_environment_stuff(num_days=6)

        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11, 10, 9],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        trades_1 = trades[:-2]

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        txn = create_txn(trades[1].sid, trades[1].dt, 10.0, -100)
        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(
            1000.0, self.env.asset_finder,
            self.sim_params.data_frequency,
            data_portal)
        pp.position_tracker = pt

        pt.execute_transaction(txn)
        pp.handle_execution(txn)

        pt.sync_last_sale_prices(trades_1[-1].dt)

        pp.calculate_performance()
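
        # Expected numbers for the first slice of trades: shorting 100
        # shares at $10 brings in $1,000 of cash, and with the last price
        # in trades_1 at $11 the short is marked at -$1,100, for an
        # unrealized loss of $100.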
1565
|
|
|
|
1566
|
|
|
self.assertEqual( |
1567
|
|
|
pp.period_cash_flow, |
1568
|
|
|
-1 * txn.price * txn.amount, |
1569
|
|
|
"capital used should be equal to the opposite of the transaction\ |
1570
|
|
|
cost of sole txn in test" |
1571
|
|
|
) |
1572
|
|
|
|
1573
|
|
|
self.assertEqual( |
1574
|
|
|
len(pp.positions), |
1575
|
|
|
1, |
1576
|
|
|
"should be just one position") |
1577
|
|
|
|
1578
|
|
|
self.assertEqual( |
1579
|
|
|
pp.positions[1].sid, |
1580
|
|
|
txn.sid, |
1581
|
|
|
"position should be in security from the transaction" |
1582
|
|
|
) |
1583
|
|
|
|
1584
|
|
|
self.assertEqual( |
1585
|
|
|
pp.positions[1].amount, |
1586
|
|
|
-100, |
1587
|
|
|
"should have a position of -100 shares" |
1588
|
|
|
) |
1589
|
|
|
|
1590
|
|
|
self.assertEqual( |
1591
|
|
|
pp.positions[1].cost_basis, |
1592
|
|
|
txn.price, |
1593
|
|
|
"should have a cost basis of 10" |
1594
|
|
|
) |
1595
|
|
|
|
1596
|
|
|
self.assertEqual( |
1597
|
|
|
pp.positions[1].last_sale_price, |
1598
|
|
|
trades_1[-1]['price'], |
1599
|
|
|
"last sale should be price of last trade" |
1600
|
|
|
) |
1601
|
|
|
|
1602
|
|
|
self.assertEqual( |
1603
|
|
|
pp.ending_value, |
1604
|
|
|
-1100, |
1605
|
|
|
"ending value should be price of last trade times number of \ |
1606
|
|
|
shares in position" |
1607
|
|
|
) |
1608
|
|
|
|
1609
|
|
|
self.assertEqual(pp.pnl, -100, "gain of 1 on 100 shares should be 100") |
1610
|
|
|
|
1611
|
|
|
# simulate additional trades, and ensure that the position value |
1612
|
|
|
# reflects the new price |
1613
|
|
|
trades_2 = trades[-2:] |
1614
|
|
|
|
1615
|
|
|
# simulate a rollover to a new period |
1616
|
|
|
pp.rollover() |
1617
|
|
|
|
1618
|
|
|
pt.sync_last_sale_prices(trades[-1].dt) |
1619
|
|
|
|
1620
|
|
|
pp.calculate_performance() |
1621
|
|
|
|
1622
|
|
|
self.assertEqual( |
1623
|
|
|
pp.period_cash_flow, |
1624
|
|
|
0, |
1625
|
|
|
"capital used should be zero, there were no transactions in \ |
1626
|
|
|
performance period" |
1627
|
|
|
) |
1628
|
|
|
|
1629
|
|
|
self.assertEqual( |
1630
|
|
|
len(pp.positions), |
1631
|
|
|
1, |
1632
|
|
|
"should be just one position" |
1633
|
|
|
) |
1634
|
|
|
|
1635
|
|
|
self.assertEqual( |
1636
|
|
|
pp.positions[1].sid, |
1637
|
|
|
txn.sid, |
1638
|
|
|
"position should be in security from the transaction" |
1639
|
|
|
) |
1640
|
|
|
|
1641
|
|
|
self.assertEqual( |
1642
|
|
|
pp.positions[1].amount, |
1643
|
|
|
-100, |
1644
|
|
|
"should have a position of -100 shares" |
1645
|
|
|
) |
1646
|
|
|
|
1647
|
|
|
self.assertEqual( |
1648
|
|
|
pp.positions[1].cost_basis, |
1649
|
|
|
txn.price, |
1650
|
|
|
"should have a cost basis of 10" |
1651
|
|
|
) |
1652
|
|
|
|
1653
|
|
|
self.assertEqual( |
1654
|
|
|
pp.positions[1].last_sale_price, |
1655
|
|
|
trades_2[-1].price, |
1656
|
|
|
"last sale should be price of last trade" |
1657
|
|
|
) |
1658
|
|
|
|
1659
|
|
|
self.assertEqual( |
1660
|
|
|
pp.ending_value, |
1661
|
|
|
-900, |
1662
|
|
|
"ending value should be price of last trade times number of \ |
1663
|
|
|
shares in position") |
1664
|
|
|
|
1665
|
|
|
self.assertEqual( |
1666
|
|
|
pp.pnl, |
1667
|
|
|
200, |
1668
|
|
|
"drop of 2 on -100 shares should be 200" |
1669
|
|
|
) |
        # now run a performance period encompassing the entire trade sample.
        ptTotal = perf.PositionTracker(self.env.asset_finder, data_portal,
                                       self.sim_params.data_frequency)
        ppTotal = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                         self.sim_params.data_frequency,
                                         data_portal)
        ppTotal.position_tracker = ptTotal

        ptTotal.execute_transaction(txn)
        ppTotal.handle_execution(txn)

        ptTotal.sync_last_sale_prices(trades[-1].dt)

        ppTotal.calculate_performance()

        self.assertEqual(
            ppTotal.period_cash_flow,
            -1 * txn.price * txn.amount,
            "capital used should be equal to the opposite of the transaction \
            cost of sole txn in test"
        )

        self.assertEqual(
            len(ppTotal.positions),
            1,
            "should be just one position"
        )
        self.assertEqual(
            ppTotal.positions[1].sid,
            txn.sid,
            "position should be in security from the transaction"
        )

        self.assertEqual(
            ppTotal.positions[1].amount,
            -100,
            "should have a position of -100 shares"
        )

        self.assertEqual(
            ppTotal.positions[1].cost_basis,
            txn.price,
            "should have a cost basis of 10"
        )

        self.assertEqual(
            ppTotal.positions[1].last_sale_price,
            trades_2[-1].price,
            "last sale should be price of last trade"
        )

        self.assertEqual(
            ppTotal.ending_value,
            -900,
            "ending value should be price of last trade times number of \
            shares in position")

        self.assertEqual(
            ppTotal.pnl,
            100,
            "drop of 1 on -100 shares should be 100"
        )
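
        # Leverage sanity check for the remaining short (same assumptions
        # as above): cash is $2,000 ($1,000 starting cash plus $1,000 of
        # short proceeds) and the -100 share position marked at $9 is a
        # -$900 exposure, so net liquidation is $1,100 and gross leverage
        # is roughly 900 / 1100 = 0.8181.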
        check_perf_period(
            pp,
            gross_leverage=0.8181,
            net_leverage=-0.8181,
            long_exposure=0.0,
            longs_count=0,
            short_exposure=-900.0,
            shorts_count=1)

        # Validate the account attributes.
        account = ppTotal.as_account()
        check_account(account,
                      settled_cash=2000.0,
                      equity_with_loan=1100.0,
                      total_positions_value=-900.0,
                      regt_equity=2000.0,
                      available_funds=2000.0,
                      excess_liquidity=2000.0,
                      cushion=1.8181,
                      leverage=0.8181,
                      net_leverage=-0.8181,
                      net_liquidation=1100.0)

    def test_covering_short(self):
        """verify performance where short is bought and covered, and shares \
trade after cover"""
        self.create_environment_stuff(num_days=10)

        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11, 9, 8, 7, 8, 9, 10],
            [100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
            onesec,
            self.sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        short_txn = create_txn(
            trades[1].sid,
            trades[1].dt,
            10.0,
            -100,
        )
        cover_txn = create_txn(trades[6].sid, trades[6].dt, 7.0, 100)
        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal)
        pp.position_tracker = pt

        pt.execute_transaction(short_txn)
        pp.handle_execution(short_txn)
        pt.execute_transaction(cover_txn)
        pp.handle_execution(cover_txn)

        pt.sync_last_sale_prices(trades[-1].dt)

        pp.calculate_performance()

        short_txn_cost = short_txn.price * short_txn.amount
        cover_txn_cost = cover_txn.price * cover_txn.amount
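
        # Cash sketch: the short at $10 brings in $1,000 and the cover at
        # $7 pays out $700, so the net cash flow is +$300, which is also
        # the realized gain once the position is flat.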
        self.assertEqual(
            pp.period_cash_flow,
            -1 * short_txn_cost - cover_txn_cost,
            "capital used should be equal to the net transaction costs"
        )

        self.assertEqual(
            len(pp.positions),
            1,
            "should be just one position"
        )

        self.assertEqual(
            pp.positions[1].sid,
            short_txn.sid,
            "position should be in security from the transaction"
        )

        self.assertEqual(
            pp.positions[1].amount,
            0,
            "should have a position of 0 shares after the cover"
        )

        self.assertEqual(
            pp.positions[1].cost_basis,
            0,
            "a covered position should have a cost basis of 0"
        )

        self.assertEqual(
            pp.positions[1].last_sale_price,
            trades[-1].price,
            "last sale should be price of last trade"
        )

        self.assertEqual(
            pp.ending_value,
            0,
            "ending value should be price of last trade times number of \
            shares in position"
        )

        self.assertEqual(
            pp.pnl,
            300,
            "gain of 3 on 100 shares should be 300"
        )

        check_perf_period(
            pp,
            gross_leverage=0.0,
            net_leverage=0.0,
            long_exposure=0.0,
            longs_count=0,
            short_exposure=0.0,
            shorts_count=0)

        account = pp.as_account()
        check_account(account,
                      settled_cash=1300.0,
                      equity_with_loan=1300.0,
                      total_positions_value=0.0,
                      regt_equity=1300.0,
                      available_funds=1300.0,
                      excess_liquidity=1300.0,
                      cushion=1.0,
                      leverage=0.0,
                      net_leverage=0.0,
                      net_liquidation=1300.0)

    def test_cost_basis_calc(self):
        self.create_environment_stuff(num_days=5)

        history_args = (
            1,
            [10, 11, 11, 12, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            self.env
        )
        trades = factory.create_trade_history(*history_args)
        transactions = factory.create_txn_history(*history_args)[:4]

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(
            1000.0,
            self.env.asset_finder,
            self.sim_params.data_frequency,
            data_portal,
            period_open=self.sim_params.period_start,
            period_close=self.sim_params.trading_days[-1]
        )
        pp.position_tracker = pt

        average_cost = 0
        for i, txn in enumerate(transactions):
            pt.execute_transaction(txn)
            pp.handle_execution(txn)
            average_cost = (average_cost * i + txn.price) / (i + 1)
            self.assertEqual(pt.positions[1].cost_basis, average_cost)
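
        # With four buys of 100 shares at 10, 11, 11 and 12, the running
        # average cost basis ends at (10 + 11 + 11 + 12) / 4 = 11, and
        # marking the 400 shares at the $12 trade gives an unrealized gain
        # of (12 - 11) * 400 = 400.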
        dt = trades[-2].dt
        self.assertEqual(
            pt.positions[1].last_sale_price,
            trades[-2].price,
            "should have a last sale of 12, got {val}".format(
                val=pt.positions[1].last_sale_price)
        )

        self.assertEqual(
            pt.positions[1].cost_basis,
            11,
            "should have a cost basis of 11"
        )

        pt.sync_last_sale_prices(dt)

        pp.calculate_performance()

        self.assertEqual(
            pp.pnl,
            400
        )

        down_tick = trades[-1]

        sale_txn = create_txn(
            down_tick.sid,
            down_tick.dt,
            10.0,
            -100)

        pp.rollover()

        pt.execute_transaction(sale_txn)
        pp.handle_execution(sale_txn)

        dt = down_tick.dt
        pt.sync_last_sale_prices(dt)

        pp.calculate_performance()
        self.assertEqual(
            pp.positions[1].last_sale_price,
            10,
            "should have a last sale of 10, was {val}".format(
                val=pp.positions[1].last_sale_price)
        )

        self.assertEqual(
            pp.positions[1].cost_basis,
            11,
            "should have a cost basis of 11"
        )

        self.assertEqual(pp.pnl, -800, "this period goes from +400 to -400")

        pt3 = perf.PositionTracker(self.env.asset_finder, data_portal,
                                   self.sim_params.data_frequency)
        pp3 = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                     self.sim_params.data_frequency,
                                     data_portal)
        pp3.position_tracker = pt3

        average_cost = 0
        for i, txn in enumerate(transactions):
            pt3.execute_transaction(txn)
            pp3.handle_execution(txn)
            average_cost = (average_cost * i + txn.price) / (i + 1)
            self.assertEqual(pp3.positions[1].cost_basis, average_cost)

        pt3.execute_transaction(sale_txn)
        pp3.handle_execution(sale_txn)

        trades.append(down_tick)
        pt3.sync_last_sale_prices(trades[-1].dt)

        pp3.calculate_performance()
        self.assertEqual(
            pp3.positions[1].last_sale_price,
            10,
            "should have a last sale of 10"
        )

        self.assertEqual(
            pp3.positions[1].cost_basis,
            11,
            "should have a cost basis of 11"
        )

        self.assertEqual(
            pp3.pnl,
            -400,
            "should be -400 for all trades and transactions in period"
        )

    def test_cost_basis_calc_close_pos(self):
        self.create_environment_stuff(num_days=8)

        history_args = (
            1,
            [10, 9, 11, 8, 9, 12, 13, 14],
            [200, -100, -100, 100, -300, 100, 500, 400],
            onesec,
            self.sim_params,
            self.env
        )
        cost_bases = [10, 10, 0, 8, 9, 9, 13, 13.5]

        trades = factory.create_trade_history(*history_args)
        transactions = factory.create_txn_history(*history_args)

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal)
        pp.position_tracker = pt
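
        # The cost_bases list above encodes the expected rules (a sketch,
        # inferred from the fixture itself): adding in the same direction
        # blends the fill price into a weighted average, reducing a
        # position leaves the basis unchanged, and closing or flipping
        # through zero resets it (to 0 when flat, or to the fill price of
        # the flipping trade).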
        for txn, cb in zip(transactions, cost_bases):
            pt.execute_transaction(txn)
            pp.handle_execution(txn)
            self.assertEqual(pp.positions[1].cost_basis, cb)

        pp.calculate_performance()

        self.assertEqual(pp.positions[1].cost_basis, cost_bases[-1])


class TestPosition(unittest.TestCase):
    def setUp(self):
        pass

    def test_serialization(self):
        dt = pd.Timestamp("1984/03/06 3:00PM")
        pos = perf.Position(10, amount=np.float64(120.0), last_sale_date=dt,
                            last_sale_price=3.4)

        p_string = dumps_with_persistent_ids(pos)

        test = loads_with_persistent_ids(p_string, env=None)
        nt.assert_dict_equal(test.__dict__, pos.__dict__)


class TestPositionTracker(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        cls.env = TradingEnvironment()
        futures_metadata = {3: {'contract_multiplier': 1000},
                            4: {'contract_multiplier': 1000}}
        cls.env.write_data(equities_identifiers=[1, 2],
                           futures_data=futures_metadata)

    @classmethod
    def tearDownClass(cls):
        del cls.env

    def setUp(self):
        self.tempdir = TempDirectory()

    def tearDown(self):
        self.tempdir.cleanup()

    def test_empty_positions(self):
        """
        make sure all the empty position stats return a numeric 0

        Originally this bug was due to np.dot([], []) returning
        np.bool_(False)
        """
        sim_params = factory.create_simulation_parameters(
            num_days=4, env=self.env
        )
        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            sim_params,
            {1: trades})

        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  sim_params.data_frequency)

        pos_stats = pt.stats()

        stats = [
            'net_value',
            'net_exposure',
            'gross_value',
            'gross_exposure',
            'short_value',
            'short_exposure',
            'shorts_count',
            'long_value',
            'long_exposure',
            'longs_count',
        ]
        for name in stats:
            val = getattr(pos_stats, name)
            self.assertEqual(val, 0)
            self.assertNotIsInstance(val, (bool, np.bool_))

    def test_position_values_and_exposures(self):
        pt = perf.PositionTracker(self.env.asset_finder, None, None)
        dt = pd.Timestamp("1984/03/06 3:00PM")
        pos1 = perf.Position(1, amount=np.float64(10.0),
                             last_sale_date=dt, last_sale_price=10)
        pos2 = perf.Position(2, amount=np.float64(-20.0),
                             last_sale_date=dt, last_sale_price=10)
        pos3 = perf.Position(3, amount=np.float64(30.0),
                             last_sale_date=dt, last_sale_price=10)
        pos4 = perf.Position(4, amount=np.float64(-40.0),
                             last_sale_date=dt, last_sale_price=10)
        pt.update_positions({1: pos1, 2: pos2, 3: pos3, 4: pos4})
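
        # Note on the expected numbers below: sids 1 and 2 are equities,
        # whose value and exposure are both price * amount, while sids 3
        # and 4 are futures with a contract_multiplier of 1000, so they
        # contribute price * amount * 1000 to the exposure figures but
        # nothing to the *_value figures.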
        # Test long-only methods

        pos_stats = pt.stats()
        self.assertEqual(100, pos_stats.long_value)
        self.assertEqual(100 + 300000, pos_stats.long_exposure)
        self.assertEqual(2, pos_stats.longs_count)

        # Test short-only methods
        self.assertEqual(-200, pos_stats.short_value)
        self.assertEqual(-200 - 400000, pos_stats.short_exposure)
        self.assertEqual(2, pos_stats.shorts_count)

        # Test gross and net values
        self.assertEqual(100 + 200, pos_stats.gross_value)
        self.assertEqual(100 - 200, pos_stats.net_value)

        # Test gross and net exposures
        self.assertEqual(100 + 200 + 300000 + 400000, pos_stats.gross_exposure)
        self.assertEqual(100 - 200 + 300000 - 400000, pos_stats.net_exposure)

    def test_serialization(self):
        pt = perf.PositionTracker(self.env.asset_finder, None, None)
        dt = pd.Timestamp("1984/03/06 3:00PM")
        pos1 = perf.Position(1, amount=np.float64(120.0),
                             last_sale_date=dt, last_sale_price=3.4)
        pos3 = perf.Position(3, amount=np.float64(100.0),
                             last_sale_date=dt, last_sale_price=3.4)

        pt.update_positions({1: pos1, 3: pos3})
        p_string = dumps_with_persistent_ids(pt)
        test = loads_with_persistent_ids(p_string, env=self.env)
        nt.assert_count_equal(test.positions.keys(), pt.positions.keys())
        for sid in pt.positions:
            nt.assert_dict_equal(test.positions[sid].__dict__,
                                 pt.positions[sid].__dict__)


class TestPerformancePeriod(unittest.TestCase):

    def test_serialization(self):
        env = TradingEnvironment()
        pp = perf.PerformancePeriod(100, env.asset_finder, 'minute', None)

        p_string = dumps_with_persistent_ids(pp)
        test = loads_with_persistent_ids(p_string, env=env)

        correct = pp.__dict__.copy()
        correct.pop('_data_portal')

        nt.assert_count_equal(test.__dict__.keys(), correct.keys())

        equal_keys = list(correct.keys())
        equal_keys.remove('_account_store')
        equal_keys.remove('_portfolio_store')

        for k in equal_keys:
            nt.assert_equal(test.__dict__[k], correct[k])