1
|
|
|
# |
2
|
|
|
# Copyright 2013 Quantopian, Inc. |
3
|
|
|
# |
4
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
5
|
|
|
# you may not use this file except in compliance with the License. |
6
|
|
|
# You may obtain a copy of the License at |
7
|
|
|
# |
8
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0 |
9
|
|
|
# |
10
|
|
|
# Unless required by applicable law or agreed to in writing, software |
11
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, |
12
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13
|
|
|
# See the License for the specific language governing permissions and |
14
|
|
|
# limitations under the License. |
15
|
|
|
|
16
|
|
|
from __future__ import division |
17
|
|
|
|
18
|
|
|
from datetime import ( |
19
|
|
|
datetime, |
20
|
|
|
timedelta, |
21
|
|
|
) |
22
|
|
|
import logging |
23
|
|
|
|
24
|
|
|
from testfixtures import TempDirectory |
25
|
|
|
import unittest |
26
|
|
|
import nose.tools as nt |
27
|
|
|
import pytz |
28
|
|
|
|
29
|
|
|
import pandas as pd |
30
|
|
|
import numpy as np |
31
|
|
|
from six.moves import range, zip |
32
|
|
|
|
33
|
|
|
from zipline.data.us_equity_pricing import ( |
34
|
|
|
SQLiteAdjustmentWriter, |
35
|
|
|
SQLiteAdjustmentReader, |
36
|
|
|
) |
37
|
|
|
import zipline.utils.factory as factory |
38
|
|
|
import zipline.finance.performance as perf |
39
|
|
|
from zipline.finance.transaction import create_transaction |
40
|
|
|
import zipline.utils.math_utils as zp_math |
41
|
|
|
|
42
|
|
|
from zipline.finance.blotter import Order |
43
|
|
|
from zipline.finance.commission import PerShare, PerTrade, PerDollar |
44
|
|
|
from zipline.finance.slippage import TradeBar |
45
|
|
|
from zipline.finance.trading import TradingEnvironment |
46
|
|
|
from zipline.pipeline.loaders.synthetic import NullAdjustmentReader |
47
|
|
|
from zipline.utils.factory import create_simulation_parameters |
48
|
|
|
from zipline.utils.serialization_utils import ( |
49
|
|
|
loads_with_persistent_ids, dumps_with_persistent_ids |
50
|
|
|
) |
51
|
|
|
import zipline.protocol as zp |
52
|
|
|
from zipline.protocol import Event |
53
|
|
|
from zipline.utils.test_utils import create_data_portal_from_trade_history |
54
|
|
|
|
55
|
|
|
# Module-level logger for these performance-tracking tests.
logger = logging.getLogger('Test Perf Tracking')

# Common time deltas used when generating synthetic trade histories.
onesec = timedelta(seconds=1)
oneday = timedelta(days=1)
# Length of a regular US equity trading session (9:30am - 4:00pm).
tradingday = timedelta(hours=6, minutes=30)

# nose.tools changed name in python 3
# (assert_items_equal became assert_count_equal); alias so tests can use
# the python-3 name on either interpreter.
if not hasattr(nt, 'assert_count_equal'):
    nt.assert_count_equal = nt.assert_items_equal
64
|
|
|
|
65
|
|
|
|
66
|
|
|
def check_perf_period(pp,
                      gross_leverage,
                      net_leverage,
                      long_exposure,
                      longs_count,
                      short_exposure,
                      shorts_count):
    """
    Assert that the dict exported by a performance period matches the
    expected leverage, exposure, and position-count values, each within a
    0.1% relative tolerance.
    """
    actual = pp.to_dict()
    # Field name in the exported dict paired with its expected value;
    # checked in the same order the original assertions ran.
    expectations = (
        ('gross_leverage', gross_leverage),
        ('net_leverage', net_leverage),
        ('long_exposure', long_exposure),
        ('longs_count', longs_count),
        ('short_exposure', short_exposure),
        ('shorts_count', shorts_count),
    )
    for field, expected in expectations:
        np.testing.assert_allclose(expected, actual[field], rtol=1e-3)
87
|
|
|
|
88
|
|
|
|
89
|
|
|
def check_account(account,
                  settled_cash,
                  equity_with_loan,
                  total_positions_value,
                  regt_equity,
                  available_funds,
                  excess_liquidity,
                  cushion,
                  leverage,
                  net_leverage,
                  net_liquidation):
    """
    Assert that an account object's fields match the expected values, each
    within a 0.1% relative tolerance.

    The callers use this on a long-only portfolio that is only partially
    invested, so net and gross leverage are expected to be equal.
    """
    # Pair each account field with its expected value and check them in
    # the same order the original one-per-line assertions ran.
    expectations = (
        ('settled_cash', settled_cash),
        ('equity_with_loan', equity_with_loan),
        ('total_positions_value', total_positions_value),
        ('regt_equity', regt_equity),
        ('available_funds', available_funds),
        ('excess_liquidity', excess_liquidity),
        ('cushion', cushion),
        ('leverage', leverage),
        ('net_leverage', net_leverage),
        ('net_liquidation', net_liquidation),
    )
    for field, expected in expectations:
        np.testing.assert_allclose(expected, account[field], rtol=1e-3)
122
|
|
|
|
123
|
|
|
|
124
|
|
|
def create_txn(sid, dt, price, amount):
    """
    Create a fake transaction to be filled and processed prior to the execution
    of a given trade event.

    Builds a throwaway Order and TradeBar just so create_transaction has the
    objects it expects; the caller only cares about the resulting transaction.
    """
    stub_order = Order(dt, sid, amount, id=None)
    stub_bar = TradeBar(sid, dt, price, None)
    return create_transaction(stub_bar, stub_order, price, amount)
132
|
|
|
|
133
|
|
|
|
134
|
|
|
def benchmark_events_in_range(sim_params, env):
    """
    Build BENCHMARK events for every benchmark return whose date falls
    inside the simulation's [period_start, period_end] window.
    """
    window_start = sim_params.period_start.date()
    window_end = sim_params.period_end.date()
    events = []
    for dt, ret in env.benchmark_returns.iteritems():
        if not (window_start <= dt.date() <= window_end):
            continue
        events.append(
            Event({'dt': dt,
                   'returns': ret,
                   'type': zp.DATASOURCE_TYPE.BENCHMARK,
                   # We explicitly rely on the behavior that benchmarks sort
                   # before any other events.
                   'source_id': '1Abenchmarks'})
        )
    return events
146
|
|
|
|
147
|
|
|
|
148
|
|
|
def calculate_results(sim_params,
                      env,
                      tempdir,
                      benchmark_events,
                      trade_events,
                      adjustment_reader,
                      splits=None,
                      txns=None,
                      commissions=None):
    """
    Run the given events through a stripped down version of the loop in
    AlgorithmSimulator.transform.

    IMPORTANT NOTE FOR TEST WRITERS/READERS:

    This loop has some wonky logic for the order of event processing for
    datasource types. This exists mostly to accommodate existing legacy tests
    that were making assumptions about how events would be sorted.

    In particular:

        - Dividends passed for a given date are processed PRIOR to any events
          for that date.
        - Splits passed for a given date are processed AFTER any events for
          that date.

    Tests that use this helper should not be considered useful guarantees of
    the behavior of AlgorithmSimulator on a stream containing the same events
    unless the subgroups have been explicitly re-sorted in this way.

    Parameters
    ----------
    sim_params : simulation parameters supplying the trading_days to iterate.
    env : trading environment passed through to the data portal and tracker.
    tempdir : temp directory used to back the synthetic data portal.
    benchmark_events : benchmark event stream (currently unused in the loop
        body; kept for signature compatibility with callers).
    trade_events : dict mapping sid -> list of trade events.
    adjustment_reader : adjustment reader for the data portal; falls back to
        NullAdjustmentReader when falsy.
    splits : optional dict mapping date -> list of (sid, ratio) splits.
    txns : optional list of transactions, matched to dates via txn.dt.
    commissions : optional dict mapping date -> list of commission events.

    Returns
    -------
    list of per-day performance messages, each augmented with an 'account'
    entry from PerformanceTracker.get_account.
    """
    # Normalize the optional collections so the loop below can index them
    # unconditionally.
    txns = txns or []
    splits = splits or {}
    commissions = commissions or {}

    adjustment_reader = adjustment_reader or NullAdjustmentReader()

    data_portal = create_data_portal_from_trade_history(
        env,
        tempdir,
        sim_params,
        trade_events,
    )
    data_portal._adjustment_reader = adjustment_reader

    perf_tracker = perf.PerformanceTracker(sim_params, env, data_portal)

    results = []

    for date in sim_params.trading_days:

        for txn in filter(lambda txn: txn.dt == date, txns):
            # Process txns for this date.
            perf_tracker.process_transaction(txn)

        # NOTE: the try blocks intentionally wrap the processing calls as
        # well as the dict lookups, so a KeyError raised while processing is
        # also treated as "nothing to do for this date".
        try:
            commissions_for_date = commissions[date]
            for comm in commissions_for_date:
                perf_tracker.process_commission(comm)
        except KeyError:
            pass

        try:
            splits_for_date = splits[date]
            perf_tracker.handle_splits(splits_for_date)
        except KeyError:
            pass

        # Close out the day and capture the emitted performance message.
        msg = perf_tracker.handle_market_close_daily(date)
        msg['account'] = perf_tracker.get_account(True, date)
        results.append(msg)
    return results
221
|
|
|
|
222
|
|
|
|
223
|
|
|
def check_perf_tracker_serialization(perf_tracker):
    """
    Round-trip a PerformanceTracker through (de)serialization and verify
    that its scalar attributes survive and that the restored performance
    periods regain a position tracker.
    """
    attrs_to_compare = (
        'emission_rate',
        'txn_count',
        'market_open',
        'last_close',
        'period_start',
        'day_count',
        'capital_base',
        'market_close',
        'saved_dt',
        'period_end',
        'total_days',
    )
    # Serialize, then rebuild against the same trading environment.
    payload = dumps_with_persistent_ids(perf_tracker)
    restored = loads_with_persistent_ids(payload, env=perf_tracker.env)

    for attr in attrs_to_compare:
        nt.assert_equal(getattr(restored, attr),
                        getattr(perf_tracker, attr),
                        attr)

    for period in (restored.cumulative_performance,
                   restored.todays_performance):
        nt.assert_true(hasattr(period, '_position_tracker'))
250
|
|
|
|
251
|
|
|
|
252
|
|
|
def setup_env_data(env, sim_params, sids):
    """
    Register each sid with the environment as an equity that is alive for
    the entire simulation window.
    """
    first_day = sim_params.trading_days[0]
    last_day = sim_params.trading_days[-1]
    env.write_data(equities_data={
        sid: {"start_date": first_day, "end_date": last_day}
        for sid in sids
    })
261
|
|
|
|
262
|
|
|
|
263
|
|
|
class TestSplitPerformance(unittest.TestCase):
    """Verify that stock splits are applied to open positions and cash."""

    @classmethod
    def setUpClass(cls):
        # Two-day simulation starting from a $10,000 capital base; sid 1 is
        # the only equity registered.
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(num_days=2,
                                                      capital_base=10e3)

        setup_env_data(cls.env, cls.sim_params, [1])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)
        cls.tempdir = TempDirectory()

    @classmethod
    def tearDownClass(cls):
        cls.tempdir.cleanup()

    def test_split_long_position(self):
        events = factory.create_trade_history(
            1,
            # TODO: Should we provide adjusted prices in the tests, or provide
            # raw prices and adjust via DataPortal?
            [20, 60],
            [100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # set up a long position in sid 1
        # 100 shares at $20 apiece = $2000 position
        txns = [create_txn(events[0].sid, events[0].dt, 20, 100)]

        # set up a split with ratio 3 occurring at the start of the second
        # day.
        splits = {
            events[1].dt: [(1, 3)]
        }

        results = calculate_results(self.sim_params, self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: events},
                                    NullAdjustmentReader(),
                                    txns=txns, splits=splits)

        # should have 33 shares (at $60 apiece) and $20 in cash
        self.assertEqual(2, len(results))

        latest_positions = results[1]['daily_perf']['positions']
        self.assertEqual(1, len(latest_positions))

        # check the last position to make sure it's been updated
        position = latest_positions[0]

        # 100 shares / 3 -> 33 whole shares; price and cost basis scale
        # up by the same ratio.
        self.assertEqual(1, position['sid'])
        self.assertEqual(33, position['amount'])
        self.assertEqual(60, position['cost_basis'])
        self.assertEqual(60, position['last_sale_price'])

        # since we started with $10000, and we spent $2000 on the
        # position, but then got $20 back, we should have $8020
        # (or close to it) in cash.

        # we won't get exactly 8020 because sometimes a split is
        # denoted as a ratio like 0.3333, and we lose some digits
        # of precision. thus, make sure we're pretty close.
        daily_perf = results[1]['daily_perf']

        self.assertTrue(
            zp_math.tolerant_equals(8020,
                                    daily_perf['ending_cash'], 1),
            "ending_cash was {0}".format(daily_perf['ending_cash']))

        # Validate that the account attributes were updated.
        account = results[1]['account']
        self.assertEqual(float('inf'), account['day_trades_remaining'])
        # this is a long only portfolio that is only partially invested
        # so net and gross leverage are equal.
        np.testing.assert_allclose(0.198, account['leverage'], rtol=1e-3)
        np.testing.assert_allclose(0.198, account['net_leverage'], rtol=1e-3)
        np.testing.assert_allclose(8020, account['regt_equity'], rtol=1e-3)
        self.assertEqual(float('inf'), account['regt_margin'])
        np.testing.assert_allclose(8020, account['available_funds'], rtol=1e-3)
        self.assertEqual(0, account['maintenance_margin_requirement'])
        np.testing.assert_allclose(10000,
                                   account['equity_with_loan'], rtol=1e-3)
        self.assertEqual(float('inf'), account['buying_power'])
        self.assertEqual(0, account['initial_margin_requirement'])
        np.testing.assert_allclose(8020, account['excess_liquidity'],
                                   rtol=1e-3)
        np.testing.assert_allclose(8020, account['settled_cash'], rtol=1e-3)
        np.testing.assert_allclose(10000, account['net_liquidation'],
                                   rtol=1e-3)
        np.testing.assert_allclose(0.802, account['cushion'], rtol=1e-3)
        np.testing.assert_allclose(1980, account['total_positions_value'],
                                   rtol=1e-3)
        self.assertEqual(0, account['accrued_interest'])

        for i, result in enumerate(results):
            for perf_kind in ('daily_perf', 'cumulative_perf'):
                perf_result = result[perf_kind]
                # prices aren't changing, so pnl and returns should be 0.0
                self.assertEqual(0.0, perf_result['pnl'],
                                 "day %s %s pnl %s instead of 0.0" %
                                 (i, perf_kind, perf_result['pnl']))
                self.assertEqual(0.0, perf_result['returns'],
                                 "day %s %s returns %s instead of 0.0" %
                                 (i, perf_kind, perf_result['returns']))
372
|
|
|
|
373
|
|
|
|
374
|
|
|
class TestCommissionEvents(unittest.TestCase):
    """Verify commission model math and commission handling in the tracker."""

    @classmethod
    def setUpClass(cls):
        # Five-day simulation starting from a $10,000 capital base.
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(num_days=5,
                                                      capital_base=10e3)
        setup_env_data(cls.env, cls.sim_params, [0, 1, 133])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)
        cls.tempdir = TempDirectory()

    @classmethod
    def tearDownClass(cls):
        cls.tempdir.cleanup()

    def test_commission_event(self):
        trade_events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # Test commission models and validate result
        # Expected commission amounts:
        # PerShare commission: 1.00, 1.00, 1.50 = $3.50
        # PerTrade commission: 5.00, 5.00, 5.00 = $15.00
        # PerDollar commission: 1.50, 3.00, 4.50 = $9.00
        # Total commission = $3.50 + $15.00 + $9.00 = $27.50

        # Create 3 transactions: 50, 100, 150 shares traded @ $20
        first_trade = trade_events[0]
        transactions = [create_txn(first_trade.sid, first_trade.dt, 20, i)
                        for i in [50, 100, 150]]

        # Create commission models and validate that produce expected
        # commissions.
        models = [PerShare(cost=0.01, min_trade_cost=1.00),
                  PerTrade(cost=5.00),
                  PerDollar(cost=0.0015)]
        expected_results = [3.50, 15.0, 9.0]

        for model, expected in zip(models, expected_results):
            total_commission = 0
            for trade in transactions:
                # calculate() returns a tuple; index 1 is the commission.
                total_commission += model.calculate(trade)[1]
            self.assertEqual(total_commission, expected)

        # Verify that commission events are handled correctly by
        # PerformanceTracker.
        commissions = {}
        cash_adj_dt = trade_events[0].dt
        cash_adjustment = factory.create_commission(1, 300.0, cash_adj_dt)
        commissions[cash_adj_dt] = [cash_adjustment]

        # Insert a purchase order.
        txns = [create_txn(1, cash_adj_dt, 20, 1)]
        results = calculate_results(self.sim_params,
                                    self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: trade_events},
                                    NullAdjustmentReader(),
                                    txns=txns,
                                    commissions=commissions)

        # Validate that we lost 320 dollars from our cash pool.
        # ($20 purchase + $300 commission.)
        self.assertEqual(results[-1]['cumulative_perf']['ending_cash'],
                         9680, "Should have lost 320 from cash pool.")
        # Validate that the cost basis of our position changed.
        self.assertEqual(results[-1]['daily_perf']['positions']
                         [0]['cost_basis'], 320.0)
        # Validate that the account attributes were updated.
        account = results[1]['account']
        self.assertEqual(float('inf'), account['day_trades_remaining'])
        np.testing.assert_allclose(0.001, account['leverage'], rtol=1e-3,
                                   atol=1e-4)
        np.testing.assert_allclose(9680, account['regt_equity'], rtol=1e-3)
        self.assertEqual(float('inf'), account['regt_margin'])
        np.testing.assert_allclose(9680, account['available_funds'],
                                   rtol=1e-3)
        self.assertEqual(0, account['maintenance_margin_requirement'])
        np.testing.assert_allclose(9690,
                                   account['equity_with_loan'], rtol=1e-3)
        self.assertEqual(float('inf'), account['buying_power'])
        self.assertEqual(0, account['initial_margin_requirement'])
        np.testing.assert_allclose(9680, account['excess_liquidity'],
                                   rtol=1e-3)
        np.testing.assert_allclose(9680, account['settled_cash'],
                                   rtol=1e-3)
        np.testing.assert_allclose(9690, account['net_liquidation'],
                                   rtol=1e-3)
        np.testing.assert_allclose(0.999, account['cushion'], rtol=1e-3)
        np.testing.assert_allclose(10, account['total_positions_value'],
                                   rtol=1e-3)
        self.assertEqual(0, account['accrued_interest'])

    def test_commission_zero_position(self):
        """
        Ensure no div-by-zero errors.
        """
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # Buy and sell the same sid so that we have a zero position by the
        # time of events[3].
        txns = [
            create_txn(events[0].sid, events[0].dt, 20, 1),
            create_txn(events[1].sid, events[1].dt, 20, -1),
        ]

        # Add a cash adjustment at the time of event[3].
        cash_adj_dt = events[3].dt
        commissions = {}
        cash_adjustment = factory.create_commission(1, 300.0, cash_adj_dt)
        commissions[cash_adj_dt] = [cash_adjustment]

        results = calculate_results(self.sim_params,
                                    self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: events},
                                    NullAdjustmentReader(),
                                    txns=txns,
                                    commissions=commissions)
        # Validate that we lost 300 dollars from our cash pool.
        self.assertEqual(results[-1]['cumulative_perf']['ending_cash'],
                         9700)

    def test_commission_no_position(self):
        """
        Ensure no position-not-found or sid-not-found errors.
        """
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # Add a cash adjustment at the time of event[3].
        cash_adj_dt = events[3].dt
        commissions = {}
        cash_adjustment = factory.create_commission(1, 300.0, cash_adj_dt)
        commissions[cash_adj_dt] = [cash_adjustment]

        results = calculate_results(self.sim_params,
                                    self.env,
                                    self.tempdir,
                                    self.benchmark_events,
                                    {1: events},
                                    NullAdjustmentReader(),
                                    commissions=commissions)
        # Validate that we lost 300 dollars from our cash pool.
        self.assertEqual(results[-1]['cumulative_perf']['ending_cash'],
                         9700)
541
|
|
|
|
542
|
|
|
|
543
|
|
|
class MockDailyBarSpotReader(object):
    """
    Stub daily-bar reader whose every (sid, day, column) lookup prices at a
    flat 100.0, letting adjustment writers run without real pricing data.
    """

    def spot_price(self, sid, day, colname):
        # Constant price regardless of the requested sid/day/column.
        return 100.0
547
|
|
|
|
548
|
|
|
|
549
|
|
|
class TestDividendPerformance(unittest.TestCase): |
550
|
|
|
|
551
|
|
|
    @classmethod
    def setUpClass(cls):
        # Six-day simulation from a $10,000 capital base with two equities
        # (needed for the stock-dividend test, which pays sid 1's dividend
        # in shares of sid 2).
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(num_days=6,
                                                      capital_base=10e3)

        setup_env_data(cls.env, cls.sim_params, [1, 2])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)
561
|
|
|
|
562
|
|
|
    @classmethod
    def tearDownClass(cls):
        # Drop the shared environment so it isn't reused across test classes.
        del cls.env
565
|
|
|
|
566
|
|
|
    def setUp(self):
        # Fresh temp dir per test; each test writes its own adjustments db.
        self.tempdir = TempDirectory()
568
|
|
|
|
569
|
|
|
    def tearDown(self):
        # Remove the per-test temp dir and everything written into it.
        self.tempdir.cleanup()
571
|
|
|
|
572
|
|
|
    def test_market_hours_calculations(self):
        # DST in US/Eastern began on Sunday March 14, 2010
        # 14:31 UTC is just after the 9:30am ET open while EST is in effect.
        before = datetime(2010, 3, 12, 14, 31, tzinfo=pytz.utc)
        after = factory.get_next_trading_dt(
            before,
            timedelta(days=1),
            self.env,
        )
        # After the DST switch the same wall-clock moment is an hour earlier
        # in UTC: 13:xx instead of 14:xx.
        self.assertEqual(after.hour, 13)
581
|
|
|
|
582
|
|
|
    def test_long_position_receives_dividend(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        # One $10.00/share cash dividend on sid 1: declared day 0,
        # ex/record day 1, paid day 2.
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[2].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Simulate a transaction being filled prior to the ex_date.
        txns = [create_txn(events[0].sid, events[0].dt, 10.0, 100)]
        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        # The $1000 dividend (100 shares * $10) lands on the pay date
        # (day 2) and shows up as a 10% return on the $10k base.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.1, 0.1, 0.1, 0.1])
        daily_returns = [event['daily_perf']['returns']
                         for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.10, 0.0, 0.0, 0.0])
        # Cash: -$1000 for the purchase on day 0, +$1000 dividend on day 2.
        cash_flows = [event['daily_perf']['capital_used']
                      for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 1000, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [-1000, -1000, 0, 0, 0, 0])
        cash_pos = \
            [event['cumulative_perf']['ending_cash'] for event in results]
        self.assertEqual(cash_pos, [9000, 9000, 10000, 10000, 10000, 10000])
646
|
|
|
|
647
|
|
|
    def test_long_position_receives_stock_dividend(self):
        # post some trades in the market
        events = {}
        for sid in (1, 2):
            events[sid] = factory.create_trade_history(
                sid,
                [10, 10, 10, 10, 10, 10],
                [100, 100, 100, 100, 100, 100],
                oneday,
                self.sim_params,
                env=self.env
            )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        # No cash dividends in this test — only the stock dividend below.
        dividends = pd.DataFrame({
            'sid': np.array([], dtype=np.uint32),
            'amount': np.array([], dtype=np.float64),
            'declared_date': np.array([], dtype='datetime64[ns]'),
            'ex_date': np.array([], dtype='datetime64[ns]'),
            'pay_date': np.array([], dtype='datetime64[ns]'),
            'record_date': np.array([], dtype='datetime64[ns]'),
        })
        sid_1 = events[1]
        # Stock dividend: holders of sid 1 receive 2 shares of sid 2 per
        # share held; declared day 0, ex/record day 1, paid day 2.
        stock_dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'payment_sid': np.array([2], dtype=np.uint32),
            'ratio': np.array([2], dtype=np.float64),
            'declared_date': np.array([sid_1[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([sid_1[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([sid_1[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([sid_1[2].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends, stock_dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Buy 100 shares of sid 1 before the ex_date.
        txns = [create_txn(events[1][0].sid, events[1][0].dt, 10.0, 100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            events,
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        # 200 new shares of sid 2 @ $10 = $2000 of value received on the
        # pay date (day 2): a 20% return on the $10k base.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.2, 0.2, 0.2, 0.2])
        daily_returns = [event['daily_perf']['returns']
                         for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.2, 0.0, 0.0, 0.0])
        # A stock dividend moves no cash: only the day-0 purchase does.
        cash_flows = [event['daily_perf']['capital_used']
                      for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [-1000] * 6)
        cash_pos = \
            [event['cumulative_perf']['ending_cash'] for event in results]
        self.assertEqual(cash_pos, [9000] * 6)
723
|
|
|
|
724
|
|
|
    def test_long_position_purchased_on_ex_date_receives_no_dividend(self):
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        # $10.00/share cash dividend: declared day 0, ex/record day 1,
        # paid day 2.
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[2].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Simulate a transaction being filled on the ex_date.
        # Buying on (not before) the ex_date means no dividend entitlement.
        txns = [create_txn(events[1].sid, events[1].dt, 10.0, 100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        # No dividend is received, so returns stay flat and the only cash
        # movement is the day-1 purchase.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0, 0, 0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, -1000, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows,
                         [0, -1000, -1000, -1000, -1000, -1000])
785
|
|
|
|
786
|
|
|
    def test_selling_before_dividend_payment_still_gets_paid(self):
        """A position held through the ex/record date still receives the
        dividend on the pay date, even if the shares were sold before
        the payment arrived.

        Buys 100 shares before the ex_date, sells them after the record
        date but before the pay_date, and checks that the $10/share
        dividend still lands in the portfolio on events[3].
        """
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        # No splits/mergers in this scenario; an empty frame with the
        # right columns and dtypes keeps the writer's schema happy.
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[1].dt], dtype='datetime64[ns]'),
            # Payment happens two days after the record date.
            'pay_date': np.array([events[3].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Buy before the ex_date; sell after the record date but before
        # the pay_date.
        buy_txn = create_txn(events[0].sid, events[0].dt, 10.0, 100)
        sell_txn = create_txn(events[2].sid, events[2].dt, 10.0, -100)
        txns = [buy_txn, sell_txn]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        # The $1000 dividend shows up as a 10% return on the pay date
        # (index 3) and persists in the cumulative series afterwards.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0.1, 0.1, 0.1])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0.1, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 1000, 1000, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows,
                         [-1000, -1000, 0, 1000, 1000, 1000])
849
|
|
|
    def test_buy_and_sell_before_ex(self):
        """Opening and fully closing a position before the ex_date means
        no dividend is received: returns and net cash flows stay flat.
        """
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )
        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )

        # Dividend goes ex on events[4] — after the round trip below.
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.0], dtype=np.float64),
            'declared_date': np.array([events[3].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[4].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[5].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[4].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Round trip completed on days 1-2, well before the ex_date.
        buy_txn = create_txn(events[1].sid, events[1].dt, 10.0, 100)
        sell_txn = create_txn(events[2].sid, events[2].dt, 10.0, -100)
        txns = [buy_txn, sell_txn]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            txns=txns,
            adjustment_reader=adjustment_reader,
        )

        self.assertEqual(len(results), 6)
        # No dividend accrues, so returns are zero throughout and the
        # buy/sell cash flows cancel out.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0, 0, 0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, -1000, 1000, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [0, -1000, 0, 0, 0, 0])
911
|
|
|
    def test_ending_before_pay_date(self):
        """If the simulation ends before a dividend's pay_date, the cash
        is never received and returns stay flat for the whole run.
        """
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        pay_date = self.sim_params.first_open
        # find pay date that is much later.
        for i in range(30):
            pay_date = factory.get_next_trading_dt(pay_date, oneday, self.env)

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        # Declared/ex/record all on day 0, but paid ~30 trading days out,
        # i.e. well past the 6-day simulation window.
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([pay_date], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        txns = [create_txn(events[1].sid, events[1].dt, 10.0, 100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            txns=txns,
            adjustment_reader=adjustment_reader,
        )

        self.assertEqual(len(results), 6)
        # The dividend never pays inside the window: zero returns and
        # only the initial purchase affects cash.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0, 0, 0, 0.0, 0.0, 0.0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0, 0, 0, 0, 0, 0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, -1000, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(
            cumulative_cash_flows,
            [0, -1000, -1000, -1000, -1000, -1000]
        )
979
|
|
|
    def test_short_position_pays_dividend(self):
        """A short position held over the ex/record date must pay the
        dividend, producing a negative cash flow and a negative return
        on the pay date.
        """
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[2].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[2].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([events[3].dt], dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Sell short 100 shares before the ex_date and hold through it.
        txns = [create_txn(events[1].sid, events[1].dt, 10.0, -100)]

        results = calculate_results(
            self.sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader,
            txns=txns,
        )

        self.assertEqual(len(results), 6)
        # The short owes $10 * 100 = $1000 on the pay date: a -10% hit.
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.0, -0.1, -0.1, -0.1])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.0, -0.1, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [0, 1000, 0, -1000, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows, [0, 1000, 1000, 0, 0, 0])
1039
|
|
|
def test_no_position_receives_no_dividend(self): |
1040
|
|
|
# post some trades in the market |
1041
|
|
|
events = factory.create_trade_history( |
1042
|
|
|
1, |
1043
|
|
|
[10, 10, 10, 10, 10, 10], |
1044
|
|
|
[100, 100, 100, 100, 100, 100], |
1045
|
|
|
oneday, |
1046
|
|
|
self.sim_params, |
1047
|
|
|
env=self.env |
1048
|
|
|
) |
1049
|
|
|
|
1050
|
|
|
dbpath = self.tempdir.getpath('adjustments.sqlite') |
1051
|
|
|
|
1052
|
|
|
writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days, |
1053
|
|
|
MockDailyBarSpotReader()) |
1054
|
|
|
splits = mergers = pd.DataFrame( |
1055
|
|
|
{ |
1056
|
|
|
# Hackery to make the dtypes correct on an empty frame. |
1057
|
|
|
'effective_date': np.array([], dtype=int), |
1058
|
|
|
'ratio': np.array([], dtype=float), |
1059
|
|
|
'sid': np.array([], dtype=int), |
1060
|
|
|
}, |
1061
|
|
|
index=pd.DatetimeIndex([], tz='UTC'), |
1062
|
|
|
columns=['effective_date', 'ratio', 'sid'], |
1063
|
|
|
) |
1064
|
|
|
dividends = pd.DataFrame({ |
1065
|
|
|
'sid': np.array([1], dtype=np.uint32), |
1066
|
|
|
'amount': np.array([10.00], dtype=np.float64), |
1067
|
|
|
'declared_date': np.array([events[0].dt], dtype='datetime64[ns]'), |
1068
|
|
|
'ex_date': np.array([events[1].dt], dtype='datetime64[ns]'), |
1069
|
|
|
'pay_date': np.array([events[2].dt], dtype='datetime64[ns]'), |
1070
|
|
|
'record_date': np.array([events[2].dt], dtype='datetime64[ns]'), |
1071
|
|
|
}) |
1072
|
|
|
writer.write(splits, mergers, dividends) |
1073
|
|
|
adjustment_reader = SQLiteAdjustmentReader(dbpath) |
1074
|
|
|
|
1075
|
|
|
results = calculate_results( |
1076
|
|
|
self.sim_params, |
1077
|
|
|
self.env, |
1078
|
|
|
self.tempdir, |
1079
|
|
|
self.benchmark_events, |
1080
|
|
|
{1: events}, |
1081
|
|
|
adjustment_reader, |
1082
|
|
|
) |
1083
|
|
|
|
1084
|
|
|
self.assertEqual(len(results), 6) |
1085
|
|
|
cumulative_returns = \ |
1086
|
|
|
[event['cumulative_perf']['returns'] for event in results] |
1087
|
|
|
self.assertEqual(cumulative_returns, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) |
1088
|
|
|
daily_returns = [event['daily_perf']['returns'] for event in results] |
1089
|
|
|
self.assertEqual(daily_returns, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) |
1090
|
|
|
cash_flows = [event['daily_perf']['capital_used'] for event in results] |
1091
|
|
|
self.assertEqual(cash_flows, [0, 0, 0, 0, 0, 0]) |
1092
|
|
|
cumulative_cash_flows = \ |
1093
|
|
|
[event['cumulative_perf']['capital_used'] for event in results] |
1094
|
|
|
self.assertEqual(cumulative_cash_flows, [0, 0, 0, 0, 0, 0]) |
1095
|
|
|
|
1096
|
|
|
    def test_no_dividend_at_simulation_end(self):
        """A dividend whose pay_date falls on the trading day after the
        simulation's final day never pays out inside the run.
        """
        # post some trades in the market
        events = factory.create_trade_history(
            1,
            [10, 10, 10, 10, 10],
            [100, 100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        dbpath = self.tempdir.getpath('adjustments.sqlite')

        writer = SQLiteAdjustmentWriter(dbpath, self.env.trading_days,
                                        MockDailyBarSpotReader())
        splits = mergers = pd.DataFrame(
            {
                # Hackery to make the dtypes correct on an empty frame.
                'effective_date': np.array([], dtype=int),
                'ratio': np.array([], dtype=float),
                'sid': np.array([], dtype=int),
            },
            index=pd.DatetimeIndex([], tz='UTC'),
            columns=['effective_date', 'ratio', 'sid'],
        )
        # pay_date is the first trading day *after* the last event, so it
        # lies just outside the simulation window.
        dividends = pd.DataFrame({
            'sid': np.array([1], dtype=np.uint32),
            'amount': np.array([10.00], dtype=np.float64),
            'declared_date': np.array([events[-3].dt], dtype='datetime64[ns]'),
            'ex_date': np.array([events[-2].dt], dtype='datetime64[ns]'),
            'record_date': np.array([events[0].dt], dtype='datetime64[ns]'),
            'pay_date': np.array([self.env.next_trading_day(events[-1].dt)],
                                 dtype='datetime64[ns]'),
        })
        writer.write(splits, mergers, dividends)
        adjustment_reader = SQLiteAdjustmentReader(dbpath)

        # Set the last day to be the last event
        sim_params = create_simulation_parameters(
            num_days=6,
            capital_base=10e3,
            start=self.sim_params.period_start,
            end=self.sim_params.period_end
        )

        sim_params.period_end = events[-1].dt
        sim_params.update_internal_from_env(self.env)

        # Simulate a transaction being filled prior to the ex_date.
        txns = [create_txn(events[0].sid, events[0].dt, 10.0, 100)]
        results = calculate_results(
            sim_params,
            self.env,
            self.tempdir,
            self.benchmark_events,
            {1: events},
            adjustment_reader=adjustment_reader,
            txns=txns,
        )

        # Only 5 days of results since the window was shortened above.
        self.assertEqual(len(results), 5)
        cumulative_returns = \
            [event['cumulative_perf']['returns'] for event in results]
        self.assertEqual(cumulative_returns, [0.0, 0.0, 0.0, 0.0, 0.0])
        daily_returns = [event['daily_perf']['returns'] for event in results]
        self.assertEqual(daily_returns, [0.0, 0.0, 0.0, 0.0, 0.0])
        cash_flows = [event['daily_perf']['capital_used'] for event in results]
        self.assertEqual(cash_flows, [-1000, 0, 0, 0, 0])
        cumulative_cash_flows = \
            [event['cumulative_perf']['capital_used'] for event in results]
        self.assertEqual(cumulative_cash_flows,
                         [-1000, -1000, -1000, -1000, -1000])
1169
|
|
|
|
1170
|
|
|
class TestDividendPerformanceHolidayStyle(TestDividendPerformance):
    """Re-run the inherited dividend tests across a market holiday.

    The holiday tests begin the simulation on the day
    before Thanksgiving, so that the next trading day is
    two days ahead. Any tests that hard code events
    to be start + oneday will fail, since those events will
    be skipped by the simulation.
    """

    @classmethod
    def setUpClass(cls):
        # Shared environment and sim params for every test in this class;
        # the date range spans the 2003 Thanksgiving closure.
        cls.env = TradingEnvironment()
        cls.sim_params = create_simulation_parameters(
            num_days=6,
            capital_base=10e3,
            start=pd.Timestamp("2003-11-30", tz='UTC'),
            end=pd.Timestamp("2003-12-08", tz='UTC')
        )

        setup_env_data(cls.env, cls.sim_params, [1, 2])

        cls.benchmark_events = benchmark_events_in_range(cls.sim_params,
                                                         cls.env)
1193
|
|
|
|
1194
|
|
|
class TestPositionPerformance(unittest.TestCase): |
1195
|
|
|
|
1196
|
|
|
    def setUp(self):
        # Fresh scratch directory for per-test data artifacts;
        # cleaned up again in tearDown.
        self.tempdir = TempDirectory()
1199
|
|
|
def create_environment_stuff(self, num_days=4, sids=[1, 2]): |
1200
|
|
|
self.env = TradingEnvironment() |
1201
|
|
|
self.sim_params = create_simulation_parameters(num_days=num_days) |
1202
|
|
|
|
1203
|
|
|
setup_env_data(self.env, self.sim_params, [1, 2]) |
1204
|
|
|
|
1205
|
|
|
self.finder = self.env.asset_finder |
1206
|
|
|
|
1207
|
|
|
self.benchmark_events = benchmark_events_in_range(self.sim_params, |
1208
|
|
|
self.env) |
1209
|
|
|
|
1210
|
|
|
    def tearDown(self):
        # Remove the scratch directory made in setUp and drop the
        # environment so each test starts from a clean slate.
        self.tempdir.cleanup()
        del self.env
1214
|
|
|
    def test_long_short_positions(self):
        """
        start with $1000
        buy 100 stock1 shares at $10
        sell short 100 stock2 shares at $10
        stock1 then goes down to $9
        stock2 goes to $11
        """
        self.create_environment_stuff()

        trades_1 = factory.create_trade_history(
            1,
            [10, 10, 10, 9],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        trades_2 = factory.create_trade_history(
            2,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        # NOTE(review): txn2 trades sid 2 but is stamped with
        # trades_1[1].dt; both histories share the same dts here, so it
        # is equivalent to trades_2[1].dt -- confirm if histories ever
        # diverge.
        txn1 = create_txn(trades_1[1].sid, trades_1[1].dt, 10.0, 100)
        txn2 = create_txn(trades_2[1].sid, trades_1[1].dt, 10.0, -100)

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades_1, 2: trades_2}
        )

        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal)
        pp.position_tracker = pt
        pt.execute_transaction(txn1)
        pp.handle_execution(txn1)
        pt.execute_transaction(txn2)
        pp.handle_execution(txn2)

        # Sync to the day before the price moves: both legs still at $10.
        dt = trades_1[-2].dt
        pt.sync_last_sale_prices(dt)

        pp.calculate_performance()

        # $1000 long + $1000 short on $1000 equity: gross 2x, net 0x.
        check_perf_period(
            pp,
            gross_leverage=2.0,
            net_leverage=0.0,
            long_exposure=1000.0,
            longs_count=1,
            short_exposure=-1000.0,
            shorts_count=1)
        # Validate that the account attributes were updated.
        account = pp.as_account()
        check_account(account,
                      settled_cash=1000.0,
                      equity_with_loan=1000.0,
                      total_positions_value=0.0,
                      regt_equity=1000.0,
                      available_funds=1000.0,
                      excess_liquidity=1000.0,
                      cushion=1.0,
                      leverage=2.0,
                      net_leverage=0.0,
                      net_liquidation=1000.0)

        # Now sync to the final day: stock1 -> $9, stock2 -> $11, both
        # legs lose money.
        dt = trades_1[-1].dt
        pt.sync_last_sale_prices(dt)

        pp.calculate_performance()

        # Validate that the account attributes were updated.
        account = pp.as_account()

        # Equity drops to $800; $900 long vs -$1100 short.
        check_perf_period(
            pp,
            gross_leverage=2.5,
            net_leverage=-0.25,
            long_exposure=900.0,
            longs_count=1,
            short_exposure=-1100.0,
            shorts_count=1)

        check_account(account,
                      settled_cash=1000.0,
                      equity_with_loan=800.0,
                      total_positions_value=-200.0,
                      regt_equity=1000.0,
                      available_funds=1000.0,
                      excess_liquidity=1000.0,
                      cushion=1.25,
                      leverage=2.5,
                      net_leverage=-0.25,
                      net_liquidation=800.0)
1319
|
|
|
    def test_levered_long_position(self):
        """
        start with $1,000, then buy 1000 shares at $10.
        price goes to $11
        """
        # post some trades in the market

        self.create_environment_stuff()

        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        # Buy 10x more stock than the capital base can cover: a $10,000
        # position on $1,000 of equity.
        txn = create_txn(trades[1].sid, trades[1].dt, 10.0, 1000)
        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal)
        pp.position_tracker = pt

        pt.execute_transaction(txn)
        pp.handle_execution(txn)

        pp.calculate_performance()

        check_perf_period(
            pp,
            gross_leverage=10.0,
            net_leverage=10.0,
            long_exposure=10000.0,
            longs_count=1,
            short_exposure=0.0,
            shorts_count=0)

        # Sync prices as of the day before the price jump ($10).
        pt.sync_last_sale_prices(trades[-2].dt)

        # Validate that the account attributes were updated.
        account = pp.as_account()
        check_account(account,
                      settled_cash=-9000.0,
                      equity_with_loan=1000.0,
                      total_positions_value=10000.0,
                      regt_equity=-9000.0,
                      available_funds=-9000.0,
                      excess_liquidity=-9000.0,
                      cushion=-9.0,
                      leverage=10.0,
                      net_leverage=10.0,
                      net_liquidation=1000.0)

        # now simulate a price jump to $11
        pt.sync_last_sale_prices(trades[-1].dt)

        pp.calculate_performance()

        # Position is now worth $11,000 against $2,000 of equity.
        check_perf_period(
            pp,
            gross_leverage=5.5,
            net_leverage=5.5,
            long_exposure=11000.0,
            longs_count=1,
            short_exposure=0.0,
            shorts_count=0)

        # Validate that the account attributes were updated.
        account = pp.as_account()

        check_account(account,
                      settled_cash=-9000.0,
                      equity_with_loan=2000.0,
                      total_positions_value=11000.0,
                      regt_equity=-9000.0,
                      available_funds=-9000.0,
                      excess_liquidity=-9000.0,
                      cushion=-4.5,
                      leverage=5.5,
                      net_leverage=5.5,
                      net_liquidation=2000.0)
1411
|
|
|
    def test_long_position(self):
        """
        verify that the performance period calculates properly for a
        single buy transaction
        """
        self.create_environment_stuff()

        # post some trades in the market
        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            self.sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            self.sim_params,
            {1: trades})

        txn = create_txn(trades[1].sid, trades[1].dt, 10.0, 100)
        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  self.sim_params.data_frequency)
        pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                    self.sim_params.data_frequency,
                                    data_portal,
                                    period_open=self.sim_params.period_start,
                                    period_close=self.sim_params.period_end)
        pp.position_tracker = pt

        pt.execute_transaction(txn)
        pp.handle_execution(txn)

        # This verifies that the last sale price is being correctly
        # set in the positions. If this is not the case then returns can
        # incorrectly show as sharply dipping if a transaction arrives
        # before a trade. This is caused by returns being based on holding
        # stocks with a last sale price of 0.
        self.assertEqual(pp.positions[1].last_sale_price, 10.0)

        pt.sync_last_sale_prices(trades[-1].dt)

        pp.calculate_performance()

        self.assertEqual(
            pp.period_cash_flow,
            -1 * txn.price * txn.amount,
            "capital used should be equal to the opposite of the transaction \
            cost of sole txn in test"
        )

        self.assertEqual(
            len(pp.positions),
            1,
            "should be just one position")

        self.assertEqual(
            pp.positions[1].sid,
            txn.sid,
            "position should be in security with id 1")

        self.assertEqual(
            pp.positions[1].amount,
            txn.amount,
            "should have a position of {sharecount} shares".format(
                sharecount=txn.amount
            )
        )

        self.assertEqual(
            pp.positions[1].cost_basis,
            txn.price,
            "should have a cost basis of 10"
        )

        self.assertEqual(
            pp.positions[1].last_sale_price,
            trades[-1]['price'],
            "last sale should be same as last trade. \
            expected {exp} actual {act}".format(
                exp=trades[-1]['price'],
                act=pp.positions[1].last_sale_price)
        )

        self.assertEqual(
            pp.ending_value,
            1100,
            "ending value should be price of last trade times number of \
            shares in position"
        )

        self.assertEqual(pp.pnl, 100, "gain of 1 on 100 shares should be 100")

        # Fully invested long: leverage exactly 1x.
        check_perf_period(
            pp,
            gross_leverage=1.0,
            net_leverage=1.0,
            long_exposure=1100.0,
            longs_count=1,
            short_exposure=0.0,
            shorts_count=0)

        # Validate that the account attributes were updated.
        account = pp.as_account()
        check_account(account,
                      settled_cash=0.0,
                      equity_with_loan=1100.0,
                      total_positions_value=1100.0,
                      regt_equity=0.0,
                      available_funds=0.0,
                      excess_liquidity=0.0,
                      cushion=0.0,
                      leverage=1.0,
                      net_leverage=1.0,
                      net_liquidation=1100.0)
1530
|
|
|
def test_short_position(self):
    """verify that the performance period calculates properly for a \
single short-sale transaction"""
    self.create_environment_stuff(num_days=6)

    trades = factory.create_trade_history(
        1,
        [10, 10, 10, 11, 10, 9],
        [100, 100, 100, 100, 100, 100],
        oneday,
        self.sim_params,
        env=self.env
    )

    # The first performance period only observes the history up to the
    # 11-print; the final two trades are replayed after the rollover.
    trades_1 = trades[:-2]

    data_portal = create_data_portal_from_trade_history(
        self.env,
        self.tempdir,
        self.sim_params,
        {1: trades})

    # Short 100 shares at 10.0 on the second bar.
    txn = create_txn(trades[1].sid, trades[1].dt, 10.0, -100)
    pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                              self.sim_params.data_frequency)
    pp = perf.PerformancePeriod(
        1000.0, self.env.asset_finder,
        self.sim_params.data_frequency,
        data_portal)
    pp.position_tracker = pt

    pt.execute_transaction(txn)
    pp.handle_execution(txn)

    pt.sync_last_sale_prices(trades_1[-1].dt)

    pp.calculate_performance()

    self.assertEqual(
        pp.period_cash_flow,
        -1 * txn.price * txn.amount,
        "capital used should be equal to the opposite of the transaction\
 cost of sole txn in test"
    )

    self.assertEqual(
        len(pp.positions),
        1,
        "should be just one position")

    self.assertEqual(
        pp.positions[1].sid,
        txn.sid,
        "position should be in security from the transaction"
    )

    self.assertEqual(
        pp.positions[1].amount,
        -100,
        "should have a position of -100 shares"
    )

    self.assertEqual(
        pp.positions[1].cost_basis,
        txn.price,
        "should have a cost basis of 10"
    )

    self.assertEqual(
        pp.positions[1].last_sale_price,
        trades_1[-1]['price'],
        "last sale should be price of last trade"
    )

    self.assertEqual(
        pp.ending_value,
        -1100,
        "ending value should be price of last trade times number of \
shares in position"
    )

    # Price moved 10 -> 11 against a -100 share position: a loss of 100.
    # (Message fixed; it previously claimed a "gain" for an asserted -100.)
    self.assertEqual(pp.pnl, -100,
                     "rise of 1 against -100 shares should be -100")

    # simulate additional trades, and ensure that the position value
    # reflects the new price
    trades_2 = trades[-2:]

    # simulate a rollover to a new period
    pp.rollover()

    pt.sync_last_sale_prices(trades[-1].dt)

    pp.calculate_performance()

    self.assertEqual(
        pp.period_cash_flow,
        0,
        "capital used should be zero, there were no transactions in \
performance period"
    )

    self.assertEqual(
        len(pp.positions),
        1,
        "should be just one position"
    )

    self.assertEqual(
        pp.positions[1].sid,
        txn.sid,
        "position should be in security from the transaction"
    )

    self.assertEqual(
        pp.positions[1].amount,
        -100,
        "should have a position of -100 shares"
    )

    self.assertEqual(
        pp.positions[1].cost_basis,
        txn.price,
        "should have a cost basis of 10"
    )

    self.assertEqual(
        pp.positions[1].last_sale_price,
        trades_2[-1].price,
        "last sale should be price of last trade"
    )

    self.assertEqual(
        pp.ending_value,
        -900,
        "ending value should be price of last trade times number of \
shares in position")

    self.assertEqual(
        pp.pnl,
        200,
        "drop of 2 on -100 shares should be 200"
    )

    # now run a performance period encompassing the entire trade sample.
    ptTotal = perf.PositionTracker(self.env.asset_finder, data_portal,
                                   self.sim_params.data_frequency)
    ppTotal = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                     self.sim_params.data_frequency,
                                     data_portal)
    # Fixed: the total period must observe the tracker that receives the
    # transaction below; it previously pointed at the rolled-over ``pt``.
    ppTotal.position_tracker = ptTotal

    ptTotal.execute_transaction(txn)
    ppTotal.handle_execution(txn)

    ptTotal.sync_last_sale_prices(trades[-1].dt)

    ppTotal.calculate_performance()

    self.assertEqual(
        ppTotal.period_cash_flow,
        -1 * txn.price * txn.amount,
        "capital used should be equal to the opposite of the transaction \
cost of sole txn in test"
    )

    self.assertEqual(
        len(ppTotal.positions),
        1,
        "should be just one position"
    )
    self.assertEqual(
        ppTotal.positions[1].sid,
        txn.sid,
        "position should be in security from the transaction"
    )

    self.assertEqual(
        ppTotal.positions[1].amount,
        -100,
        "should have a position of -100 shares"
    )

    self.assertEqual(
        ppTotal.positions[1].cost_basis,
        txn.price,
        "should have a cost basis of 10"
    )

    self.assertEqual(
        ppTotal.positions[1].last_sale_price,
        trades_2[-1].price,
        "last sale should be price of last trade"
    )

    self.assertEqual(
        ppTotal.ending_value,
        -900,
        "ending value should be price of last trade times number of \
shares in position")

    self.assertEqual(
        ppTotal.pnl,
        100,
        "drop of 1 on -100 shares should be 100"
    )

    # NOTE(review): this checks ``pp`` (the rolled-over period) while the
    # account check below uses ``ppTotal``; both happen to yield the same
    # leverage figures here -- confirm which period was intended.
    check_perf_period(
        pp,
        gross_leverage=0.8181,
        net_leverage=-0.8181,
        long_exposure=0.0,
        longs_count=0,
        short_exposure=-900.0,
        shorts_count=1)

    # Validate the account attributes.
    account = ppTotal.as_account()
    check_account(account,
                  settled_cash=2000.0,
                  equity_with_loan=1100.0,
                  total_positions_value=-900.0,
                  regt_equity=2000.0,
                  available_funds=2000.0,
                  excess_liquidity=2000.0,
                  cushion=1.8181,
                  leverage=0.8181,
                  net_leverage=-0.8181,
                  net_liquidation=1100.0)
def test_covering_short(self):
    """verify performance where short is bought and covered, and shares \
trade after cover"""
    self.create_environment_stuff(num_days=10)

    trades = factory.create_trade_history(
        1,
        [10, 10, 10, 11, 9, 8, 7, 8, 9, 10],
        [100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
        onesec,
        self.sim_params,
        env=self.env
    )

    data_portal = create_data_portal_from_trade_history(
        self.env,
        self.tempdir,
        self.sim_params,
        {1: trades})

    # Short 100 shares at 10.0, then buy them back at 7.0: a 3/share gain.
    short_txn = create_txn(
        trades[1].sid,
        trades[1].dt,
        10.0,
        -100,
    )
    cover_txn = create_txn(trades[6].sid, trades[6].dt, 7.0, 100)
    pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                              self.sim_params.data_frequency)
    pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                self.sim_params.data_frequency,
                                data_portal)
    pp.position_tracker = pt

    pt.execute_transaction(short_txn)
    pp.handle_execution(short_txn)
    pt.execute_transaction(cover_txn)
    pp.handle_execution(cover_txn)

    pt.sync_last_sale_prices(trades[-1].dt)

    pp.calculate_performance()

    short_txn_cost = short_txn.price * short_txn.amount
    cover_txn_cost = cover_txn.price * cover_txn.amount

    self.assertEqual(
        pp.period_cash_flow,
        -1 * short_txn_cost - cover_txn_cost,
        "capital used should be equal to the net transaction costs"
    )

    self.assertEqual(
        len(pp.positions),
        1,
        "should be just one position"
    )

    self.assertEqual(
        pp.positions[1].sid,
        short_txn.sid,
        "position should be in security from the transaction"
    )

    # The cover fully flattens the short.
    # (Message fixed; it previously claimed -100 shares for an asserted 0.)
    self.assertEqual(
        pp.positions[1].amount,
        0,
        "covered position should have an amount of 0 shares"
    )

    self.assertEqual(
        pp.positions[1].cost_basis,
        0,
        "a covered position should have a cost basis of 0"
    )

    self.assertEqual(
        pp.positions[1].last_sale_price,
        trades[-1].price,
        "last sale should be price of last trade"
    )

    self.assertEqual(
        pp.ending_value,
        0,
        "ending value should be price of last trade times number of \
shares in position"
    )

    # Shorted at 10, covered at 7 -> 3/share on 100 shares.
    # (Message fixed; it previously said "gain of 1".)
    self.assertEqual(
        pp.pnl,
        300,
        "gain of 3 on 100 shares should be 300"
    )

    check_perf_period(
        pp,
        gross_leverage=0.0,
        net_leverage=0.0,
        long_exposure=0.0,
        longs_count=0,
        short_exposure=0.0,
        shorts_count=0)

    account = pp.as_account()
    check_account(account,
                  settled_cash=1300.0,
                  equity_with_loan=1300.0,
                  total_positions_value=0.0,
                  regt_equity=1300.0,
                  available_funds=1300.0,
                  excess_liquidity=1300.0,
                  cushion=1.0,
                  leverage=0.0,
                  net_leverage=0.0,
                  net_liquidation=1300.0)
def test_cost_basis_calc(self):
    """Cost basis should track the running average fill price, survive a
    period rollover unchanged, and match when the same fills are replayed
    into a fresh tracker/period pair."""
    self.create_environment_stuff(num_days=5)

    history_args = (
        1,
        [10, 11, 11, 12, 10],
        [100, 100, 100, 100, 100],
        oneday,
        self.sim_params,
        self.env
    )
    trades = factory.create_trade_history(*history_args)
    # Only the first four fills are executed up front; the last bar is
    # used later as the closing sale.
    transactions = factory.create_txn_history(*history_args)[:4]

    data_portal = create_data_portal_from_trade_history(
        self.env,
        self.tempdir,
        self.sim_params,
        {1: trades})

    pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                              self.sim_params.data_frequency)
    pp = perf.PerformancePeriod(
        1000.0,
        self.env.asset_finder,
        self.sim_params.data_frequency,
        data_portal,
        period_open=self.sim_params.period_start,
        period_close=self.sim_params.trading_days[-1]
    )
    pp.position_tracker = pt

    # After each buy, the cost basis must equal the incremental average
    # of all fill prices so far (same recurrence the tracker uses).
    avg_cost = 0
    for n, txn in enumerate(transactions):
        pt.execute_transaction(txn)
        pp.handle_execution(txn)
        avg_cost = (avg_cost * n + txn.price) / (n + 1)
        self.assertEqual(pt.positions[1].cost_basis, avg_cost)

    sync_dt = trades[-2].dt
    self.assertEqual(
        pt.positions[1].last_sale_price,
        trades[-2].price,
        "should have a last sale of 12, got {val}".format(
            val=pt.positions[1].last_sale_price)
    )

    self.assertEqual(
        pt.positions[1].cost_basis,
        11,
        "should have a cost basis of 11"
    )

    pt.sync_last_sale_prices(sync_dt)

    pp.calculate_performance()

    self.assertEqual(
        pp.pnl,
        400
    )

    # Close the position on the final (down) bar at 10.0.
    down_tick = trades[-1]

    sale_txn = create_txn(
        down_tick.sid,
        down_tick.dt,
        10.0,
        -100)

    pp.rollover()

    pt.execute_transaction(sale_txn)
    pp.handle_execution(sale_txn)

    sync_dt = down_tick.dt
    pt.sync_last_sale_prices(sync_dt)

    pp.calculate_performance()
    self.assertEqual(
        pp.positions[1].last_sale_price,
        10,
        "should have a last sale of 10, was {val}".format(
            val=pp.positions[1].last_sale_price)
    )

    # The sale does not move the basis of the remaining shares.
    self.assertEqual(
        pp.positions[1].cost_basis,
        11,
        "should have a cost basis of 11"
    )

    self.assertEqual(pp.pnl, -800, "this period goes from +400 to -400")

    # Replay the same fills plus the sale into a brand-new tracker and
    # period; totals over the whole sample should net to -400.
    replay_pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                     self.sim_params.data_frequency)
    replay_pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                       self.sim_params.data_frequency,
                                       data_portal)
    replay_pp.position_tracker = replay_pt

    avg_cost = 0
    for n, txn in enumerate(transactions):
        replay_pt.execute_transaction(txn)
        replay_pp.handle_execution(txn)
        avg_cost = (avg_cost * n + txn.price) / (n + 1)
        self.assertEqual(replay_pp.positions[1].cost_basis, avg_cost)

    replay_pt.execute_transaction(sale_txn)
    replay_pp.handle_execution(sale_txn)

    trades.append(down_tick)
    replay_pt.sync_last_sale_prices(trades[-1].dt)

    replay_pp.calculate_performance()
    self.assertEqual(
        replay_pp.positions[1].last_sale_price,
        10,
        "should have a last sale of 10"
    )

    self.assertEqual(
        replay_pp.positions[1].cost_basis,
        11,
        "should have a cost basis of 11"
    )

    self.assertEqual(
        replay_pp.pnl,
        -400,
        "should be -400 for all trades and transactions in period"
    )
def test_cost_basis_calc_close_pos(self):
    """Cost basis over a sequence of buys, sells, flips, and a full close
    should follow the expected per-fill values in ``cost_bases``."""
    self.create_environment_stuff(num_days=8)

    history_args = (
        1,
        [10, 9, 11, 8, 9, 12, 13, 14],
        [200, -100, -100, 100, -300, 100, 500, 400],
        onesec,
        self.sim_params,
        self.env
    )
    # Expected basis after each fill; 0 marks the fully-closed position.
    cost_bases = [10, 10, 0, 8, 9, 9, 13, 13.5]

    trades = factory.create_trade_history(*history_args)
    transactions = factory.create_txn_history(*history_args)

    data_portal = create_data_portal_from_trade_history(
        self.env,
        self.tempdir,
        self.sim_params,
        {1: trades})

    pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                              self.sim_params.data_frequency)
    # Fixed: the final two positional args were swapped relative to every
    # other PerformancePeriod construction in this file; the signature is
    # (..., data_frequency, data_portal).
    pp = perf.PerformancePeriod(1000.0, self.env.asset_finder,
                                self.sim_params.data_frequency,
                                data_portal)
    pp.position_tracker = pt

    for txn, cb in zip(transactions, cost_bases):
        pt.execute_transaction(txn)
        pp.handle_execution(txn)
        self.assertEqual(pp.positions[1].cost_basis, cb)

    pp.calculate_performance()

    self.assertEqual(pp.positions[1].cost_basis, cost_bases[-1])
class TestPosition(unittest.TestCase):
    """Tests for the standalone Position record."""

    def test_serialization(self):
        """A Position must round-trip through the persistent-id pickle
        helpers with an identical attribute dict."""
        dt = pd.Timestamp("1984/03/06 3:00PM")
        pos = perf.Position(10, amount=np.float64(120.0), last_sale_date=dt,
                            last_sale_price=3.4)

        p_string = dumps_with_persistent_ids(pos)

        # A Position does not reference the environment, so env=None.
        test = loads_with_persistent_ids(p_string, env=None)
        nt.assert_dict_equal(test.__dict__, pos.__dict__)
class TestPositionTracker(unittest.TestCase):
    """Tests for PositionTracker aggregation, stats, and serialization."""

    @classmethod
    def setUpClass(cls):
        # Two equities (sids 1-2) and two futures (sids 3-4) whose
        # contract multiplier scales their exposure by 1000x.
        cls.env = TradingEnvironment()
        futures_metadata = {3: {'contract_multiplier': 1000},
                            4: {'contract_multiplier': 1000}}
        cls.env.write_data(equities_identifiers=[1, 2],
                           futures_data=futures_metadata)

    @classmethod
    def tearDownClass(cls):
        del cls.env

    def setUp(self):
        self.tempdir = TempDirectory()

    def tearDown(self):
        self.tempdir.cleanup()

    def test_empty_positions(self):
        """
        make sure all the empty position stats return a numeric 0

        Originally this bug was due to np.dot([], []) returning
        np.bool_(False)
        """
        sim_params = factory.create_simulation_parameters(
            num_days=4, env=self.env
        )
        trades = factory.create_trade_history(
            1,
            [10, 10, 10, 11],
            [100, 100, 100, 100],
            oneday,
            sim_params,
            env=self.env
        )

        data_portal = create_data_portal_from_trade_history(
            self.env,
            self.tempdir,
            sim_params,
            {1: trades})

        pt = perf.PositionTracker(self.env.asset_finder, data_portal,
                                  sim_params.data_frequency)
        pos_stats = pt.stats()

        stats = [
            'net_value',
            'net_exposure',
            'gross_value',
            'gross_exposure',
            'short_value',
            'short_exposure',
            'shorts_count',
            'long_value',
            'long_exposure',
            'longs_count',
        ]
        for name in stats:
            val = getattr(pos_stats, name)
            # Fixed: assertEquals is a deprecated alias (removed in
            # Python 3.12); use assertEqual.
            self.assertEqual(val, 0)
            self.assertNotIsInstance(val, (bool, np.bool_))

    def test_position_values_and_exposures(self):
        """Long/short value and exposure stats over two equities and two
        futures; futures exposure is scaled by the 1000x multiplier."""
        pt = perf.PositionTracker(self.env.asset_finder, None, None)
        dt = pd.Timestamp("1984/03/06 3:00PM")
        pos1 = perf.Position(1, amount=np.float64(10.0),
                             last_sale_date=dt, last_sale_price=10)
        pos2 = perf.Position(2, amount=np.float64(-20.0),
                             last_sale_date=dt, last_sale_price=10)
        pos3 = perf.Position(3, amount=np.float64(30.0),
                             last_sale_date=dt, last_sale_price=10)
        pos4 = perf.Position(4, amount=np.float64(-40.0),
                             last_sale_date=dt, last_sale_price=10)
        pt.update_positions({1: pos1, 2: pos2, 3: pos3, 4: pos4})

        # Test long-only methods

        pos_stats = pt.stats()
        self.assertEqual(100, pos_stats.long_value)
        self.assertEqual(100 + 300000, pos_stats.long_exposure)
        self.assertEqual(2, pos_stats.longs_count)

        # Test short-only methods
        self.assertEqual(-200, pos_stats.short_value)
        self.assertEqual(-200 - 400000, pos_stats.short_exposure)
        self.assertEqual(2, pos_stats.shorts_count)

        # Test gross and net values
        self.assertEqual(100 + 200, pos_stats.gross_value)
        self.assertEqual(100 - 200, pos_stats.net_value)

        # Test gross and net exposures
        self.assertEqual(100 + 200 + 300000 + 400000, pos_stats.gross_exposure)
        self.assertEqual(100 - 200 + 300000 - 400000, pos_stats.net_exposure)

    def test_serialization(self):
        """A PositionTracker must round-trip through the persistent-id
        pickle helpers with every position's attributes intact."""
        pt = perf.PositionTracker(self.env.asset_finder, None, None)
        dt = pd.Timestamp("1984/03/06 3:00PM")
        pos1 = perf.Position(1, amount=np.float64(120.0),
                             last_sale_date=dt, last_sale_price=3.4)
        pos3 = perf.Position(3, amount=np.float64(100.0),
                             last_sale_date=dt, last_sale_price=3.4)

        pt.update_positions({1: pos1, 3: pos3})
        p_string = dumps_with_persistent_ids(pt)
        test = loads_with_persistent_ids(p_string, env=self.env)
        nt.assert_count_equal(test.positions.keys(), pt.positions.keys())
        for sid in pt.positions:
            nt.assert_dict_equal(test.positions[sid].__dict__,
                                 pt.positions[sid].__dict__)
class TestPerformancePeriod(unittest.TestCase):
    """Serialization round-trip test for PerformancePeriod."""

    def test_serialization(self):
        """After a pickle round-trip, every attribute except the dropped
        data portal and the two lazily-rebuilt store caches must match."""
        env = TradingEnvironment()
        pp = perf.PerformancePeriod(100, env.asset_finder, 'minute', None)

        payload = dumps_with_persistent_ids(pp)
        restored = loads_with_persistent_ids(payload, env=env)

        # The data portal is intentionally not serialized.
        expected = pp.__dict__.copy()
        expected.pop('_data_portal')

        nt.assert_count_equal(restored.__dict__.keys(), expected.keys())

        # The account/portfolio stores are caches; compare the rest.
        comparable = [k for k in expected
                      if k not in ('_account_store', '_portfolio_store')]

        for key in comparable:
            nt.assert_equal(restored.__dict__[key], expected[key])
Duplicated code is one of the most pungent code smells. If you need to duplicate the same code in three or more different places, we strongly encourage you to look into extracting the code into a single class or operation.
You can also find more detailed suggestions in the “Code” section of your repository.