Completed
Pull Request — master (#858)
by Eddie
02:13 queued 19s

zipline.gens.AlgorithmSimulator.every_bar()   B

Complexity
    Conditions   6

Size
    Total Lines  41

Duplication
    Lines        0
    Ratio        0 %

Metric   Value
cc       6
dl       0
loc      41
rs       7.5385
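The cc value of 6 is consistent with a standard cyclomatic-complexity count of every_bar(): one base path plus one for each of its two if statements and three for loops. A minimal sketch of that count (an illustration of the metric only, not the report tool's own implementation; the file path is an assumption):

import ast

BRANCH_NODES = (ast.If, ast.IfExp, ast.For, ast.While, ast.ExceptHandler)

def cyclomatic_complexity(func_node):
    # One straight-line path, plus one per decision point in the body.
    return 1 + sum(isinstance(node, BRANCH_NODES)
                   for node in ast.walk(func_node))

def find_function(source, name):
    # Locate a (possibly nested) function definition by name.
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.FunctionDef) and node.name == name:
            return node
    raise LookupError(name)

# Path is an assumption -- point it at the module listed below.
with open('zipline/gens/tradesimulation.py') as f:
    print(cyclomatic_complexity(find_function(f.read(), 'every_bar')))  # 6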
#
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logbook import Logger, Processor
from pandas.tslib import normalize_date
from zipline.protocol import BarData
from zipline.utils.api_support import ZiplineAPI

from zipline.gens.sim_engine import (
    BAR,
    DAY_START,
    DAY_END,
    MINUTE_END
)

log = Logger('Trade Simulation')


class AlgorithmSimulator(object):

    EMISSION_TO_PERF_KEY_MAP = {
        'minute': 'minute_perf',
        'daily': 'daily_perf'
    }

    def __init__(self, algo, sim_params, data_portal, clock, benchmark_source):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params
        self.env = algo.trading_environment
        self.data_portal = data_portal

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)

        # ==============
        # Snapshot Setup
        # ==============

        # The algorithm's data as of our most recent event.
        # We want an object that will have empty objects as default
        # values on missing keys.
        self.current_data = self._create_bar_data()

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None

        self.clock = clock

        self.benchmark_source = benchmark_source

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt
        self.processor = Processor(inject_algo_dt)
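        # Note: the processor only takes effect while it is pushed; transform()
        # pushes it in its "with self.processor, ZiplineAPI(self.algo):" block,
        # so log records emitted during the simulation are stamped with the
        # current simulation datetime (algo_dt).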

    def _create_bar_data(self):
        return BarData(
            data_portal=self.data_portal,
            simulator=self
        )

    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo
        algo.data_portal = self.data_portal
        handle_data = algo.event_manager.handle_data
        current_data = self.current_data

        data_portal = self.data_portal

        # can't cache a pointer to algo.perf_tracker because we're not
        # guaranteed that the algo doesn't swap out perf trackers during
        # its lifetime.
        # likewise, we can't cache a pointer to the blotter.

        algo.perf_tracker.position_tracker.data_portal = data_portal

        def every_bar(dt_to_use):
            # called every tick (minute or day).

            self.simulation_dt = dt_to_use
            algo.on_dt_changed(dt_to_use)

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out of new orders
            # placed in the last bar
            new_transactions, new_commissions = \
                blotter.get_transactions(data_portal)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

            self.algo.portfolio_needs_update = True
            self.algo.account_needs_update = True
            self.algo.performance_needs_update = True

        def once_a_day(midnight_dt):
            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)

            # call before trading start
            algo.before_trading_start(current_data)

            perf_tracker = algo.perf_tracker

            # handle any splits that impact any positions or any open orders.
            sids_we_care_about = \
                list(set(list(perf_tracker.position_tracker.positions.keys()) +
                         list(algo.blotter.open_orders.keys())))

            if len(sids_we_care_about) > 0:
                splits = data_portal.get_splits(sids_we_care_about,
                                                midnight_dt)
                if len(splits) > 0:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

        def handle_benchmark(date):
            algo.perf_tracker.all_benchmark_returns[date] = \
                self.benchmark_source.get_value(date)

        with self.processor, ZiplineAPI(self.algo):
            for dt, action in self.clock:
                if action == BAR:
                    every_bar(dt)
                elif action == DAY_START:
                    once_a_day(dt)
                elif action == DAY_END:
                    # End of the day.
                    handle_benchmark(normalize_date(dt))
                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg, daily_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

                    if daily_msg:
                        yield daily_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message

    @staticmethod
    def _get_daily_message(dt, algo, perf_tracker):
        """
        Get a perf message for the given datetime.
        """
        perf_message = perf_tracker.handle_market_close_daily(dt)
        perf_message['daily_perf']['recorded_vars'] = algo.recorded_vars
        return perf_message

    @staticmethod
    def _get_minute_message(dt, algo, perf_tracker):
        """
        Get a perf message for the given datetime.
        """
        rvars = algo.recorded_vars

        minute_message, daily_message = perf_tracker.handle_minute_close(dt)
        minute_message['minute_perf']['recorded_vars'] = rvars

        if daily_message:
            daily_message["daily_perf"]["recorded_vars"] = rvars

        return minute_message, daily_message
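
The main loop in transform() treats the injected clock as an iterable of (dt, action) pairs and dispatches on the sim_engine constants imported at the top of the file. A minimal sketch of that protocol (an illustration only, not zipline's real clock implementation; the session range is made up):

import pandas as pd
from zipline.gens.sim_engine import BAR, DAY_START, DAY_END

def toy_daily_clock(sessions):
    # For a daily-emission run, each session contributes a DAY_START
    # (once_a_day), a single BAR (every_bar / handle_data), and a DAY_END,
    # at which point transform() yields that day's perf message.
    for session in sessions:
        yield session, DAY_START
        yield session, BAR
        yield session, DAY_END

sessions = pd.date_range('2015-01-05', '2015-01-09', tz='UTC')
for dt, action in toy_daily_clock(sessions):
    print(dt.date(), action)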