Completed
Pull Request — master (#858) by Eddie
Created 01:43

zipline.gens.AlgorithmSimulator.inject_algo_dt()   Rating: A

Complexity
    Conditions: 2

Size
    Total Lines: 3

Duplication
    Lines: 0
    Ratio: 0%
Metric  Value
cc      2       (cyclomatic complexity)
dl      0       (duplicated lines)
loc     3       (lines of code)
rs      10
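
The flagged method is the three-line inject_algo_dt closure defined inside AlgorithmSimulator.__init__. Its single if test on record.extra gives one branch on top of the base path, which is what produces the cyclomatic complexity of 2 (the "Conditions" and "cc" values above), and its three-line body accounts for loc 3. Excerpted (dedented) from the listing that follows:

def inject_algo_dt(record):
    if 'algo_dt' not in record.extra:
        record.extra['algo_dt'] = self.simulation_dt

The full source of the containing module follows.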
#
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logbook import Logger, Processor
from pandas.tslib import normalize_date
from zipline.protocol import BarData
from zipline.utils.api_support import ZiplineAPI

from zipline.gens.sim_engine import (
    BAR,
    DAY_START,
    DAY_END,
    MINUTE_END
)

log = Logger('Trade Simulation')


class AlgorithmSimulator(object):

    EMISSION_TO_PERF_KEY_MAP = {
        'minute': 'minute_perf',
        'daily': 'daily_perf'
    }

    def __init__(self, algo, sim_params, data_portal, clock, benchmark_source):

        # ==============
        # Simulation
        # Param Setup
        # ==============
        self.sim_params = sim_params
        self.env = algo.trading_environment
        self.data_portal = data_portal

        # ==============
        # Algo Setup
        # ==============
        self.algo = algo
        self.algo_start = normalize_date(self.sim_params.first_open)

        # ==============
        # Snapshot Setup
        # ==============

        # The algorithm's data as of our most recent event.
        # We want an object that will have empty objects as default
        # values on missing keys.
        self.current_data = BarData(data_portal=self.data_portal)

        # We don't have a datetime for the current snapshot until we
        # receive a message.
        self.simulation_dt = None

        self.clock = clock

        self.benchmark_source = benchmark_source

        # =============
        # Logging Setup
        # =============

        # Processor function for injecting the algo_dt into
        # user prints/logs.
        def inject_algo_dt(record):
            if 'algo_dt' not in record.extra:
                record.extra['algo_dt'] = self.simulation_dt
        self.processor = Processor(inject_algo_dt)

    def transform(self):
        """
        Main generator work loop.
        """
        algo = self.algo
        algo.data_portal = self.data_portal
        handle_data = algo.event_manager.handle_data
        current_data = self.current_data

        data_portal = self.data_portal

        # can't cache a pointer to algo.perf_tracker because we're not
        # guaranteed that the algo doesn't swap out perf trackers during
        # its lifetime.
        # likewise, we can't cache a pointer to the blotter.

        algo.perf_tracker.position_tracker.data_portal = data_portal

        def every_bar(dt_to_use):
            # called every tick (minute or day).

            data_portal.current_dt = dt_to_use
            self.simulation_dt = dt_to_use
            algo.on_dt_changed(dt_to_use)

            blotter = algo.blotter
            perf_tracker = algo.perf_tracker

            # handle any transactions and commissions coming out new orders
            # placed in the last bar
            new_transactions, new_commissions = \
                blotter.get_transactions(data_portal)

            for transaction in new_transactions:
                perf_tracker.process_transaction(transaction)

                # since this order was modified, record it
                order = blotter.orders[transaction.order_id]
                perf_tracker.process_order(order)

            if new_commissions:
                for commission in new_commissions:
                    perf_tracker.process_commission(commission)

            handle_data(algo, current_data, dt_to_use)

            # grab any new orders from the blotter, then clear the list.
            # this includes cancelled orders.
            new_orders = blotter.new_orders
            blotter.new_orders = []

            # if we have any new orders, record them so that we know
            # in what perf period they were placed.
            if new_orders:
                for new_order in new_orders:
                    perf_tracker.process_order(new_order)

        def once_a_day(midnight_dt):
            # set all the timestamps
            self.simulation_dt = midnight_dt
            algo.on_dt_changed(midnight_dt)
            data_portal.current_day = midnight_dt

            # call before trading start
            algo.before_trading_start(current_data)

            perf_tracker = algo.perf_tracker

            # handle any splits that impact any positions or any open orders.
            sids_we_care_about = \
                list(set(list(perf_tracker.position_tracker.positions.keys()) +
                         list(algo.blotter.open_orders.keys())))

            if len(sids_we_care_about) > 0:
                splits = data_portal.get_splits(sids_we_care_about,
                                                midnight_dt)
                if len(splits) > 0:
                    algo.blotter.process_splits(splits)
                    perf_tracker.position_tracker.handle_splits(splits)

        def handle_benchmark(date):
            algo.perf_tracker.all_benchmark_returns[date] = \
                self.benchmark_source.get_value(date)

        with self.processor, ZiplineAPI(self.algo):
            for dt, action in self.clock:
                if action == BAR:
                    every_bar(dt)
                elif action == DAY_START:
                    once_a_day(dt)
                elif action == DAY_END:
                    # End of the day.
                    handle_benchmark(dt)
                    yield self._get_daily_message(dt, algo, algo.perf_tracker)
                elif action == MINUTE_END:
                    handle_benchmark(dt)
                    minute_msg, daily_msg = \
                        self._get_minute_message(dt, algo, algo.perf_tracker)

                    yield minute_msg

                    if daily_msg:
                        yield daily_msg

        risk_message = algo.perf_tracker.handle_simulation_end()
        yield risk_message

    @staticmethod
    def _get_daily_message(dt, algo, perf_tracker):
        """
        Get a perf message for the given datetime.
        """
        perf_message = perf_tracker.handle_market_close_daily(dt)
        perf_message['daily_perf']['recorded_vars'] = algo.recorded_vars
        return perf_message

    @staticmethod
    def _get_minute_message(dt, algo, perf_tracker):
        """
        Get a perf message for the given datetime.
        """
        rvars = algo.recorded_vars

        minute_message, daily_message = perf_tracker.handle_minute_close(dt)
        minute_message['minute_perf']['recorded_vars'] = rvars

        if daily_message:
            daily_message["daily_perf"]["recorded_vars"] = rvars

        return minute_message, daily_message
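
For context on what the flagged closure does at run time: transform() enters the block "with self.processor, ZiplineAPI(self.algo):", so while the simulation loop runs, every logbook record emitted by user code is passed through inject_algo_dt and tagged with the current simulation datetime. The standalone sketch below shows that mechanism in isolation; the StderrHandler, its format string, and the fixed simulation_dt value are illustrative assumptions rather than part of the zipline source.

from logbook import Logger, Processor, StderrHandler

log = Logger('Trade Simulation')

# Illustrative stand-in for self.simulation_dt; in the simulator this value
# is advanced by every_bar()/once_a_day() before user code runs.
simulation_dt = '2015-01-05 14:31:00'


def inject_algo_dt(record):
    # Same shape as the flagged method: tag the record with the current
    # simulation datetime unless the caller already supplied one.
    if 'algo_dt' not in record.extra:
        record.extra['algo_dt'] = simulation_dt


handler = StderrHandler(
    format_string='{record.extra[algo_dt]} {record.level_name}: {record.message}')

with handler.applicationbound(), Processor(inject_algo_dt):
    log.info('handle_data fired')  # emitted with the injected algo_dt prefix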