# -*- Mode: Python; tab-width: 4 -*-

import string

"""
A collection of producers.
Each producer implements a particular feature: They can be combined
in various ways to get interesting and useful behaviors.

For example, you can feed dynamically-produced output into the compressing
producer, then wrap this with the 'chunked' transfer-encoding producer.
"""
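
# A quick sketch of that composition (an illustration, not part of the
# original interface; it assumes the classes defined below and that the
# zlib import below succeeds):
#
#     p = chunked_producer (compressed_producer (simple_producer (data)))
#
# 'data' is then compressed on the fly and framed with the 'chunked' coding.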

class simple_producer:
15 | "producer for a string" |
||
16 | def __init__ (self, data, buffer_size=1024): |
||
17 | self.data = data |
||
18 | self.buffer_size = buffer_size |
||
19 | |||
20 | def more (self): |
||
21 | if len (self.data) > self.buffer_size: |
||
22 | result = self.data[:self.buffer_size] |
||
23 | self.data = self.data[self.buffer_size:] |
||
24 | return result |
||
25 | else: |
||
26 | result = self.data |
||
27 | self.data = '' |
||
28 | return result |

class scanning_producer:
    "like simple_producer, but more efficient for large strings"
    # rather than re-slicing the tail of the string on each call (which
    # copies the remainder, as simple_producer does), keep an index and
    # copy out only the chunk being returned.
    def __init__ (self, data, buffer_size=1024):
        self.data = data
        self.buffer_size = buffer_size
        self.pos = 0

    def more (self):
        if self.pos < len(self.data):
            lp = self.pos
            rp = min (
                len(self.data),
                self.pos + self.buffer_size
                )
            result = self.data[lp:rp]
            self.pos = self.pos + len(result)
            return result
        else:
            return ''

class lines_producer:
51 | "producer for a list of lines" |
||
52 | |||
53 | def __init__ (self, lines): |
||
54 | self.lines = lines |
||
55 | |||
56 | def ready (self): |
||
57 | return len(self.lines) |
||
58 | |||
59 | def more (self): |
||
60 | if self.lines: |
||
61 | chunk = self.lines[:50] |
||
62 | self.lines = self.lines[50:] |
||
63 | return string.join (chunk, '\r\n') + '\r\n' |
||
64 | else: |
||
65 | return '' |

class buffer_list_producer:
    "producer for a list of buffers"

    # i.e., data == string.join (buffers, '')

    def __init__ (self, buffers):
        self.index = 0
        self.buffers = buffers

    def more (self):
        if self.index >= len(self.buffers):
            return ''
        else:
            data = self.buffers[self.index]
            self.index = self.index + 1
            return data

class file_producer:
    "producer wrapper for file[-like] objects"

    # match http_channel's outgoing buffer size
    out_buffer_size = 1<<16

    def __init__ (self, file):
        self.done = 0
        self.file = file

    def more (self):
        if self.done:
            return ''
        else:
            data = self.file.read (self.out_buffer_size)
            if not data:
                self.file.close()
                del self.file
                self.done = 1
                return ''
            else:
                return data
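
# A usage sketch ('spam.txt' is a hypothetical file name; any object
# with read() and close() methods will do, e.g. a StringIO):
#
#     fp = file_producer (open ('spam.txt', 'rb'))
#     while 1:
#         block = fp.more()
#         if not block:
#             break
#         # ... hand 'block' to the channel's send machinery ...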

# A simple output producer. This one does not [yet] have
# the safety feature built into the monitor channel: runaway
# output will not be caught.

# don't try to print from within any of the methods
# of this object.

class output_producer:
    "Acts like an output file; suitable for capturing sys.stdout"
    def __init__ (self):
        self.data = ''

    def write (self, data):
        lines = string.splitfields (data, '\n')
        data = string.join (lines, '\r\n')
        self.data = self.data + data

    def writeline (self, line):
        self.data = self.data + line + '\r\n'

    def writelines (self, lines):
        self.data = self.data + string.joinfields (
            lines,
            '\r\n'
            ) + '\r\n'

    def ready (self):
        return (len (self.data) > 0)

    def flush (self):
        pass

    def softspace (self, *args):
        pass

    def more (self):
        if self.data:
            result = self.data[:512]
            self.data = self.data[512:]
            return result
        else:
            return ''
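
# A capture sketch (an illustration, not part of the original module):
#
#     import sys
#     op = output_producer()
#     saved, sys.stdout = sys.stdout, op
#     try:
#         print 'hello'        # captured, with '\n' mapped to '\r\n'
#     finally:
#         sys.stdout = saved
#     # op.more() now yields 'hello\r\n' in 512-byte slices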

class composite_producer:
    "combine a fifo of producers into one"
    def __init__ (self, producers):
        self.producers = producers

    def more (self):
        while len(self.producers):
            p = self.producers.first()
            d = p.more()
            if d:
                return d
            else:
                self.producers.pop()
        else:
            # the 'else' runs when the fifo empties without yielding data
            return ''
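
# 'producers' is expected to behave like the fifo class in asynchat:
# it must support len(), first() and pop().  A stand-in sketch
# (hypothetical, for illustration only):
#
#     class list_fifo:
#         def __init__ (self, items):
#             self.items = list(items)
#         def __len__ (self):
#             return len(self.items)
#         def first (self):
#             return self.items[0]
#         def pop (self):
#             return self.items.pop(0)
#
#     cp = composite_producer (
#         list_fifo ([simple_producer ('one'), simple_producer ('two')])
#         )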


class globbing_producer:
    """
    'glob' the output from a producer into a particular buffer size.
    helps reduce the number of calls to send(). [this appears to
    gain about 30% performance on requests to a single channel]
    """

    def __init__ (self, producer, buffer_size=1<<16):
        self.producer = producer
        self.buffer = ''
        self.buffer_size = buffer_size

    def more (self):
        while len(self.buffer) < self.buffer_size:
            data = self.producer.more()
            if data:
                self.buffer = self.buffer + data
            else:
                break
        r = self.buffer
        self.buffer = ''
        return r
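
# Usage sketch: wrap a producer that emits many small pieces so the
# channel sees fewer, larger blocks ('lots_of_lines' is a hypothetical
# list of strings):
#
#     gp = globbing_producer (lines_producer (lots_of_lines))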


class hooked_producer:
    """
    A producer that will call <function> when it empties, with an
    argument of the number of bytes produced. Useful for
    logging/instrumentation purposes.
    """

    def __init__ (self, producer, function):
        self.producer = producer
        self.function = function
        self.bytes = 0

    def more (self):
        if self.producer:
            result = self.producer.more()
            if not result:
                self.producer = None
                self.function (self.bytes)
            else:
                self.bytes = self.bytes + len(result)
            return result
        else:
            return ''
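
# An instrumentation sketch (the callback and 'big_string' are
# hypothetical):
#
#     import sys
#
#     def log_bytes (n):
#         sys.stderr.write ('produced %d bytes\n' % n)
#
#     hp = hooked_producer (simple_producer (big_string), log_bytes)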

# HTTP 1.1 emphasizes that an advertised Content-Length header MUST be
# correct. In the face of Strange Files, it is conceivable that
# reading a 'file' may produce an amount of data not matching that
# reported by os.stat() [text/binary mode issues, perhaps the file is
# being appended to, etc..] This makes the chunked encoding a True
# Blessing, and it really ought to be used even with normal files.
# How beautifully it blends with the concept of the producer.

class chunked_producer:
    """A producer that implements the 'chunked' transfer coding for HTTP/1.1.
    Here is a sample usage:
            request['Transfer-Encoding'] = 'chunked'
            request.push (
                    producers.chunked_producer (your_producer)
                    )
            request.done()
    """

    def __init__ (self, producer, footers=None):
        self.producer = producer
        self.footers = footers

    def more (self):
        if self.producer:
            data = self.producer.more()
            if data:
                return '%x\r\n%s\r\n' % (len(data), data)
            else:
                self.producer = None
                if self.footers:
                    return string.join (
                        ['0'] + self.footers,
                        '\r\n'
                        ) + '\r\n\r\n'
                else:
                    return '0\r\n\r\n'
        else:
            return ''
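
# The framing produced is 'hex-size CRLF data CRLF', terminated by a
# zero-length chunk; for example:
#
#     >>> p = chunked_producer (simple_producer ('hello, world'))
#     >>> p.more()
#     'c\r\nhello, world\r\n'
#     >>> p.more()
#     '0\r\n\r\n'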

# Unfortunately this isn't very useful right now (Aug 97), because
# apparently the browsers don't do on-the-fly decompression. Which
# is sad, because this could _really_ speed things up, especially for
# low-bandwidth clients (i.e., most everyone).

try:
    import zlib
except ImportError:
    zlib = None

class compressed_producer:
    """
    Compress another producer on-the-fly, using ZLIB
    [Unfortunately, none of the current browsers seem to support this]
    """

    # Note: It's not very efficient to have the server repeatedly
    # compressing your outgoing files: compress them ahead of time, or
    # use a compress-once-and-store scheme. However, if you have low
    # bandwidth and low traffic, this may make more sense than
    # maintaining your source files compressed.
    #
    # Can also be used for compressing dynamically-produced output.

    def __init__ (self, producer, level=5):
        self.producer = producer
        self.compressor = zlib.compressobj (level)

    def more (self):
        if self.producer:
            cdata = ''
            # feed until we get some output
            while not cdata:
                data = self.producer.more()
                if not data:
                    self.producer = None
                    return self.compressor.flush()
                else:
                    cdata = self.compressor.compress (data)
            return cdata
        else:
            return ''
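
# As the module docstring suggests, this composes with the chunked
# coding (a sketch; it assumes the zlib import above succeeded, and
# 'f' is a hypothetical open file object):
#
#     p = chunked_producer (compressed_producer (file_producer (f)))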

class escaping_producer:

    """A producer that escapes a sequence of characters.
    Common usage: escaping the CRLF.CRLF sequence in SMTP, NNTP, etc...
    """

    def __init__ (self, producer, esc_from='\r\n.', esc_to='\r\n..'):
        self.producer = producer
        self.esc_from = esc_from
        self.esc_to = esc_to
        self.buffer = ''
        from asynchat import find_prefix_at_end
        self.find_prefix_at_end = find_prefix_at_end

    def more (self):
        esc_from = self.esc_from
        esc_to = self.esc_to

        data = self.producer.more()
        buffer = self.buffer + data

        if buffer:
            buffer = string.replace (buffer, esc_from, esc_to)
            if data:
                i = self.find_prefix_at_end (buffer, esc_from)
            else:
                # the wrapped producer is done: a held partial match
                # can never complete, so don't hold anything back
                i = 0
            if i:
                # we found a prefix of esc_from at the end; hold it
                # back until we can see how it continues
                self.buffer = buffer[-i:]
                return buffer[:-i]
            else:
                # no prefix, return it all
                self.buffer = ''
                return buffer
        else:
            return buffer
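
# An escaping sketch for the SMTP/NNTP leading-dot convention:
#
#     >>> ep = escaping_producer (simple_producer ('one\r\n.two\r\n'))
#     >>> ep.more()
#     'one\r\n..two'
#     >>> ep.more()          # producer exhausted; the held tail is flushed
#     '\r\n'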
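
if __name__ == '__main__':
    # A small self-test sketch (an addition, not part of the original
    # module): frame a short string with the chunked coding and show
    # the wire format.
    p = chunked_producer (simple_producer ('hello, world'))
    while 1:
        data = p.more()
        if not data:
            break
        print repr(data)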