# -*- Mode: Python; tab-width: 4 -*-
#       Id: asynchat.py,v 2.23 1999/05/01 04:49:24 rushing Exp
#       Author: Sam Rushing <rushing@nightmare.com>
# ======================================================================
# Copyright 1996 by Sam Rushing
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Sam
# Rushing not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================
28 """A class supporting chat-style (command/response) protocols.
30 This class adds support for 'chat' style protocols - where one side
31 sends a 'command', and the other sends a response (examples would be
32 the common internet protocols - smtp, nntp, ftp, etc..).
34 The handle_read() method looks at the input stream for the current
35 'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n'
36 for multi-line output), calling self.found_terminator() on its
40 Say you build an async nntp client using this class. At the start
41 of the connection, you'll have self.terminator set to '\r\n', in
42 order to process the single-line greeting. Just before issuing a
43 'LIST' command you'll set it to '\r\n.\r\n'. The output of the LIST
44 command will be accumulated (using your own 'collect_incoming_data'
45 method) up to the terminator, and then control will be returned to
46 you - by calling your self.found_terminator() method.
class async_chat (asyncore.dispatcher):
    """This is an abstract class.  You must derive from this class, and add
    the two methods collect_incoming_data() and found_terminator()"""

    # these are overridable defaults

    ac_in_buffer_size       = 4096
    ac_out_buffer_size      = 4096

    def __init__ (self, conn=None):
        self.ac_in_buffer = ''
        self.ac_out_buffer = ''
        self.producer_fifo = fifo()
        asyncore.dispatcher.__init__ (self, conn)

    def set_terminator (self, term):
        "Set the input delimiter.  Can be a fixed string of any length, an integer, or None"
        self.terminator = term
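
    # Hypothetical illustration (not part of the original source) of the
    # three terminator forms handle_read() understands:
    #
    #     self.set_terminator ('\r\n')   # fixed string: collect up to CRLF
    #     self.set_terminator (512)      # integer: collect exactly 512 more bytes
    #     self.set_terminator (None)     # None: no terminator, collect everything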

    def get_terminator (self):
        return self.terminator

    # grab some more data from the socket,
    # throw it to the collector method,
    # check for the terminator,
    # if found, transition to the next state.

    def handle_read (self):

        try:
            data = self.recv (self.ac_in_buffer_size)
        except socket.error, why:
            self.handle_error()
            return

        self.ac_in_buffer = self.ac_in_buffer + data

        # Continue to search for self.terminator in self.ac_in_buffer,
        # while calling self.collect_incoming_data.  The while loop
        # is necessary because we might read several data+terminator
        # combos with a single recv(1024).

        while self.ac_in_buffer:
            lb = len(self.ac_in_buffer)
            terminator = self.get_terminator()
            if terminator is None:
                # no terminator, collect it all
                self.collect_incoming_data (self.ac_in_buffer)
                self.ac_in_buffer = ''
            elif type(terminator) == type(0):
                # numeric terminator
                n = terminator
                if lb < n:
                    self.collect_incoming_data (self.ac_in_buffer)
                    self.ac_in_buffer = ''
                    self.terminator = self.terminator - lb
                else:
                    self.collect_incoming_data (self.ac_in_buffer[:n])
                    self.ac_in_buffer = self.ac_in_buffer[n:]
                    self.terminator = 0
                    self.found_terminator()
            else:
                # 3 cases:
                # 1) end of buffer matches terminator exactly:
                #    collect data, transition
                # 2) end of buffer matches some prefix:
                #    collect data to the prefix
                # 3) end of buffer does not match any prefix:
                #    collect data
                terminator_len = len(terminator)
                index = string.find (self.ac_in_buffer, terminator)
                if index != -1:
                    # we found the terminator
                    self.collect_incoming_data (self.ac_in_buffer[:index])
                    self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
                    # This does the Right Thing if the terminator is changed here.
                    self.found_terminator()
                else:
                    # check for a prefix of the terminator
                    index = find_prefix_at_end (self.ac_in_buffer, terminator)
                    if index:
                        if index != lb:
                            # we found a prefix, collect up to the prefix
                            self.collect_incoming_data (self.ac_in_buffer[:-index])
                            self.ac_in_buffer = self.ac_in_buffer[-index:]
                        break
                    else:
                        # no prefix, collect it all
                        self.collect_incoming_data (self.ac_in_buffer)
                        self.ac_in_buffer = ''

    def handle_write (self):
        self.initiate_send ()

    def handle_close (self):
        self.close()

    def push (self, data):
        self.producer_fifo.push (simple_producer (data))
        self.initiate_send()

    def push_with_producer (self, producer):
        self.producer_fifo.push (producer)
        self.initiate_send()
159 "predicate for inclusion in the readable for select()"
160 return (len(self
.ac_in_buffer
) <= self
.ac_in_buffer_size
)
163 "predicate for inclusion in the writable for select()"
164 # return len(self.ac_out_buffer) or len(self.producer_fifo) or (not self.connected)
165 # this is about twice as fast, though not as clear.
167 (self
.ac_out_buffer
is '') and
168 self
.producer_fifo
.is_empty() and

    def close_when_done (self):
        "automatically close this channel once the outgoing queue is empty"
        self.producer_fifo.push (None)

    # refill the outgoing buffer by calling the more() method
    # of the first producer in the queue
    def refill_buffer (self):
        _string_type = type('')
        while 1:
            if len(self.producer_fifo):
                p = self.producer_fifo.first()
                # a 'None' in the producer fifo is a sentinel,
                # telling us to close the channel.
                if p is None:
                    if not self.ac_out_buffer:
                        self.producer_fifo.pop()
                        self.close()
                    return
                elif type(p) is _string_type:
                    self.producer_fifo.pop()
                    self.ac_out_buffer = self.ac_out_buffer + p
                    return
                data = p.more()
                if data:
                    self.ac_out_buffer = self.ac_out_buffer + data
                    return
                else:
                    self.producer_fifo.pop()
            else:
                return

    def initiate_send (self):
        obs = self.ac_out_buffer_size
        # try to refill the buffer
        if (len (self.ac_out_buffer) < obs):
            self.refill_buffer()

        if self.ac_out_buffer and self.connected:
            # try to send the buffer
            try:
                num_sent = self.send (self.ac_out_buffer[:obs])
                if num_sent:
                    self.ac_out_buffer = self.ac_out_buffer[num_sent:]
            except socket.error, why:
                self.handle_error()
                return

    def discard_buffers (self):
        # Emergencies only!
        self.ac_in_buffer = ''
        self.ac_out_buffer = ''
        while self.producer_fifo:
            self.producer_fifo.pop()
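
# A hedged example, not part of the original asynchat module: a minimal
# line-oriented subclass along the lines described in the module docstring.
# The class name and its handle_line() hook are illustrative only; a real
# protocol handler would override handle_line().

class example_line_channel (async_chat):
    "collect CRLF-terminated lines and hand each complete line to handle_line()"

    def __init__ (self, conn=None):
        async_chat.__init__ (self, conn)
        self.line_buffer = ''
        self.set_terminator ('\r\n')

    def collect_incoming_data (self, data):
        # accumulate data until found_terminator() fires
        self.line_buffer = self.line_buffer + data

    def found_terminator (self):
        line = self.line_buffer
        self.line_buffer = ''
        self.handle_line (line)

    def handle_line (self, line):
        # illustrative default: echo the line back to the peer
        self.push (line + '\r\n')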

class simple_producer:

    def __init__ (self, data, buffer_size=512):
        self.data = data
        self.buffer_size = buffer_size

    def more (self):
        if len (self.data) > self.buffer_size:
            result = self.data[:self.buffer_size]
            self.data = self.data[self.buffer_size:]
            return result
        else:
            result = self.data
            self.data = ''
            return result
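
# Another hedged example, not in the original module: any object with a
# more() method can be handed to push_with_producer().  This hypothetical
# producer streams an already-open file object in buffer_size chunks;
# returning '' from more() tells refill_buffer() the producer is exhausted.

class example_file_producer:

    def __init__ (self, file, buffer_size=512):
        self.file = file
        self.buffer_size = buffer_size

    def more (self):
        return self.file.read (self.buffer_size)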

class fifo:
    def __init__ (self, list=None):
        if not list:
            self.list = []
        else:
            self.list = list

    def __len__ (self):
        return len(self.list)

    def is_empty (self):
        return self.list == []

    def first (self):
        return self.list[0]

    def push (self, data):
        self.list.append (data)

    def pop (self):
        if self.list:
            result = self.list[0]
            del self.list[0]
            return (1, result)
        else:
            return (0, None)

# Given 'haystack', see if any prefix of 'needle' is at its end.  This
# assumes an exact match has already been checked.  Return the number of
# characters matched.
# for example:
# f_p_a_e ("qwerty\r", "\r\n") => 1
# f_p_a_e ("qwerty\r\n", "\r\n") => 2
# f_p_a_e ("qwertydkjf", "\r\n") => 0

# this could maybe be made faster with a computed regex?

##def find_prefix_at_end (haystack, needle):
##    nl = len(needle)
##    result = 0
##    for i in range (1,nl):
##        if haystack[-(nl-i):] == needle[:(nl-i)]:
##            result = nl-i
##            break
##    return result

# yes, this is about twice as fast, but still seems
# to be negligible CPU.  The previous could do about 290
# searches/sec.  the new one about 555/sec.

import regex

prefix_cache = {}

def prefix_regex (needle):
    if prefix_cache.has_key (needle):
        return prefix_cache[needle]
    else:
        reg = needle[-1]
        for i in range(1,len(needle)):
            reg = '%c\(%s\)?' % (needle[-(i+1)], reg)
        reg = regex.compile (reg+'$')
        prefix_cache[needle] = reg, len(needle)
        return reg, len(needle)

def find_prefix_at_end (haystack, needle):
    reg, length = prefix_regex (needle)
    lh = len(haystack)
    result = reg.search (haystack, max(0,lh-length))
    if result >= 0:
        return (lh - result)
    else:
        return 0