# -*- Mode: Python; tab-width: 4 -*-
# $Id$
# Author: Sam Rushing <rushing@nightmare.com>

# ======================================================================
# Copyright 1996 by Sam Rushing
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Sam
# Rushing not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================

import socket
import asyncore
import string

# This class adds support for 'chat' style protocols - where one side
# sends a 'command', and the other sends a response (examples would be
# the common internet protocols - smtp, nntp, ftp, etc.).

# The handle_read() method looks at the input stream for the current
# 'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n'
# for multi-line output), calling self.found_terminator() on its
# receipt.

# for example:
# Say you build an async nntp client using this class. At the start
# of the connection, you'll have self.terminator set to '\r\n', in
# order to process the single-line greeting. Just before issuing a
# 'LIST' command you'll set it to '\r\n.\r\n'. The output of the LIST
# command will be accumulated (using your own 'collect_incoming_data'
# method) up to the terminator, and then control will be returned to
# you - by calling your self.found_terminator() method.

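# As a rough, illustrative sketch (not part of this module): a minimal
# line-oriented subclass might look like the commented-out code below.
# The class name and the handle_line() hook are invented for the example;
# only collect_incoming_data() and found_terminator() are required.
#
##class line_channel (async_chat):
##    def __init__ (self, conn=None):
##        async_chat.__init__ (self, conn)
##        self.buffer = ''
##        self.set_terminator ('\r\n')
##    def collect_incoming_data (self, data):
##        # stash whatever arrived since the last terminator
##        self.buffer = self.buffer + data
##    def found_terminator (self):
##        # a complete line is in self.buffer; hand it off and reset.
##        # handle_line() is a hypothetical hook, not part of async_chat.
##        line, self.buffer = self.buffer, ''
##        self.handle_line (line)
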
class async_chat (asyncore.dispatcher):
    """This is an abstract class. You must derive from this class, and add
    the two methods collect_incoming_data() and found_terminator()"""

    # these are overridable defaults

    ac_in_buffer_size = 4096
    ac_out_buffer_size = 4096

    def __init__ (self, conn=None):
        self.ac_in_buffer = ''
        self.ac_out_buffer = ''
        self.producer_fifo = fifo()
        asyncore.dispatcher.__init__ (self, conn)

    def set_terminator (self, term):
        "Set the input delimiter. Can be a fixed string of any length, or None"
        if term is None:
            self.terminator = ''
        else:
            self.terminator = term

    def get_terminator (self):
        return self.terminator

    # grab some more data from the socket,
    # throw it to the collector method,
    # check for the terminator,
    # if found, transition to the next state.

    def handle_read (self):

        try:
            data = self.recv (self.ac_in_buffer_size)
        except socket.error, why:
            import sys
            self.handle_error (sys.exc_type, sys.exc_value, sys.exc_traceback)
            return

        self.ac_in_buffer = self.ac_in_buffer + data

        # Continue to search for self.terminator in self.ac_in_buffer,
        # while calling self.collect_incoming_data. The while loop
        # is necessary because we might read several data+terminator
        # combos with a single recv(1024).

        while self.ac_in_buffer:
            terminator = self.get_terminator()
            terminator_len = len(terminator)
            # 4 cases:
            # 1) end of buffer matches terminator exactly:
            #    collect data, transition
            # 2) end of buffer matches some prefix:
            #    collect data to the prefix
            # 3) end of buffer does not match any prefix:
            #    collect data
            # 4) no terminator, just collect the data
            if terminator:
                index = string.find (self.ac_in_buffer, terminator)
                if index != -1:
                    # we found the terminator
                    self.collect_incoming_data (self.ac_in_buffer[:index])
                    self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
                    # This does the Right Thing if the terminator is changed here.
                    self.found_terminator()
                else:
                    # check for a prefix of the terminator
                    index = find_prefix_at_end (self.ac_in_buffer, terminator)
                    if index:
                        # we found a prefix, collect up to the prefix
                        self.collect_incoming_data (self.ac_in_buffer[:-index])
                        self.ac_in_buffer = self.ac_in_buffer[-index:]
                        break
                    else:
                        # no prefix, collect it all
                        self.collect_incoming_data (self.ac_in_buffer)
                        self.ac_in_buffer = ''
            else:
                # no terminator, collect it all
                self.collect_incoming_data (self.ac_in_buffer)
                self.ac_in_buffer = ''

131
132 def handle_write (self):
133 self.initiate_send ()
134
135 def handle_close (self):
136 self.close()
137
138 def push (self, data):
139 self.producer_fifo.push (simple_producer (data))
140 self.initiate_send()
141
142 def push_with_producer (self, producer):
143 self.producer_fifo.push (producer)
144 self.initiate_send()
145
146 def readable (self):
147 return (len(self.ac_in_buffer) <= self.ac_in_buffer_size)
148
149 def writable (self):
150 return len(self.ac_out_buffer) or len(self.producer_fifo) or (not self.connected)
151
152 def close_when_done (self):
153 self.producer_fifo.push (None)
154
    # refill the outgoing buffer by calling the more() method
    # of the first producer in the queue
    def refill_buffer (self):
        while 1:
            if len(self.producer_fifo):
                p = self.producer_fifo.first()
                # a 'None' in the producer fifo is a sentinel,
                # telling us to close the channel.
                if p is None:
                    if not self.ac_out_buffer:
                        self.producer_fifo.pop()
                        self.close()
                    return
                data = p.more()
                if data:
                    self.ac_out_buffer = self.ac_out_buffer + data
                    return
                else:
                    self.producer_fifo.pop()
            else:
                return

176
177 def initiate_send (self):
178 obs = self.ac_out_buffer_size
179 # try to refill the buffer
180 if (not self._push_mode) and (len (self.ac_out_buffer) < obs):
181 self.refill_buffer()
182
183 if self.ac_out_buffer and self.connected:
184 # try to send the buffer
185 num_sent = self.send (self.ac_out_buffer[:obs])
186 if num_sent:
187 self.ac_out_buffer = self.ac_out_buffer[num_sent:]
188
189 def discard_buffers (self):
190 # Emergencies only!
191 self.ac_in_buffer = ''
192 self.ac_out_buffer == ''
193 while self.producer_fifo:
194 self.producer_fifo.pop()
195
    # ==================================================
    # support for push mode.
    # ==================================================
    _push_mode = 0
    def push_mode (self, boolean):
        # while push mode is on, initiate_send() skips refill_buffer()
        self._push_mode = boolean

    def writable_push (self):
        return self.connected and len(self.ac_out_buffer)

class simple_producer:
    def __init__ (self, data, buffer_size=512):
        self.data = data
        self.buffer_size = buffer_size

    def more (self):
        if len (self.data) > self.buffer_size:
            result = self.data[:self.buffer_size]
            self.data = self.data[self.buffer_size:]
            return result
        else:
            result = self.data
            self.data = ''
            return result

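# A rough sketch (not part of the original module): any object with a
# more() method can be pushed with push_with_producer(); refill_buffer()
# pops it once more() returns ''. The file-based producer below is
# illustrative only.
#
##class file_producer:
##    def __init__ (self, file, buffer_size=512):
##        self.file = file
##        self.buffer_size = buffer_size
##    def more (self):
##        # read() returns '' at EOF, which makes refill_buffer() pop us
##        return self.file.read (self.buffer_size)
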
class fifo:
    def __init__ (self, list=None):
        if not list:
            self.list = []
        else:
            self.list = list

    def __len__ (self):
        return len(self.list)

    def first (self):
        return self.list[0]

    def push (self, data):
        self.list.append (data)

    def pop (self):
        if self.list:
            result = self.list[0]
            del self.list[0]
            return (1, result)
        else:
            return (0, None)

# Given 'haystack', see if any prefix of 'needle' is at its end. This
# assumes an exact match has already been checked. Return the number of
# characters matched.
# for example:
# f_p_a_e ("qwerty\r", "\r\n") => 1
# f_p_a_e ("qwerty\r\n", "\r\n") => 2
# f_p_a_e ("qwertydkjf", "\r\n") => 0

# this could maybe be made faster with a computed regex?

##def find_prefix_at_end (haystack, needle):
##    nl = len(needle)
##    result = 0
##    for i in range (1,nl):
##        if haystack[-(nl-i):] == needle[:(nl-i)]:
##            result = nl-i
##            break
##    return result

# yes, this is about twice as fast, but still seems
# to be negligible CPU. The previous could do about 290
# searches/sec. The new one about 555/sec.

import regex

prefix_cache = {}

def prefix_regex (needle):
    # build (and cache) a pattern that matches any non-empty prefix of
    # 'needle' anchored at the end of the string; e.g. for '\r\n' the
    # pattern is '\r\(\n\)?$'
    if prefix_cache.has_key (needle):
        return prefix_cache[needle]
    else:
        reg = needle[-1]
        for i in range(1,len(needle)):
            reg = '%c\(%s\)?' % (needle[-(i+1)], reg)
        reg = regex.compile (reg+'$')
        prefix_cache[needle] = reg, len(needle)
        return reg, len(needle)

def find_prefix_at_end (haystack, needle):
    reg, length = prefix_regex (needle)
    lh = len(haystack)
    # only the last len(needle) characters of haystack can matter
    result = reg.search (haystack, max(0,lh-length))
    if result >= 0:
        return (lh - result)
    else:
        return 0