#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Modified by Linus Nielsen Feltzing for inclusion in the libcurl test
# framework
#
import SocketServer
import argparse
import re
import select
import socket
import time
import pprint
import os
29 |
# Served as the body of the "200 OK" answer when the index page is requested.
INFO_MESSAGE = '''
This is a test server to test the libcurl pipelining functionality.
It is a modified version if Google's HTTP pipelining test server. More
information can be found here:

http://dev.chromium.org/developers/design-documents/network-stack/http-pipelining

Source code can be found here:

http://code.google.com/p/http-pipelining-test/
'''
MAX_REQUEST_SIZE = 1024  # bytes
MIN_POLL_TIME = 0.01  # seconds. Minimum time to poll, in order to prevent
                      # excessive looping because Python refuses to poll for
                      # small timeouts.
SEND_BUFFER_TIME = 0.5  # seconds. Delay before queued responses are flushed.
TIMEOUT = 30  # seconds. Idle connections are dropped after this long.
46 |
|
47 |
|
48 |
class Error(Exception):
  """Base class for all exceptions raised by this module."""
  pass
50 |
|
51 |
|
52 |
class RequestTooLargeError(Error):
  """Raised when a request exceeds MAX_REQUEST_SIZE."""
  pass
54 |
|
55 |
|
56 |
class ServeIndexError(Error):
  """Raised when the index page is requested; answered with INFO_MESSAGE."""
  pass
58 |
|
59 |
|
60 |
class UnexpectedMethodError(Error):
  """Raised on any HTTP method other than GET."""
  pass
62 |
|
63 |
|
64 |
class RequestParser(object):
  """Parses an input buffer looking for HTTP GET requests."""

  # Parser states.
  LOOKING_FOR_GET = 1
  READING_HEADERS = 2

  # Raw strings so backslash escapes (\d) reach the regex engine intact.
  HEADER_RE = re.compile(r'([^:]+):(.*)\n')
  REQUEST_RE = re.compile(r'([^ ]+) ([^ ]+) HTTP/(\d+)\.(\d+)\n')

  def __init__(self):
    """Initializer."""
    self._buffer = ""
    self._pending_headers = {}
    self._pending_request = ""
    self._state = self.LOOKING_FOR_GET
    self._were_all_requests_http_1_1 = True
    self._valid_requests = []

  def ParseAdditionalData(self, data):
    """Finds HTTP requests in |data|.

    Args:
      data: (String) Newly received input data from the socket.

    Returns:
      (List of Tuples)
        (String) The request path.
        (Map of String to String) The header name and value.

    Raises:
      RequestTooLargeError: If the request exceeds MAX_REQUEST_SIZE.
      UnexpectedMethodError: On a non-GET method.
      Error: On a programming error.
    """
    # Log all raw input for post-mortem debugging of the test run; the
    # context manager guarantees the handle is closed even if write fails.
    with open('log/server.input', 'a') as logfile:
      logfile.write(data)
    # The parser works on bare '\n' line endings only.
    self._buffer += data.replace('\r', '')
    should_continue_parsing = True
    while should_continue_parsing:
      if self._state == self.LOOKING_FOR_GET:
        should_continue_parsing = self._DoLookForGet()
      elif self._state == self.READING_HEADERS:
        should_continue_parsing = self._DoReadHeader()
      else:
        # %d instead of '+': the state is an int, concatenation would raise
        # TypeError and mask the real programming error.
        raise Error('Unexpected state: %d' % self._state)
    if len(self._buffer) > MAX_REQUEST_SIZE:
      raise RequestTooLargeError(
          'Request is at least %d bytes' % len(self._buffer))
    valid_requests = self._valid_requests
    self._valid_requests = []
    return valid_requests

  @property
  def were_all_requests_http_1_1(self):
    # True while every request parsed so far declared HTTP/1.1.
    return self._were_all_requests_http_1_1

  def _DoLookForGet(self):
    """Tries to parse an HTTP request line.

    Returns:
      (Boolean) True if a request was found.

    Raises:
      UnexpectedMethodError: On a non-GET method.
      ServeIndexError: If the index page was requested.
    """
    m = self.REQUEST_RE.match(self._buffer)
    if not m:
      return False
    method, path, http_major, http_minor = m.groups()

    if method != 'GET':
      raise UnexpectedMethodError('Unexpected method: ' + method)
    if path in ['/', '/index.htm', '/index.html']:
      raise ServeIndexError()

    if http_major != '1' or http_minor != '1':
      self._were_all_requests_http_1_1 = False

    self._pending_request = path
    self._buffer = self._buffer[m.end():]
    self._state = self.READING_HEADERS
    return True

  def _DoReadHeader(self):
    """Tries to parse a HTTP header.

    Returns:
      (Boolean) True if it found the end of the request or a HTTP header.
    """
    # A bare newline terminates the header block: the request is complete.
    if self._buffer.startswith('\n'):
      self._buffer = self._buffer[1:]
      self._state = self.LOOKING_FOR_GET
      self._valid_requests.append((self._pending_request,
                                   self._pending_headers))
      self._pending_headers = {}
      self._pending_request = ""
      return True

    m = self.HEADER_RE.match(self._buffer)
    if not m:
      return False

    header = m.group(1).lower()
    value = m.group(2).strip().lower()
    # Only the first occurrence of a repeated header is kept.
    if header not in self._pending_headers:
      self._pending_headers[header] = value
    self._buffer = self._buffer[m.end():]
    return True
177 |
|
178 |
|
179 |
class ResponseBuilder(object):
  """Builds HTTP responses for a list of accumulated requests."""

  def __init__(self):
    """Initializer."""
    self._max_pipeline_depth = 0
    self._requested_paths = []
    self._processed_end = False
    self._were_all_requests_http_1_1 = True

  def QueueRequests(self, requested_paths, were_all_requests_http_1_1):
    """Adds requests to the queue of requests.

    Args:
      requested_paths: (List of Tuples) Requested paths and their headers.
      were_all_requests_http_1_1: (Boolean) True if every request parsed so
          far declared HTTP/1.1.
    """
    self._requested_paths.extend(requested_paths)
    self._were_all_requests_http_1_1 = were_all_requests_http_1_1

  def Chunkify(self, data, chunksize):
    """Divides a string into HTTP chunked-encoding chunks.

    Args:
      data: (String) Data to encode.
      chunksize: (Integer) Size of each chunk.

    Returns:
      (List of Strings) One entry per chunk, each prefixed with its hex
      size. The terminating zero-length chunk is NOT appended.
    """
    return [hex(chunksize)[2:] + "\r\n" + data[i:i + chunksize] + "\r\n"
            for i in range(0, len(data), chunksize)]

  def BuildResponses(self):
    """Converts the queue of requests into responses.

    Known paths get canned payloads; anything else is answered 404. The
    /connection_close.txt and /stats.txt paths mark the connection as done.

    Returns:
      (String) Buffer containing all of the responses.
    """
    # 16-byte pattern repeated to build the fixed-size payloads below.
    # (A name like 'str' would shadow the builtin.)
    filler = '0123456789abcdef'
    result = ""
    self._max_pipeline_depth = max(self._max_pipeline_depth,
                                   len(self._requested_paths))
    for path, headers in self._requested_paths:
      if path == '/verifiedserver':
        body = "WE ROOLZ: {}\r\n".format(os.getpid())
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: {}'.format(len(body)),
                       'Cache-Control: no-store'], body)

      elif path == '/alphabet.txt':
        body = 'abcdefghijklmnopqrstuvwxyz'
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 26',
                       'Cache-Control: no-store'], body)

      elif path == '/reverse.txt':
        body = 'zyxwvutsrqponmlkjihgfedcba'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: no-store'], body)

      elif path == '/chunked.txt':
        body = ('7\r\nchunked\r\n'
                '8\r\nencoding\r\n'
                '2\r\nis\r\n'
                '3\r\nfun\r\n'
                '0\r\n\r\n')
        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
            body)

      elif path == '/cached.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60'], body)

      elif path == '/connection_close.txt':
        body = 'azbycxdwevfugthsirjqkplomn'
        result += self._BuildResponse(
            '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60',
                       'Connection: close'], body)
        self._processed_end = True

      elif path == '/1k.txt':
        body = filler * 64  # 1024 bytes
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 1024',
                       'Cache-Control: max-age=60'], body)

      elif path == '/10k.txt':
        body = filler * 640  # 10240 bytes
        result += self._BuildResponse(
            '200 OK', ['Server: Apache',
                       'Content-Length: 10240',
                       'Cache-Control: max-age=60'], body)

      elif path == '/100k.txt':
        body = filler * 6400  # 102400 bytes
        result += self._BuildResponse(
            '200 OK',
            ['Server: Apache',
             'Content-Length: 102400',
             'Cache-Control: max-age=60'],
            body)

      elif path == '/100k_chunked.txt':
        # Chunkify omits the final zero chunk, so append it explicitly.
        body = self.Chunkify(filler * 6400, 20480)
        body.append('0\r\n\r\n')
        body = ''.join(body)

        result += self._BuildResponse(
            '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
            body)

      elif path == '/stats.txt':
        results = {
            'max_pipeline_depth': self._max_pipeline_depth,
            'were_all_requests_http_1_1': int(self._were_all_requests_http_1_1),
        }
        body = ','.join(['%s:%s' % (k, v) for k, v in results.items()])
        result += self._BuildResponse(
            '200 OK',
            ['Content-Length: %s' % len(body), 'Cache-Control: no-store'], body)
        self._processed_end = True

      else:
        result += self._BuildResponse('404 Not Found',
                                      ['Content-Length: 7'], 'Go away')
      if self._processed_end:
        break
    self._requested_paths = []
    return result

  def WriteError(self, status, error):
    """Returns an HTTP response for the specified error.

    Args:
      status: (String) Response code and description (e.g. "404 Not Found")
      error: (String) Response body text.

    Returns:
      (String) Text of HTTP response.
    """
    return self._BuildResponse(
        status, ['Connection: close', 'Content-Type: text/plain'], error)

  @property
  def processed_end(self):
    # True once a connection-terminating path has been served.
    return self._processed_end

  def _BuildResponse(self, status, headers, body):
    """Builds an HTTP response.

    Args:
      status: (String) Response code and description (e.g. "200 OK")
      headers: (List of Strings) Headers (e.g. "Connection: close")
      body: (String) Response body.

    Returns:
      (String) Text of HTTP response.
    """
    return ('HTTP/1.1 %s\r\n'
            '%s\r\n'
            '\r\n'
            '%s' % (status, '\r\n'.join(headers), body))
338 |
|
339 |
|
340 |
class PipelineRequestHandler(SocketServer.BaseRequestHandler):
  """Called on an incoming TCP connection."""

  def _GetTimeUntilTimeout(self):
    # Seconds left before the whole-connection TIMEOUT expires.
    return self._start_time + TIMEOUT - time.time()

  def _GetTimeUntilNextSend(self):
    # Seconds left before buffered requests should be answered; while
    # nothing is queued (_last_queued_time == 0), wait the full TIMEOUT.
    if not self._last_queued_time:
      return TIMEOUT
    return self._last_queued_time + SEND_BUFFER_TIME - time.time()

  def handle(self):
    """Event loop for one connection: parse requests, batch responses.

    Incoming data is fed to a RequestParser; parsed requests are queued on
    a ResponseBuilder and flushed SEND_BUFFER_TIME after the last arrival,
    so pipelined requests are answered in one batch. Uses select.epoll, so
    this server is Linux-only.
    """
    self._request_parser = RequestParser()
    self._response_builder = ResponseBuilder()
    self._last_queued_time = 0
    self._num_queued = 0
    self._num_written = 0
    self._send_buffer = ""
    self._start_time = time.time()
    try:
      poller = select.epoll(sizehint=1)
      poller.register(self.request.fileno(), select.EPOLLIN)
      # Keep going until an end-marking path was served AND the response
      # buffer has fully drained.
      while not self._response_builder.processed_end or self._send_buffer:

        time_left = self._GetTimeUntilTimeout()
        time_until_next_send = self._GetTimeUntilNextSend()
        max_poll_time = min(time_left, time_until_next_send) + MIN_POLL_TIME

        events = None
        if max_poll_time > 0:
          # Only ask for writability while there is data to send, to avoid
          # busy-looping on an always-writable socket.
          if self._send_buffer:
            poller.modify(self.request.fileno(),
                          select.EPOLLIN | select.EPOLLOUT)
          else:
            poller.modify(self.request.fileno(), select.EPOLLIN)
          events = poller.poll(timeout=max_poll_time)

        if self._GetTimeUntilTimeout() <= 0:
          return

        # Flush: convert everything queued so far into response bytes.
        if self._GetTimeUntilNextSend() <= 0:
          self._send_buffer += self._response_builder.BuildResponses()
          self._num_written = self._num_queued
          self._last_queued_time = 0

        for fd, mode in events:
          if mode & select.EPOLLIN:
            # Non-blocking read; an empty result means the peer closed.
            new_data = self.request.recv(MAX_REQUEST_SIZE, socket.MSG_DONTWAIT)
            if not new_data:
              return
            new_requests = self._request_parser.ParseAdditionalData(new_data)
            self._response_builder.QueueRequests(
                new_requests, self._request_parser.were_all_requests_http_1_1)
            self._num_queued += len(new_requests)
            self._last_queued_time = time.time()
          elif mode & select.EPOLLOUT:
            # Send in 4096-byte slices; the sleep throttles output so the
            # client can interleave pipelined requests. NOTE(review):
            # presumably deliberate for the pipelining tests — confirm.
            num_bytes_sent = self.request.send(self._send_buffer[0:4096])
            self._send_buffer = self._send_buffer[num_bytes_sent:]
            time.sleep(0.05)
          else:
            # EPOLLHUP/EPOLLERR etc.: give up on this connection.
            return

    except RequestTooLargeError as e:
      self.request.send(self._response_builder.WriteError(
          '413 Request Entity Too Large', e))
      raise
    except UnexpectedMethodError as e:
      self.request.send(self._response_builder.WriteError(
          '405 Method Not Allowed', e))
      raise
    except ServeIndexError:
      # Requests for '/' and friends are answered with the info page.
      self.request.send(self._response_builder.WriteError(
          '200 OK', INFO_MESSAGE))
    except Exception as e:
      print e
    self.request.close()
416 |
|
417 |
|
418 |
class PipelineServer(SocketServer.ForkingMixIn, SocketServer.TCPServer):
  """TCP server that forks a child process per connection."""
  pass
420 |
|
421 |
|
422 |
# Command line arguments supplied by the libcurl test framework (runtests.pl).
parser = argparse.ArgumentParser()
parser.add_argument("--port", action="store", default=0,
                    type=int, help="port to listen on")
parser.add_argument("--verbose", action="store", default=0,
                    type=int, help="verbose output")
parser.add_argument("--pidfile", action="store", default=0,
                    help="file name for the PID")
parser.add_argument("--logfile", action="store", default=0,
                    help="file name for the log")
parser.add_argument("--srcdir", action="store", default=0,
                    help="test directory")
parser.add_argument("--id", action="store", default=0,
                    help="server ID")
parser.add_argument("--ipv4", action="store_true", default=0,
                    help="IPv4 flag")
args = parser.parse_args()

# Record our PID so the test harness can find and terminate this server.
# 'with' guarantees the handle is closed even if the write fails.
if args.pidfile:
  with open(args.pidfile, 'w') as f:
    f.write('{}'.format(os.getpid()))

# Listen on all interfaces; port 0 lets the OS pick a free port.
server = PipelineServer(('0.0.0.0', args.port), PipelineRequestHandler)
server.allow_reuse_address = True
server.serve_forever()