# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class MockCustomResponseHandler(object):
  """Custom-response handler stub that serves one canned response to all."""

  def __init__(self, response):
    """
    Args:
      response: An instance of ArchivedHttpResponse that is returned for each
        request.
    """
    self._response = response

  def handle(self, request):
    # The request content is irrelevant: every request gets the same
    # canned response.  NOTE(review): trailing statements of this method were
    # missing from the mangled source; reconstructed as a plain return.
    del request
    return self._response
class MockHttpArchiveFetch(object):
  """Archive-fetch stub: replay mode, never matches an archived response."""

  def __init__(self):
    # Replay (not record) mode; the proxy checks this flag.
    self.is_record_mode = False

  def __call__(self, request):
    # No archive behind this mock, so no request ever finds a response.
    return None
class MockHttpArchiveHandler(httpproxy.HttpArchiveHandler):
  """Request handler that counts every request the proxy handles."""

  def handle_one_request(self):
    # Delegate the real work to the production handler, then bump the
    # shared counter that the tests assert against.
    httpproxy.HttpArchiveHandler.handle_one_request(self)
    HttpProxyTest.HANDLED_REQUEST_COUNT += 1
class MockRules(object):
  """Rules stub whose lookups always yield a no-op rule callable."""

  def Find(self, unused_rule_type_name):  # pylint: disable=unused-argument
    # Whatever rule type is requested, return a rule that does nothing.
    return lambda unused_request, unused_response: None
class HttpProxyTest(unittest.TestCase):
  """Exercises httpproxy.HttpProxyServer over real local HTTP connections.

  NOTE(review): the source for this class was whitespace-mangled with several
  statements missing (setUp/tearDown headers, thread starts, request counts,
  connection bookkeeping); they are reconstructed here from the surviving
  fragments and comments — confirm against upstream before relying on exact
  values.
  """

  # Incremented by MockHttpArchiveHandler once per handled request; reset by
  # set_up_proxy_server at the start of each test.
  HANDLED_REQUEST_COUNT = 0

  def setUp(self):
    # Track how far server setup got so tearDown only undoes what happened.
    self.has_proxy_server_bound_port = False
    self.has_proxy_server_started = False

  def set_up_proxy_server(self, response):
    """Binds an HttpProxyServer on localhost:8889 serving one canned response.

    Args:
      response: An instance of ArchivedHttpResponse that is returned for each
        request.
    """
    HttpProxyTest.HANDLED_REQUEST_COUNT = 0
    self.host = 'localhost'
    self.port = 8889
    custom_handlers = MockCustomResponseHandler(response)
    rules = MockRules()
    http_archive_fetch = MockHttpArchiveFetch()
    self.proxy_server = httpproxy.HttpProxyServer(
        http_archive_fetch, custom_handlers, rules,
        host=self.host, port=self.port)
    # Swap in the counting handler so tests can observe request handling.
    self.proxy_server.RequestHandlerClass = MockHttpArchiveHandler
    self.has_proxy_server_bound_port = True

  def tearDown(self):
    # Stop serve_forever() first (if it is running), then release the socket.
    if self.has_proxy_server_started:
      self.proxy_server.shutdown()
    if self.has_proxy_server_bound_port:
      self.proxy_server.server_close()

  def serve_requests_forever(self):
    self.has_proxy_server_started = True
    # Short poll interval so shutdown() in tearDown returns promptly.
    self.proxy_server.serve_forever(poll_interval=0.01)

  # Tests that handle_one_request does not leak threads, and does not try to
  # re-handle connections that are finished.
  def test_handle_one_request_closes_connection(self):
    # By default, BaseHTTPServer.py treats all HTTP 1.1 requests as keep-alive.
    # Intentionally use HTTP 1.0 to prevent this behavior.
    response = httparchive.ArchivedHttpResponse(
        version=10, status=200, reason="OK",
        headers=[], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    initial_thread_count = threading.activeCount()

    # Make a bunch of requests.
    request_count = 10
    for _ in range(request_count):
      conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
      conn.request("GET", "/index.html")
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      conn.close()

    # Check to make sure that there is no leaked thread.
    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 2)

    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

  # Tests that keep-alive header works.
  def test_keep_alive_header(self):
    response = httparchive.ArchivedHttpResponse(
        version=11, status=200, reason="OK",
        headers=[("Connection", "keep-alive")], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    initial_thread_count = threading.activeCount()

    # Make a bunch of requests.
    request_count = 10
    connections = []
    for _ in range(request_count):
      conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      connections.append(conn)

    # Repeat the same requests on the already-open connections.
    for conn in connections:
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")

    # Check that the right number of requests have been handled.
    self.assertEqual(2 * request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

    # Check to make sure that exactly "request_count" new threads are active.
    self.assertEqual(
        threading.activeCount(), initial_thread_count + request_count)

    for conn in connections:
      conn.close()

    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 1)

  # Test that opening 400 simultaneous connections does not cause httpproxy to
  # hit a process fd limit. The default limit is 256 fds.
  def test_max_fd(self):
    response = httparchive.ArchivedHttpResponse(
        version=11, status=200, reason="OK",
        headers=[("Connection", "keep-alive")], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    # Make a bunch of requests.
    request_count = 400
    connections = []
    for _ in range(request_count):
      conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      connections.append(conn)

    # Check that the right number of requests have been handled.
    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

    for conn in connections:
      conn.close()
# Script entry point: run this module's test cases under unittest.
if __name__ == '__main__':
  unittest.main()