httpproxy_test.py 8.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243
  1. #!/usr/bin/env python
  2. # Copyright 2015 Google Inc. All Rights Reserved.
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. import httparchive
  16. import httplib
  17. import httpproxy
  18. import threading
  19. import unittest
  20. import util
  21. class MockCustomResponseHandler(object):
  22. def __init__(self, response):
  23. """
  24. Args:
  25. response: An instance of ArchivedHttpResponse that is returned for each
  26. request.
  27. """
  28. self._response = response
  29. def handle(self, request):
  30. del request
  31. return self._response
  32. class MockHttpArchiveFetch(object):
  33. def __init__(self, response):
  34. """
  35. Args:
  36. response: An instance of ArchivedHttpResponse that is returned for each
  37. request.
  38. """
  39. self.is_record_mode = False
  40. self._response = response
  41. def __call__(self, request):
  42. del request # unused
  43. return self._response
class MockHttpArchiveHandler(httpproxy.HttpArchiveHandler):
  """Request handler that counts every request served, for test assertions."""

  def handle_one_request(self):
    # Delegate to the real handler via an explicit base-class call, then
    # record that a request was handled so tests can assert on
    # HttpProxyTest.HANDLED_REQUEST_COUNT.
    httpproxy.HttpArchiveHandler.handle_one_request(self)
    HttpProxyTest.HANDLED_REQUEST_COUNT += 1
  48. class MockRules(object):
  49. def Find(self, unused_rule_type_name): # pylint: disable=unused-argument
  50. return lambda unused_request, unused_response: None
  51. class HttpProxyTest(unittest.TestCase):
  52. def setUp(self):
  53. self.has_proxy_server_bound_port = False
  54. self.has_proxy_server_started = False
  55. self.allow_generate_304 = False
  56. self.serve_response_by_http_archive = False
  57. def set_up_proxy_server(self, response):
  58. """
  59. Args:
  60. response: An instance of ArchivedHttpResponse that is returned for each
  61. request.
  62. """
  63. HttpProxyTest.HANDLED_REQUEST_COUNT = 0
  64. self.host = 'localhost'
  65. self.port = 8889
  66. custom_handlers = MockCustomResponseHandler(
  67. response if not self.serve_response_by_http_archive else None)
  68. rules = MockRules()
  69. http_archive_fetch = MockHttpArchiveFetch(
  70. response if self.serve_response_by_http_archive else None)
  71. self.proxy_server = httpproxy.HttpProxyServer(
  72. http_archive_fetch, custom_handlers, rules,
  73. host=self.host, port=self.port,
  74. allow_generate_304=self.allow_generate_304)
  75. self.proxy_server.RequestHandlerClass = MockHttpArchiveHandler
  76. self.has_proxy_server_bound_port = True
  77. def tear_down_proxy_server(self):
  78. if self.has_proxy_server_started:
  79. self.proxy_server.shutdown()
  80. if self.has_proxy_server_bound_port:
  81. self.proxy_server.server_close()
  82. def tearDown(self):
  83. self.tear_down_proxy_server()
  84. def serve_requests_forever(self):
  85. self.has_proxy_server_started = True
  86. self.proxy_server.serve_forever(poll_interval=0.01)
  87. # Tests that handle_one_request does not leak threads, and does not try to
  88. # re-handle connections that are finished.
  89. def test_handle_one_request_closes_connection(self):
  90. # By default, BaseHTTPServer.py treats all HTTP 1.1 requests as keep-alive.
  91. # Intentionally use HTTP 1.0 to prevent this behavior.
  92. response = httparchive.ArchivedHttpResponse(
  93. version=10, status=200, reason="OK",
  94. headers=[], response_data=["bat1"])
  95. self.set_up_proxy_server(response)
  96. t = threading.Thread(
  97. target=HttpProxyTest.serve_requests_forever, args=(self,))
  98. t.start()
  99. initial_thread_count = threading.activeCount()
  100. # Make a bunch of requests.
  101. request_count = 10
  102. for _ in range(request_count):
  103. conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
  104. conn.request("GET", "/index.html")
  105. res = conn.getresponse().read()
  106. self.assertEqual(res, "bat1")
  107. conn.close()
  108. # Check to make sure that there is no leaked thread.
  109. util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 2)
  110. self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)
  111. # Tests that keep-alive header works.
  112. def test_keep_alive_header(self):
  113. response = httparchive.ArchivedHttpResponse(
  114. version=11, status=200, reason="OK",
  115. headers=[("Connection", "keep-alive")], response_data=["bat1"])
  116. self.set_up_proxy_server(response)
  117. t = threading.Thread(
  118. target=HttpProxyTest.serve_requests_forever, args=(self,))
  119. t.start()
  120. initial_thread_count = threading.activeCount()
  121. # Make a bunch of requests.
  122. request_count = 10
  123. connections = []
  124. for _ in range(request_count):
  125. conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
  126. conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
  127. res = conn.getresponse().read()
  128. self.assertEqual(res, "bat1")
  129. connections.append(conn)
  130. # Repeat the same requests.
  131. for conn in connections:
  132. conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
  133. res = conn.getresponse().read()
  134. self.assertEqual(res, "bat1")
  135. # Check that the right number of requests have been handled.
  136. self.assertEqual(2 * request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)
  137. # Check to make sure that exactly "request_count" new threads are active.
  138. self.assertEqual(
  139. threading.activeCount(), initial_thread_count + request_count)
  140. for conn in connections:
  141. conn.close()
  142. util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 1)
  143. # Test that opening 400 simultaneous connections does not cause httpproxy to
  144. # hit a process fd limit. The default limit is 256 fds.
  145. def test_max_fd(self):
  146. response = httparchive.ArchivedHttpResponse(
  147. version=11, status=200, reason="OK",
  148. headers=[("Connection", "keep-alive")], response_data=["bat1"])
  149. self.set_up_proxy_server(response)
  150. t = threading.Thread(
  151. target=HttpProxyTest.serve_requests_forever, args=(self,))
  152. t.start()
  153. # Make a bunch of requests.
  154. request_count = 400
  155. connections = []
  156. for _ in range(request_count):
  157. conn = httplib.HTTPConnection('localhost', 8889, timeout=10)
  158. conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
  159. res = conn.getresponse().read()
  160. self.assertEqual(res, "bat1")
  161. connections.append(conn)
  162. # Check that the right number of requests have been handled.
  163. self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)
  164. for conn in connections:
  165. conn.close()
  166. # Tests that conditional requests return 304.
  167. def test_generate_304(self):
  168. REQUEST_HEADERS = [
  169. {},
  170. {'If-Modified-Since': 'whatever'},
  171. {'If-None-Match': 'whatever yet again'}]
  172. RESPONSE_STATUSES = [200, 204, 304, 404]
  173. for allow_generate_304 in [False, True]:
  174. self.allow_generate_304 = allow_generate_304
  175. for serve_response_by_http_archive in [False, True]:
  176. self.serve_response_by_http_archive = serve_response_by_http_archive
  177. for response_status in RESPONSE_STATUSES:
  178. response = None
  179. if response_status != 404:
  180. response = httparchive.ArchivedHttpResponse(
  181. version=11, status=response_status, reason="OK", headers=[],
  182. response_data=["some content"])
  183. self.set_up_proxy_server(response)
  184. t = threading.Thread(
  185. target=HttpProxyTest.serve_requests_forever, args=(self,))
  186. t.start()
  187. for method in ['GET', 'HEAD', 'POST']:
  188. for headers in REQUEST_HEADERS:
  189. connection = httplib.HTTPConnection('localhost', 8889, timeout=10)
  190. connection.request(method, "/index.html", headers=headers)
  191. response = connection.getresponse()
  192. connection.close()
  193. if (allow_generate_304 and
  194. serve_response_by_http_archive and
  195. method in ['GET', 'HEAD'] and
  196. headers and
  197. response_status == 200):
  198. self.assertEqual(304, response.status)
  199. self.assertEqual('', response.read())
  200. else:
  201. self.assertEqual(response_status, response.status)
  202. self.tear_down_proxy_server()
# Run the tests when this module is executed directly.
if __name__ == '__main__':
  unittest.main()