Bug 1428709 - Add six for Python 3 compat in mozhttpd; r?davehunt
author: Raphael Pierzina <rpierzina@mozilla.com>
date: Tue, 03 Jul 2018 11:42:29 +0200
changeset: 817843 c545c5db861369ecbd5a6648eae0d281762444e0
parent: 817336 2e6d6ca5ab87f8127f79fccfee8213315d23c581
child: 817844 3330c76a7ff04079bf4af9a4099cbb2401c48b1f
push id: 116182
push user: bmo:rpierzina@mozilla.com
push date: Fri, 13 Jul 2018 14:20:25 +0000
reviewers: davehunt
bugs: 1428709
milestone: 63.0a1
Bug 1428709 - Add six for Python 3 compat in mozhttpd; r?davehunt MozReview-Commit-ID: 1fNdmG9YVQq
testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
testing/mozbase/mozhttpd/setup.py
testing/mozbase/mozhttpd/tests/api.py
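
The change swaps the Python 2-only standard library imports (BaseHTTPServer, SimpleHTTPServer, SocketServer, urllib, urlparse) for their six.moves equivalents so mozhttpd.py imports cleanly under both Python 2 and Python 3. A minimal, illustrative sketch of that mapping, separate from the patch below:

from six.moves.BaseHTTPServer import HTTPServer                   # BaseHTTPServer / http.server
from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler   # SimpleHTTPServer / http.server
from six.moves.socketserver import ThreadingMixIn                 # SocketServer / socketserver
from six.moves.urllib.parse import unquote, urlsplit              # urlparse, urllib / urllib.parse


class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
    """Same composition as mozhttpd's EasyServer: one thread per request."""


if __name__ == '__main__':
    # urlsplit()/unquote() are the pieces Request.__init__() and
    # translate_path() rely on; prints "/a b" on both Python 2 and 3.
    print(unquote(urlsplit('http://example.org/a%20b?x=1').path))

    # Serve the current directory on an ephemeral port under either interpreter.
    server = ThreadingHTTPServer(('127.0.0.1', 0), SimpleHTTPRequestHandler)
    print('listening on port %d' % server.server_port)
    server.serve_forever()
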
--- a/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
+++ b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py
@@ -1,34 +1,40 @@
 #!/usr/bin/env python
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function
 
-import BaseHTTPServer
-import SimpleHTTPServer
+
 import errno
 import logging
 import threading
 import posixpath
 import socket
 import sys
 import os
-import urllib
-import urlparse
 import re
 import moznetwork
 import time
-from SocketServer import ThreadingMixIn
+
+from six import iteritems
+from six.moves.socketserver import ThreadingMixIn
+from six.moves.BaseHTTPServer import HTTPServer
+
+from six.moves.urllib.parse import (
+    urlsplit,
+    unquote,
+)
+from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler
 
 
-class EasyServer(ThreadingMixIn, BaseHTTPServer.HTTPServer):
+class EasyServer(ThreadingMixIn, HTTPServer):
     allow_reuse_address = True
     acceptable_errors = (errno.EPIPE, errno.ECONNABORTED)
 
     def handle_error(self, request, client_address):
         error = sys.exc_info()[1]
 
         if ((isinstance(error, socket.error) and
              isinstance(error.args, tuple) and
@@ -45,96 +51,96 @@ class Request(object):
     """Details of a request."""
 
     # attributes from urlsplit that this class also sets
     uri_attrs = ('scheme', 'netloc', 'path', 'query', 'fragment')
 
     def __init__(self, uri, headers, rfile=None):
         self.uri = uri
         self.headers = headers
-        parsed = urlparse.urlsplit(uri)
+        parsed = urlsplit(uri)
         for i, attr in enumerate(self.uri_attrs):
             setattr(self, attr, parsed[i])
         try:
             body_len = int(self.headers.get('Content-length', 0))
         except ValueError:
             body_len = 0
         if body_len and rfile:
             self.body = rfile.read(body_len)
         else:
             self.body = None
 
 
-class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+class RequestHandler(SimpleHTTPRequestHandler):
 
     docroot = os.getcwd()  # current working directory at time of import
     proxy_host_dirs = False
     request_log = []
     log_requests = False
     request = None
 
     def __init__(self, *args, **kwargs):
-        SimpleHTTPServer.SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
+        SimpleHTTPRequestHandler.__init__(self, *args, **kwargs)
         self.extensions_map['.svg'] = 'image/svg+xml'
 
     def _try_handler(self, method):
         if self.log_requests:
             self.request_log.append({'method': method,
                                      'path': self.request.path,
                                      'time': time.time()})
 
         handlers = [handler for handler in self.urlhandlers
                     if handler['method'] == method]
         for handler in handlers:
             m = re.match(handler['path'], self.request.path)
             if m:
                 (response_code, headerdict, data) = \
                     handler['function'](self.request, *m.groups())
                 self.send_response(response_code)
-                for (keyword, value) in headerdict.iteritems():
+                for (keyword, value) in iteritems(headerdict):
                     self.send_header(keyword, value)
                 self.end_headers()
                 self.wfile.write(data)
 
                 return True
 
         return False
 
     def _find_path(self):
         """Find the on-disk path to serve this request from,
         using self.path_mappings and self.docroot.
         Return (url_path, disk_path)."""
         path_components = filter(None, self.request.path.split('/'))
-        for prefix, disk_path in self.path_mappings.iteritems():
+        for prefix, disk_path in iteritems(self.path_mappings):
             prefix_components = filter(None, prefix.split('/'))
             if len(path_components) < len(prefix_components):
                 continue
             if path_components[:len(prefix_components)] == prefix_components:
                 return ('/'.join(path_components[len(prefix_components):]),
                         disk_path)
         if self.docroot:
             return self.request.path, self.docroot
         return None
 
     def parse_request(self):
-        retval = SimpleHTTPServer.SimpleHTTPRequestHandler.parse_request(self)
+        retval = SimpleHTTPRequestHandler.parse_request(self)
         self.request = Request(self.path, self.headers, self.rfile)
         return retval
 
     def do_GET(self):
         if not self._try_handler('GET'):
             res = self._find_path()
             if res:
                 self.path, self.disk_root = res
                 # don't include query string and fragment, and prepend
                 # host directory if required.
                 if self.request.netloc and self.proxy_host_dirs:
                     self.path = '/' + self.request.netloc + \
                         self.path
-                SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
+                SimpleHTTPRequestHandler.do_GET(self)
             else:
                 self.send_response(404)
                 self.end_headers()
                 self.wfile.write('')
 
     def do_POST(self):
         # if we don't have a match, we always fall through to 404 (this may
         # not be "technically" correct if we have a local file at the same
@@ -153,17 +159,17 @@ class RequestHandler(SimpleHTTPServer.Si
             self.end_headers()
             self.wfile.write('')
 
     def translate_path(self, path):
         # this is taken from SimpleHTTPRequestHandler.translate_path(),
         # except we serve from self.docroot instead of os.getcwd(), and
         # parse_request()/do_GET() have already stripped the query string and
         # fragment and mangled the path for proxying, if required.
-        path = posixpath.normpath(urllib.unquote(self.path))
+        path = posixpath.normpath(unquote(self.path))
         words = path.split('/')
         words = filter(None, words)
         path = self.disk_root
         for word in words:
             drive, word = os.path.splitdrive(word)
             head, word = os.path.split(word)
             if word in (os.curdir, os.pardir):
                 continue
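
Besides the module moves, the dict iteration in _try_handler() and _find_path() switches from the Python 2-only dict.iteritems() method to six.iteritems(), which calls iteritems() on Python 2 and items() on Python 3. A small illustrative sketch of the same pattern:

from six import iteritems

# Illustrative only: mirrors the header loop in _try_handler(), which walks
# the handler's headerdict and emits one header per entry.
headerdict = {'Content-Type': 'application/json', 'Content-Length': '17'}
for keyword, value in iteritems(headerdict):
    print('%s: %s' % (keyword, value))
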
--- a/testing/mozbase/mozhttpd/setup.py
+++ b/testing/mozbase/mozhttpd/setup.py
@@ -2,17 +2,17 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
 from setuptools import setup
 
 PACKAGE_VERSION = '0.7'
-deps = ['moznetwork >= 0.24']
+deps = ['moznetwork >= 0.24', 'mozinfo >= 1.0.0', 'six >= 1.10.0']
 
 setup(name='mozhttpd',
       version=PACKAGE_VERSION,
       description="Python webserver intended for use with Mozilla testing",
       long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html",
       classifiers=['Programming Language :: Python :: 2.7',
                    'Programming Language :: Python :: 2 :: Only'],
       # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
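
setup.py now declares six (alongside mozinfo) as an install requirement so the new imports are satisfied at install time. A quick, illustrative sanity check that the declared floor provides every six.moves name used in this patch (assuming six >= 1.10.0 is what ends up installed):

# Illustrative check, not part of the patch: every move mozhttpd and its
# tests import should resolve with the declared six requirement.
import six
from six.moves import BaseHTTPServer, SimpleHTTPServer, socketserver  # noqa: F401
from six.moves.urllib import error, parse, request  # noqa: F401

print('six %s provides the required moves' % six.__version__)
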
--- a/testing/mozbase/mozhttpd/tests/api.py
+++ b/testing/mozbase/mozhttpd/tests/api.py
@@ -3,24 +3,33 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
 import mozfile
 import mozhttpd
-import urllib2
 import os
 import unittest
 import json
 import tempfile
 
 import mozunit
 
+from six.moves.urllib.request import (
+    HTTPHandler,
+    ProxyHandler,
+    Request,
+    build_opener,
+    install_opener,
+    urlopen,
+)
+from six.moves.urllib.error import HTTPError
+
 here = os.path.dirname(os.path.abspath(__file__))
 
 
 class ApiTest(unittest.TestCase):
     resource_get_called = 0
     resource_post_called = 0
     resource_del_called = 0
 
@@ -49,48 +58,50 @@ class ApiTest(unittest.TestCase):
         url = "http://127.0.0.1:%s%s" % (server_port, path)
         if querystr:
             url += "?%s" % querystr
         return url
 
     def try_get(self, server_port, querystr):
         self.resource_get_called = 0
 
-        f = urllib2.urlopen(self.get_url('/api/resource/1', server_port, querystr))
+        f = urlopen(self.get_url('/api/resource/1', server_port, querystr))
         try:
             self.assertEqual(f.getcode(), 200)
         except AttributeError:
             pass  # python 2.4
         self.assertEqual(json.loads(f.read()), {'called': 1, 'id': str(1), 'query': querystr})
         self.assertEqual(self.resource_get_called, 1)
 
     def try_post(self, server_port, querystr):
         self.resource_post_called = 0
 
         postdata = {'hamburgers': '1234'}
         try:
-            f = urllib2.urlopen(self.get_url('/api/resource/', server_port, querystr),
-                                data=json.dumps(postdata))
-        except urllib2.HTTPError as e:
+            f = urlopen(
+                self.get_url('/api/resource/', server_port, querystr),
+                data=json.dumps(postdata),
+            )
+        except HTTPError as e:
             # python 2.4
             self.assertEqual(e.code, 201)
             body = e.fp.read()
         else:
             self.assertEqual(f.getcode(), 201)
             body = f.read()
         self.assertEqual(json.loads(body), {'called': 1,
                                             'data': postdata,
                                             'query': querystr})
         self.assertEqual(self.resource_post_called, 1)
 
     def try_del(self, server_port, querystr):
         self.resource_del_called = 0
 
-        opener = urllib2.build_opener(urllib2.HTTPHandler)
-        request = urllib2.Request(self.get_url('/api/resource/1', server_port, querystr))
+        opener = build_opener(HTTPHandler)
+        request = Request(self.get_url('/api/resource/1', server_port, querystr))
         request.get_method = lambda: 'DEL'
         f = opener.open(request)
 
         try:
             self.assertEqual(f.getcode(), 200)
         except AttributeError:
             pass  # python 2.4
         self.assertEqual(json.loads(f.read()), {'called': 1, 'id': str(1), 'query': querystr})
@@ -122,149 +133,152 @@ class ApiTest(unittest.TestCase):
 
         # DEL
         self.try_del(server_port, '')
         self.try_del(server_port, '?foo=bar')
 
         # GET: By default we don't serve any files if we just define an API
         exception_thrown = False
         try:
-            urllib2.urlopen(self.get_url('/', server_port, None))
-        except urllib2.HTTPError as e:
+            urlopen(self.get_url('/', server_port, None))
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
 
     def test_nonexistent_resources(self):
         # Create a server with a placeholder handler so we don't fall back
         # to serving local files
         httpd = mozhttpd.MozHttpd(port=0)
         httpd.start(block=False)
         server_port = httpd.httpd.server_port
 
         # GET: Return 404 for non-existent endpoint
         exception_thrown = False
         try:
-            urllib2.urlopen(self.get_url('/api/resource/', server_port, None))
-        except urllib2.HTTPError as e:
+            urlopen(self.get_url('/api/resource/', server_port, None))
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
 
         # POST: POST should also return 404
         exception_thrown = False
         try:
-            urllib2.urlopen(self.get_url('/api/resource/', server_port, None),
-                            data=json.dumps({}))
-        except urllib2.HTTPError as e:
+            urlopen(
+                self.get_url('/api/resource/', server_port, None),
+                data=json.dumps({}),
+            )
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
 
         # DEL: DEL should also return 404
         exception_thrown = False
         try:
-            opener = urllib2.build_opener(urllib2.HTTPHandler)
-            request = urllib2.Request(self.get_url('/api/resource/', server_port,
-                                                   None))
+            opener = build_opener(HTTPHandler)
+            request = Request(self.get_url('/api/resource/', server_port, None))
             request.get_method = lambda: 'DEL'
             opener.open(request)
-        except urllib2.HTTPError:
+        except HTTPError as e:
             self.assertEqual(e.code, 404)
             exception_thrown = True
         self.assertTrue(exception_thrown)
 
     def test_api_with_docroot(self):
         httpd = mozhttpd.MozHttpd(port=0, docroot=here,
                                   urlhandlers=[{'method': 'GET',
                                                 'path': '/api/resource/([^/]+)/?',
                                                 'function': self.resource_get}])
         httpd.start(block=False)
         server_port = httpd.httpd.server_port
 
         # We defined a docroot, so we expect a directory listing
-        f = urllib2.urlopen(self.get_url('/', server_port, None))
+        f = urlopen(self.get_url('/', server_port, None))
         try:
             self.assertEqual(f.getcode(), 200)
         except AttributeError:
             pass  # python 2.4
         self.assertTrue('Directory listing for' in f.read())
 
         # Make sure API methods still work
         self.try_get(server_port, '')
         self.try_get(server_port, '?foo=bar')
 
 
 class ProxyTest(unittest.TestCase):
 
     def tearDown(self):
         # reset proxy opener in case it changed
-        urllib2.install_opener(None)
+        install_opener(None)
 
     def test_proxy(self):
         docroot = tempfile.mkdtemp()
         self.addCleanup(mozfile.remove, docroot)
         hosts = ('mozilla.com', 'mozilla.org')
         unproxied_host = 'notmozilla.org'
 
         def url(host): return 'http://%s/' % host
 
         index_filename = 'index.html'
 
         def index_contents(host): return '%s index' % host
 
-        index = file(os.path.join(docroot, index_filename), 'w')
+        index = open(os.path.join(docroot, index_filename), 'w')
         index.write(index_contents('*'))
         index.close()
 
         httpd = mozhttpd.MozHttpd(port=0, docroot=docroot)
         httpd.start(block=False)
         server_port = httpd.httpd.server_port
 
-        proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
-                                              server_port})
-        urllib2.install_opener(urllib2.build_opener(proxy_support))
+        proxy_support = ProxyHandler({
+            'http': 'http://127.0.0.1:%d' % server_port,
+        })
+        install_opener(build_opener(proxy_support))
 
         for host in hosts:
-            f = urllib2.urlopen(url(host))
+            f = urlopen(url(host))
             try:
                 self.assertEqual(f.getcode(), 200)
             except AttributeError:
                 pass  # python 2.4
             self.assertEqual(f.read(), index_contents('*'))
 
         httpd.stop()
 
         # test separate directories per host
 
         httpd = mozhttpd.MozHttpd(port=0, docroot=docroot, proxy_host_dirs=True)
         httpd.start(block=False)
         server_port = httpd.httpd.server_port
 
-        proxy_support = urllib2.ProxyHandler({'http': 'http://127.0.0.1:%d' %
-                                              server_port})
-        urllib2.install_opener(urllib2.build_opener(proxy_support))
+        proxy_support = ProxyHandler({
+            'http': 'http://127.0.0.1:%d' % server_port,
+        })
+        install_opener(build_opener(proxy_support))
 
         # set up dirs
         for host in hosts:
             os.mkdir(os.path.join(docroot, host))
-            file(os.path.join(docroot, host, index_filename), 'w') \
+            open(os.path.join(docroot, host, index_filename), 'w') \
                 .write(index_contents(host))
 
         for host in hosts:
-            f = urllib2.urlopen(url(host))
+            f = urlopen(url(host))
             try:
                 self.assertEqual(f.getcode(), 200)
             except AttributeError:
                 pass  # python 2.4
             self.assertEqual(f.read(), index_contents(host))
 
         exc = None
         try:
-            urllib2.urlopen(url(unproxied_host))
-        except urllib2.HTTPError as e:
+            urlopen(url(unproxied_host))
+        except HTTPError as e:
             exc = e
         self.assertNotEqual(exc, None)
         self.assertEqual(exc.code, 404)
 
 
 if __name__ == '__main__':
     mozunit.main()
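
The tests now drive the server through six.moves.urllib.request and six.moves.urllib.error rather than urllib2. A minimal, self-contained sketch of the same pattern (the handler and payload here are illustrative, not copied from the suite):

import json

import mozhttpd
from six.moves.urllib.request import urlopen


def resource_get(request, objid):
    # mozhttpd handlers take (request, *regex groups) and return
    # (status code, header dict, body).
    return (200, {'Content-Type': 'application/json'}, json.dumps({'id': objid}))


httpd = mozhttpd.MozHttpd(port=0, urlhandlers=[{'method': 'GET',
                                                'path': '/api/resource/([^/]+)/?',
                                                'function': resource_get}])
httpd.start(block=False)
port = httpd.httpd.server_port

f = urlopen('http://127.0.0.1:%d/api/resource/1' % port)
assert f.getcode() == 200
assert json.loads(f.read()) == {'id': '1'}
httpd.stop()
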