Reland "httplib: Update to 0.11.3"

This is a reland of dd0c341bd2

The problem is that chromium-review.googlesource.com started requiring SNI.
httplib2 0.9.2 does not support that; SNI support appears to have been added in 0.10.1:

8aaecf4496


Original change's description:
> httplib: Update to 0.11.3
>
> The `DST Root CA X3` certificate was missing from cacerts.txt in the old
> version. This resulted in failure to connect to Let's Encrypt hosts.
> e.g., https://review.coreboot.org
>
> BUG=none
> TEST=Made sure my_activity.py continues to function and it can also
> connect to review.coreboot.org
>
> Signed-off-by: Raul E Rangel <rrangel@chromium.org>
> Change-Id: I519916b58a59b8f13c227218e93c392a63a24800
> Reviewed-on: https://chromium-review.googlesource.com/1173064
> Reviewed-by: Robbie Iannucci <iannucci@chromium.org>
> Commit-Queue: Robbie Iannucci <iannucci@chromium.org>

Bug: 914564, 891505
Change-Id: Icd2a57da802864b2ebb17417b73434e1284a292d
Reviewed-on: https://chromium-review.googlesource.com/c/1432633
Auto-Submit: Edward Lesmes <ehmaldonado@chromium.org>
Commit-Queue: Robbie Iannucci <iannucci@chromium.org>
Reviewed-by: Robbie Iannucci <iannucci@chromium.org>
changes/33/1432633/4
Raul E Rangel 7 years ago committed by Commit Bot
parent 4d965ee2d8
commit edfbc9ced2

@ -1,8 +1,8 @@
Name: httplib2 Name: httplib2
Short Name: httplib2 Short Name: httplib2
URL: https://github.com/httplib2/httplib2 URL: https://github.com/httplib2/httplib2
Version: 0.9.2 Version: 0.11.3
Revision: cf631a73e2f3f43897b65206127ced82382d35f5 Revision: 70fb0c820d2e8211992b402d34444e4b32a1cb6e
License: MIT License License: MIT License
Description: Description:

@ -1,4 +1,4 @@
from __future__ import generators from __future__ import print_function
""" """
httplib2 httplib2
@ -20,9 +20,10 @@ __contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
"Jonathan Feinberg", "Jonathan Feinberg",
"Blair Zajac", "Blair Zajac",
"Sam Ruby", "Sam Ruby",
"Louis Nyffenegger"] "Louis Nyffenegger",
"Alex Yu"]
__license__ = "MIT" __license__ = "MIT"
__version__ = "0.9.2" __version__ = '0.11.3'
import re import re
import sys import sys
@ -64,31 +65,54 @@ except ImportError:
socks = None socks = None
# Build the appropriate socket wrapper for ssl # Build the appropriate socket wrapper for ssl
ssl = None
ssl_SSLError = None
ssl_CertificateError = None
try: try:
import ssl # python 2.6 import ssl # python 2.6
ssl_SSLError = ssl.SSLError except ImportError:
def _ssl_wrap_socket(sock, key_file, cert_file, pass
disable_validation, ca_certs): if ssl is not None:
if disable_validation: ssl_SSLError = getattr(ssl, 'SSLError', None)
cert_reqs = ssl.CERT_NONE ssl_CertificateError = getattr(ssl, 'CertificateError', None)
else:
cert_reqs = ssl.CERT_REQUIRED
# We should be specifying SSL version 3 or TLS v1, but the ssl module def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation,
# doesn't expose the necessary knobs. So we need to go with the default ca_certs, ssl_version, hostname):
# of SSLv23. if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
cert_reqs = ssl.CERT_REQUIRED
if ssl_version is None:
ssl_version = ssl.PROTOCOL_SSLv23
if hasattr(ssl, 'SSLContext'): # Python 2.7.9
context = ssl.SSLContext(ssl_version)
context.verify_mode = cert_reqs
context.check_hostname = (cert_reqs != ssl.CERT_NONE)
if cert_file:
context.load_cert_chain(cert_file, key_file)
if ca_certs:
context.load_verify_locations(ca_certs)
return context.wrap_socket(sock, server_hostname=hostname)
else:
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file, return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
cert_reqs=cert_reqs, ca_certs=ca_certs) cert_reqs=cert_reqs, ca_certs=ca_certs,
except (AttributeError, ImportError): ssl_version=ssl_version)
ssl_SSLError = None
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs): def _ssl_wrap_socket_unsupported(sock, key_file, cert_file, disable_validation,
if not disable_validation: ca_certs, ssl_version, hostname):
raise CertificateValidationUnsupported( if not disable_validation:
"SSL certificate validation is not supported without " raise CertificateValidationUnsupported(
"the ssl module installed. To avoid this error, install " "SSL certificate validation is not supported without "
"the ssl module, or explicity disable validation.") "the ssl module installed. To avoid this error, install "
ssl_sock = socket.ssl(sock, key_file, cert_file) "the ssl module, or explicity disable validation.")
return httplib.FakeSocket(sock, ssl_sock) ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
if ssl is None:
_ssl_wrap_socket = _ssl_wrap_socket_unsupported
if sys.version_info >= (2,3): if sys.version_info >= (2,3):
@ -122,6 +146,7 @@ if sys.version_info < (2,4):
seq.sort() seq.sort()
return seq return seq
# Python 2.3 support # Python 2.3 support
def HTTPResponse__getheaders(self): def HTTPResponse__getheaders(self):
"""Return list of (header, value) tuples.""" """Return list of (header, value) tuples."""
@ -162,6 +187,8 @@ class CertificateHostnameMismatch(SSLHandshakeError):
self.host = host self.host = host
self.cert = cert self.cert = cert
class NotRunningAppEngineEnvironment(HttpLib2Error): pass
# Open Items: # Open Items:
# ----------- # -----------
# Proxy support # Proxy support
@ -197,6 +224,7 @@ except ImportError:
# Which headers are hop-by-hop headers by default # Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response): def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP) hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
@ -204,6 +232,7 @@ def _get_end2end_headers(response):
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri): def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986. """Parses a URI using the regex given in Appendix B of RFC 3986.
@ -212,6 +241,7 @@ def parse_uri(uri):
groups = URI.match(uri).groups() groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8]) return (groups[1], groups[3], groups[4], groups[6], groups[8])
def urlnorm(uri): def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri) (scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority: if not scheme or not authority:
@ -232,6 +262,7 @@ def urlnorm(uri):
re_url_scheme = re.compile(r'^\w+://') re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+') re_slash = re.compile(r'[?/:|]+')
def safename(filename): def safename(filename):
"""Return a filename suitable for the cache. """Return a filename suitable for the cache.
@ -260,12 +291,15 @@ def safename(filename):
return ",".join((filename, filemd5)) return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers): def _normalize_headers(headers):
return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()])
def _parse_cache_control(headers): def _parse_cache_control(headers):
retval = {} retval = {}
if headers.has_key('cache-control'): if 'cache-control' in headers:
parts = headers['cache-control'].split(',') parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")] parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
@ -290,7 +324,7 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
"""Returns a dictionary of dictionaries, one dict """Returns a dictionary of dictionaries, one dict
per auth_scheme.""" per auth_scheme."""
retval = {} retval = {}
if headers.has_key(headername): if headername in headers:
try: try:
authenticate = headers[headername].strip() authenticate = headers[headername].strip()
@ -318,6 +352,7 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
return retval return retval
# TODO: add current time as _entry_disposition argument to avoid sleep in tests
def _entry_disposition(response_headers, request_headers): def _entry_disposition(response_headers, request_headers):
"""Determine freshness from the Date, Expires and Cache-Control headers. """Determine freshness from the Date, Expires and Cache-Control headers.
@ -350,26 +385,26 @@ def _entry_disposition(response_headers, request_headers):
cc = _parse_cache_control(request_headers) cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers) cc_response = _parse_cache_control(response_headers)
if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1: if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
retval = "TRANSPARENT" retval = "TRANSPARENT"
if 'cache-control' not in request_headers: if 'cache-control' not in request_headers:
request_headers['cache-control'] = 'no-cache' request_headers['cache-control'] = 'no-cache'
elif cc.has_key('no-cache'): elif 'no-cache' in cc:
retval = "TRANSPARENT" retval = "TRANSPARENT"
elif cc_response.has_key('no-cache'): elif 'no-cache' in cc_response:
retval = "STALE" retval = "STALE"
elif cc.has_key('only-if-cached'): elif 'only-if-cached' in cc:
retval = "FRESH" retval = "FRESH"
elif response_headers.has_key('date'): elif 'date' in response_headers:
date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date'])) date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
now = time.time() now = time.time()
current_age = max(0, now - date) current_age = max(0, now - date)
if cc_response.has_key('max-age'): if 'max-age' in cc_response:
try: try:
freshness_lifetime = int(cc_response['max-age']) freshness_lifetime = int(cc_response['max-age'])
except ValueError: except ValueError:
freshness_lifetime = 0 freshness_lifetime = 0
elif response_headers.has_key('expires'): elif 'expires' in response_headers:
expires = email.Utils.parsedate_tz(response_headers['expires']) expires = email.Utils.parsedate_tz(response_headers['expires'])
if None == expires: if None == expires:
freshness_lifetime = 0 freshness_lifetime = 0
@ -377,12 +412,12 @@ def _entry_disposition(response_headers, request_headers):
freshness_lifetime = max(0, calendar.timegm(expires) - date) freshness_lifetime = max(0, calendar.timegm(expires) - date)
else: else:
freshness_lifetime = 0 freshness_lifetime = 0
if cc.has_key('max-age'): if 'max-age' in cc:
try: try:
freshness_lifetime = int(cc['max-age']) freshness_lifetime = int(cc['max-age'])
except ValueError: except ValueError:
freshness_lifetime = 0 freshness_lifetime = 0
if cc.has_key('min-fresh'): if 'min-fresh' in cc:
try: try:
min_fresh = int(cc['min-fresh']) min_fresh = int(cc['min-fresh'])
except ValueError: except ValueError:
@ -392,6 +427,7 @@ def _entry_disposition(response_headers, request_headers):
retval = "FRESH" retval = "FRESH"
return retval return retval
def _decompressContent(response, new_content): def _decompressContent(response, new_content):
content = new_content content = new_content
try: try:
@ -400,21 +436,22 @@ def _decompressContent(response, new_content):
if encoding == 'gzip': if encoding == 'gzip':
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
if encoding == 'deflate': if encoding == 'deflate':
content = zlib.decompress(content) content = zlib.decompress(content, -zlib.MAX_WBITS)
response['content-length'] = str(len(content)) response['content-length'] = str(len(content))
# Record the historical presence of the encoding in a way the won't interfere. # Record the historical presence of the encoding in a way the won't interfere.
response['-content-encoding'] = response['content-encoding'] response['-content-encoding'] = response['content-encoding']
del response['content-encoding'] del response['content-encoding']
except IOError: except (IOError, zlib.error):
content = "" content = ""
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
return content return content
def _updateCache(request_headers, response_headers, content, cache, cachekey): def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey: if cachekey:
cc = _parse_cache_control(request_headers) cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers) cc_response = _parse_cache_control(response_headers)
if cc.has_key('no-store') or cc_response.has_key('no-store'): if 'no-store' in cc or 'no-store' in cc_response:
cache.delete(cachekey) cache.delete(cachekey)
else: else:
info = email.Message.Message() info = email.Message.Message()
@ -496,7 +533,6 @@ class Authentication(object):
return False return False
class BasicAuthentication(Authentication): class BasicAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http): def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
@ -550,7 +586,7 @@ class DigestAuthentication(Authentication):
self.challenge['nc'] += 1 self.challenge['nc'] += 1
def response(self, response, content): def response(self, response, content):
if not response.has_key('authentication-info'): if 'authentication-info' not in response:
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {}) challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'): if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce'] self.challenge['nonce'] = challenge['nonce']
@ -559,7 +595,7 @@ class DigestAuthentication(Authentication):
else: else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {}) updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if updated_challenge.has_key('nextnonce'): if 'nextnonce' in updated_challenge:
self.challenge['nonce'] = updated_challenge['nextnonce'] self.challenge['nonce'] = updated_challenge['nextnonce']
self.challenge['nc'] = 1 self.challenge['nc'] = 1
return False return False
@ -649,6 +685,7 @@ class WsseAuthentication(Authentication):
cnonce, cnonce,
iso_now) iso_now)
class GoogleLoginAuthentication(Authentication): class GoogleLoginAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http): def __init__(self, credentials, host, request_uri, headers, response, content, http):
from urllib import urlencode from urllib import urlencode
@ -688,12 +725,13 @@ AUTH_SCHEME_CLASSES = {
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object): class FileCache(object):
"""Uses a local directory as a store for cached files. """Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to Not really safe to use if multiple threads or processes are going to
be running on the same cache. be running on the same cache.
""" """
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache self.cache = cache
self.safe = safe self.safe = safe
if not os.path.exists(cache): if not os.path.exists(cache):
@ -721,6 +759,7 @@ class FileCache(object):
if os.path.exists(cacheFullPath): if os.path.exists(cacheFullPath):
os.remove(cacheFullPath) os.remove(cacheFullPath)
class Credentials(object): class Credentials(object):
def __init__(self): def __init__(self):
self.credentials = [] self.credentials = []
@ -736,20 +775,23 @@ class Credentials(object):
if cdomain == "" or domain == cdomain: if cdomain == "" or domain == cdomain:
yield (name, password) yield (name, password)
class KeyCerts(Credentials): class KeyCerts(Credentials):
"""Identical to Credentials except that """Identical to Credentials except that
name/password are mapped to key/cert.""" name/password are mapped to key/cert."""
pass pass
class AllHosts(object): class AllHosts(object):
pass pass
class ProxyInfo(object): class ProxyInfo(object):
"""Collect information required to use a proxy.""" """Collect information required to use a proxy."""
bypass_hosts = () bypass_hosts = ()
def __init__(self, proxy_type, proxy_host, proxy_port, def __init__(self, proxy_type, proxy_host, proxy_port,
proxy_rdns=True, proxy_user=None, proxy_pass=None): proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None):
""" """
Args: Args:
proxy_type: The type of proxy server. This must be set to one of proxy_type: The type of proxy server. This must be set to one of
@ -770,6 +812,8 @@ class ProxyInfo(object):
proxy_user: The username used to authenticate with the proxy server. proxy_user: The username used to authenticate with the proxy server.
proxy_pass: The password used to authenticate with the proxy server. proxy_pass: The password used to authenticate with the proxy server.
proxy_headers: Additional or modified headers for the proxy connect request.
""" """
self.proxy_type = proxy_type self.proxy_type = proxy_type
self.proxy_host = proxy_host self.proxy_host = proxy_host
@ -777,10 +821,11 @@ class ProxyInfo(object):
self.proxy_rdns = proxy_rdns self.proxy_rdns = proxy_rdns
self.proxy_user = proxy_user self.proxy_user = proxy_user
self.proxy_pass = proxy_pass self.proxy_pass = proxy_pass
self.proxy_headers = proxy_headers
def astuple(self): def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port, return (self.proxy_type, self.proxy_host, self.proxy_port,
self.proxy_rdns, self.proxy_user, self.proxy_pass) self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers)
def isgood(self): def isgood(self):
return (self.proxy_host != None) and (self.proxy_port != None) return (self.proxy_host != None) and (self.proxy_port != None)
@ -793,12 +838,20 @@ class ProxyInfo(object):
if self.bypass_hosts is AllHosts: if self.bypass_hosts is AllHosts:
return True return True
bypass = False hostname = '.' + hostname.lstrip('.')
for domain in self.bypass_hosts: for skip_name in self.bypass_hosts:
if hostname.endswith(domain): # *.suffix
bypass = True if skip_name.startswith('.') and hostname.endswith(skip_name):
return True
# exact match
if hostname == '.' + skip_name:
return True
return False
return bypass def __repr__(self):
return (
'<ProxyInfo type={p.proxy_type} host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}' +
' user={p.proxy_user} headers={p.proxy_headers}>').format(p=self)
def proxy_info_from_environment(method='http'): def proxy_info_from_environment(method='http'):
@ -812,20 +865,10 @@ def proxy_info_from_environment(method='http'):
url = os.environ.get(env_var, os.environ.get(env_var.upper())) url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url: if not url:
return return
pi = proxy_info_from_url(url, method) return proxy_info_from_url(url, method, None)
no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
bypass_hosts = []
if no_proxy:
bypass_hosts = no_proxy.split(',')
# special case, no_proxy=* means all hosts bypassed
if no_proxy == '*':
bypass_hosts = AllHosts
pi.bypass_hosts = bypass_hosts def proxy_info_from_url(url, method='http', noproxy=None):
return pi
def proxy_info_from_url(url, method='http'):
""" """
Construct a ProxyInfo from a URL (such as http_proxy env var) Construct a ProxyInfo from a URL (such as http_proxy env var)
""" """
@ -851,15 +894,30 @@ def proxy_info_from_url(url, method='http'):
else: else:
port = dict(https=443, http=80)[method] port = dict(https=443, http=80)[method]
proxy_type = 3 # socks.PROXY_TYPE_HTTP proxy_type = 3 # socks.PROXY_TYPE_HTTP
return ProxyInfo( pi = ProxyInfo(
proxy_type = proxy_type, proxy_type = proxy_type,
proxy_host = host, proxy_host = host,
proxy_port = port, proxy_port = port,
proxy_user = username or None, proxy_user = username or None,
proxy_pass = password or None, proxy_pass = password or None,
proxy_headers = None,
) )
bypass_hosts = []
# If not given an explicit noproxy value, respect values in env vars.
if noproxy is None:
noproxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
# Special case: A single '*' character means all hosts should be bypassed.
if noproxy == '*':
bypass_hosts = AllHosts
elif noproxy.strip():
bypass_hosts = noproxy.split(',')
bypass_hosts = filter(bool, bypass_hosts) # To exclude empty string.
pi.bypass_hosts = bypass_hosts
return pi
class HTTPConnectionWithTimeout(httplib.HTTPConnection): class HTTPConnectionWithTimeout(httplib.HTTPConnection):
""" """
@ -885,7 +943,7 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
msg = "getaddrinfo returns an empty list" msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood(): if self.proxy_info and self.proxy_info.isgood():
use_proxy = True use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = self.proxy_info.astuple()
host = proxy_host host = proxy_host
port = proxy_port port = proxy_port
@ -900,7 +958,7 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
try: try:
if use_proxy: if use_proxy:
self.sock = socks.socksocket(af, socktype, proto) self.sock = socks.socksocket(af, socktype, proto)
self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)
else: else:
self.sock = socket.socket(af, socktype, proto) self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@ -909,16 +967,18 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
self.sock.settimeout(self.timeout) self.sock.settimeout(self.timeout)
# End of difference from httplib. # End of difference from httplib.
if self.debuglevel > 0: if self.debuglevel > 0:
print "connect: (%s, %s) ************" % (self.host, self.port) print("connect: (%s, %s) ************" % (self.host, self.port))
if use_proxy: if use_proxy:
print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) print("proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
if use_proxy:
self.sock.connect((self.host, self.port) + sa[2:]) self.sock.connect((self.host, self.port) + sa[2:])
except socket.error, msg: else:
self.sock.connect(sa)
except socket.error as msg:
if self.debuglevel > 0: if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port) print("connect fail: (%s, %s)" % (self.host, self.port))
if use_proxy: if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
if self.sock: if self.sock:
self.sock.close() self.sock.close()
self.sock = None self.sock = None
@ -927,6 +987,7 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
if not self.sock: if not self.sock:
raise socket.error, msg raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
""" """
This class allows communication via SSL. This class allows communication via SSL.
@ -938,7 +999,8 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
""" """
def __init__(self, host, port=None, key_file=None, cert_file=None, def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, strict=None, timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False): ca_certs=None, disable_ssl_certificate_validation=False,
ssl_version=None):
httplib.HTTPSConnection.__init__(self, host, port=port, httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file, key_file=key_file,
cert_file=cert_file, strict=strict) cert_file=cert_file, strict=strict)
@ -949,6 +1011,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
self.ca_certs = ca_certs self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \ self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation disable_ssl_certificate_validation
self.ssl_version = ssl_version
# The following two methods were adapted from https_wrapper.py, released # The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at # with the Google Appengine SDK at
@ -1007,7 +1070,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
msg = "getaddrinfo returns an empty list" msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood(): if self.proxy_info and self.proxy_info.isgood():
use_proxy = True use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = self.proxy_info.astuple()
host = proxy_host host = proxy_host
port = proxy_port port = proxy_port
@ -1023,21 +1086,26 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
if use_proxy: if use_proxy:
sock = socks.socksocket(family, socktype, proto) sock = socks.socksocket(family, socktype, proto)
sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)
else: else:
sock = socket.socket(family, socktype, proto) sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if has_timeout(self.timeout): if has_timeout(self.timeout):
sock.settimeout(self.timeout) sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
if use_proxy:
sock.connect((self.host, self.port) + sockaddr[:2])
else:
sock.connect(sockaddr)
self.sock =_ssl_wrap_socket( self.sock =_ssl_wrap_socket(
sock, self.key_file, self.cert_file, sock, self.key_file, self.cert_file,
self.disable_ssl_certificate_validation, self.ca_certs) self.disable_ssl_certificate_validation, self.ca_certs,
self.ssl_version, self.host)
if self.debuglevel > 0: if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port) print("connect: (%s, %s)" % (self.host, self.port))
if use_proxy: if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
if not self.disable_ssl_certificate_validation: if not self.disable_ssl_certificate_validation:
cert = self.sock.getpeercert() cert = self.sock.getpeercert()
hostname = self.host.split(':', 0)[0] hostname = self.host.split(':', 0)[0]
@ -1045,7 +1113,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
raise CertificateHostnameMismatch( raise CertificateHostnameMismatch(
'Server presented certificate that does not match ' 'Server presented certificate that does not match '
'host %s: %s' % (hostname, cert), hostname, cert) 'host %s: %s' % (hostname, cert), hostname, cert)
except ssl_SSLError, e: except (ssl_SSLError, ssl_CertificateError, CertificateHostnameMismatch) as e:
if sock: if sock:
sock.close() sock.close()
if self.sock: if self.sock:
@ -1055,17 +1123,17 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
# to get at more detailed error information, in particular # to get at more detailed error information, in particular
# whether the error is due to certificate validation or # whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch). # something else (such as SSL protocol mismatch).
if e.errno == ssl.SSL_ERROR_SSL: if getattr(e, 'errno', None) == ssl.SSL_ERROR_SSL:
raise SSLHandshakeError(e) raise SSLHandshakeError(e)
else: else:
raise raise
except (socket.timeout, socket.gaierror): except (socket.timeout, socket.gaierror):
raise raise
except socket.error, msg: except socket.error as msg:
if self.debuglevel > 0: if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port) print("connect fail: (%s, %s)" % (self.host, self.port))
if use_proxy: if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
if self.sock: if self.sock:
self.sock.close() self.sock.close()
self.sock = None self.sock = None
@ -1079,63 +1147,73 @@ SCHEME_TO_CONNECTION = {
'https': HTTPSConnectionWithTimeout 'https': HTTPSConnectionWithTimeout
} }
def _new_fixed_fetch(validate_certificate):
def fixed_fetch(url, payload=None, method="GET", headers={},
allow_truncated=False, follow_redirects=True,
deadline=None):
if deadline is None:
deadline = socket.getdefaulttimeout()
return fetch(url, payload=payload, method=method, headers=headers,
allow_truncated=allow_truncated,
follow_redirects=follow_redirects, deadline=deadline,
validate_certificate=validate_certificate)
return fixed_fetch
class AppEngineHttpConnection(httplib.HTTPConnection):
"""Use httplib on App Engine, but compensate for its weirdness.
The parameters key_file, cert_file, proxy_info, ca_certs,
disable_ssl_certificate_validation, and ssl_version are all dropped on
the ground.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False,
ssl_version=None):
httplib.HTTPConnection.__init__(self, host, port=port,
strict=strict, timeout=timeout)
class AppEngineHttpsConnection(httplib.HTTPSConnection):
"""Same as AppEngineHttpConnection, but for HTTPS URIs.
The parameters proxy_info, ca_certs, disable_ssl_certificate_validation,
and ssl_version are all dropped on the ground.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False,
ssl_version=None):
httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file,
cert_file=cert_file, strict=strict,
timeout=timeout)
self._fetch = _new_fixed_fetch(
not disable_ssl_certificate_validation)
# Use a different connection object for Google App Engine # Use a different connection object for Google App Engine
try: try:
try: server_software = os.environ.get('SERVER_SOFTWARE')
from google.appengine.api import apiproxy_stub_map if not server_software:
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: raise NotRunningAppEngineEnvironment()
raise ImportError # Bail out; we're not actually running on App Engine. elif not (server_software.startswith('Google App Engine/') or
from google.appengine.api.urlfetch import fetch server_software.startswith('Development/')):
from google.appengine.api.urlfetch import InvalidURLError raise NotRunningAppEngineEnvironment()
except (ImportError, AttributeError):
from google3.apphosting.api import apiproxy_stub_map from google.appengine.api import apiproxy_stub_map
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
raise ImportError # Bail out; we're not actually running on App Engine. raise ImportError # Bail out; we're not actually running on App Engine.
from google3.apphosting.api.urlfetch import fetch from google.appengine.api.urlfetch import fetch
from google3.apphosting.api.urlfetch import InvalidURLError from google.appengine.api.urlfetch import InvalidURLError
def _new_fixed_fetch(validate_certificate):
def fixed_fetch(url, payload=None, method="GET", headers={},
allow_truncated=False, follow_redirects=True,
deadline=None):
if deadline is None:
deadline = socket.getdefaulttimeout() or 5
return fetch(url, payload=payload, method=method, headers=headers,
allow_truncated=allow_truncated,
follow_redirects=follow_redirects, deadline=deadline,
validate_certificate=validate_certificate)
return fixed_fetch
class AppEngineHttpConnection(httplib.HTTPConnection):
"""Use httplib on App Engine, but compensate for its weirdness.
The parameters key_file, cert_file, proxy_info, ca_certs, and
disable_ssl_certificate_validation are all dropped on the ground.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
httplib.HTTPConnection.__init__(self, host, port=port,
strict=strict, timeout=timeout)
class AppEngineHttpsConnection(httplib.HTTPSConnection):
"""Same as AppEngineHttpConnection, but for HTTPS URIs."""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file,
cert_file=cert_file, strict=strict,
timeout=timeout)
self._fetch = _new_fixed_fetch(
not disable_ssl_certificate_validation)
# Update the connection classes to use the Googel App Engine specific ones. # Update the connection classes to use the Googel App Engine specific ones.
SCHEME_TO_CONNECTION = { SCHEME_TO_CONNECTION = {
'http': AppEngineHttpConnection, 'http': AppEngineHttpConnection,
'https': AppEngineHttpsConnection 'https': AppEngineHttpsConnection
} }
except (ImportError, AttributeError): except (ImportError, AttributeError, NotRunningAppEngineEnvironment):
pass pass
@ -1155,7 +1233,8 @@ class Http(object):
""" """
def __init__(self, cache=None, timeout=None, def __init__(self, cache=None, timeout=None,
proxy_info=proxy_info_from_environment, proxy_info=proxy_info_from_environment,
ca_certs=None, disable_ssl_certificate_validation=False): ca_certs=None, disable_ssl_certificate_validation=False,
ssl_version=None):
"""If 'cache' is a string then it is used as a directory name for """If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the a disk cache. Otherwise it must be an object that supports the
same interface as FileCache. same interface as FileCache.
@ -1178,11 +1257,14 @@ class Http(object):
If disable_ssl_certificate_validation is true, SSL cert validation will If disable_ssl_certificate_validation is true, SSL cert validation will
not be performed. not be performed.
By default, ssl.PROTOCOL_SSLv23 will be used for the ssl version.
""" """
self.proxy_info = proxy_info self.proxy_info = proxy_info
self.ca_certs = ca_certs self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \ self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation disable_ssl_certificate_validation
self.ssl_version = ssl_version
# Map domain name to an httplib connection # Map domain name to an httplib connection
self.connections = {} self.connections = {}
@ -1243,7 +1325,7 @@ class Http(object):
challenges = _parse_www_authenticate(response, 'www-authenticate') challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host): for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER: for scheme in AUTH_SCHEME_ORDER:
if challenges.has_key(scheme): if scheme in challenges:
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""): def add_credentials(self, name, password, domain=""):
@ -1279,15 +1361,16 @@ class Http(object):
except ssl_SSLError: except ssl_SSLError:
conn.close() conn.close()
raise raise
except socket.error, e: except socket.error as e:
err = 0 err = 0
if hasattr(e, 'args'): if hasattr(e, 'args'):
err = getattr(e, 'args')[0] err = getattr(e, 'args')[0]
else: else:
err = e.errno err = e.errno
if err == errno.ECONNREFUSED: # Connection refused
raise
if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
continue # retry on potentially transient socket errors continue # retry on potentially transient socket errors
raise
except httplib.HTTPException: except httplib.HTTPException:
# Just because the server closed the connection doesn't apparently mean # Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response. # that the server didn't send a response.
@ -1370,29 +1453,29 @@ class Http(object):
# Pick out the location header and basically start from the beginning # Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth' # remembering first to strip the ETag header and decrement our 'depth'
if redirections: if redirections:
if not response.has_key('location') and response.status != 300: if 'location' not in response and response.status != 300:
raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST) # Fix-up relative redirects (which violate an RFC 2616 MUST)
if response.has_key('location'): if 'location' in response:
location = response['location'] location = response['location']
(scheme, authority, path, query, fragment) = parse_uri(location) (scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None: if authority == None:
response['location'] = urlparse.urljoin(absolute_uri, location) response['location'] = urlparse.urljoin(absolute_uri, location)
if response.status == 301 and method in ["GET", "HEAD"]: if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location'] response['-x-permanent-redirect-url'] = response['location']
if not response.has_key('content-location'): if 'content-location' not in response:
response['content-location'] = absolute_uri response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey) _updateCache(headers, response, content, self.cache, cachekey)
if headers.has_key('if-none-match'): if 'if-none-match' in headers:
del headers['if-none-match'] del headers['if-none-match']
if headers.has_key('if-modified-since'): if 'if-modified-since' in headers:
del headers['if-modified-since'] del headers['if-modified-since']
if 'authorization' in headers and not self.forward_authorization_headers: if 'authorization' in headers and not self.forward_authorization_headers:
del headers['authorization'] del headers['authorization']
if response.has_key('location'): if 'location' in response:
location = response['location'] location = response['location']
old_response = copy.deepcopy(response) old_response = copy.deepcopy(response)
if not old_response.has_key('content-location'): if 'content-location' not in old_response:
old_response['content-location'] = absolute_uri old_response['content-location'] = absolute_uri
redirect_method = method redirect_method = method
if response.status in [302, 303]: if response.status in [302, 303]:
@ -1407,7 +1490,7 @@ class Http(object):
raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content) raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
elif response.status in [200, 203] and method in ["GET", "HEAD"]: elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests # Don't cache 206's since we aren't going to handle byte range requests
if not response.has_key('content-location'): if 'content-location' not in response:
response['content-location'] = absolute_uri response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey) _updateCache(headers, response, content, self.cache, cachekey)
@ -1449,7 +1532,7 @@ class Http(object):
else: else:
headers = self._normalize_headers(headers) headers = self._normalize_headers(headers)
if not headers.has_key('user-agent'): if 'user-agent' not in headers:
headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri) uri = iri2uri(uri)
@ -1477,14 +1560,16 @@ class Http(object):
proxy_info=proxy_info, proxy_info=proxy_info,
ca_certs=self.ca_certs, ca_certs=self.ca_certs,
disable_ssl_certificate_validation= disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation) self.disable_ssl_certificate_validation,
ssl_version=self.ssl_version)
else: else:
conn = self.connections[conn_key] = connection_type( conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout, authority, timeout=self.timeout,
proxy_info=proxy_info, proxy_info=proxy_info,
ca_certs=self.ca_certs, ca_certs=self.ca_certs,
disable_ssl_certificate_validation= disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation) self.disable_ssl_certificate_validation,
ssl_version=self.ssl_version)
else: else:
conn = self.connections[conn_key] = connection_type( conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout, authority, timeout=self.timeout,
@ -1518,7 +1603,7 @@ class Http(object):
else: else:
cachekey = None cachekey = None
if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers:
# http://www.w3.org/1999/04/Editing/ # http://www.w3.org/1999/04/Editing/
headers['if-match'] = info['etag'] headers['if-match'] = info['etag']
@ -1539,7 +1624,7 @@ class Http(object):
break break
if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
if info.has_key('-x-permanent-redirect-url'): if '-x-permanent-redirect-url' in info:
# Should cached permanent redirects be counted in our redirection count? For now, yes. # Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0: if redirections <= 0:
raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
@ -1569,9 +1654,9 @@ class Http(object):
return (response, content) return (response, content)
if entry_disposition == "STALE": if entry_disposition == "STALE":
if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
headers['if-none-match'] = info['etag'] headers['if-none-match'] = info['etag']
if info.has_key('last-modified') and not 'last-modified' in headers: if 'last-modified' in info and not 'last-modified' in headers:
headers['if-modified-since'] = info['last-modified'] headers['if-modified-since'] = info['last-modified']
elif entry_disposition == "TRANSPARENT": elif entry_disposition == "TRANSPARENT":
pass pass
@ -1601,13 +1686,13 @@ class Http(object):
content = new_content content = new_content
else: else:
cc = _parse_cache_control(headers) cc = _parse_cache_control(headers)
if cc.has_key('only-if-cached'): if 'only-if-cached' in cc:
info['status'] = '504' info['status'] = '504'
response = Response(info) response = Response(info)
content = "" content = ""
else: else:
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
except Exception, e: except Exception as e:
if self.force_exception_to_status_code: if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse): if isinstance(e, HttpLib2ErrorWithResponse):
response = e.response response = e.response
@ -1688,9 +1773,8 @@ class Response(dict):
self.status = int(self.get('status', self.status)) self.status = int(self.get('status', self.status))
self.reason = self.get('reason', self.reason) self.reason = self.get('reason', self.reason)
def __getattr__(self, name): def __getattr__(self, name):
if name == 'dict': if name == 'dict':
return self return self
else: else:
raise AttributeError, name raise AttributeError(name)

@ -102,28 +102,6 @@ A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
-----END CERTIFICATE----- -----END CERTIFICATE-----
# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
# Label: "Verisign Class 3 Public Primary Certification Authority"
# Serial: 149843929435818692848040365716851702463
# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
-----BEGIN CERTIFICATE-----
MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
-----END CERTIFICATE-----
# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network # Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network # Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
# Label: "Verisign Class 3 Public Primary Certification Authority - G2" # Label: "Verisign Class 3 Public Primary Certification Authority - G2"
@ -1800,28 +1778,6 @@ kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
-----END CERTIFICATE----- -----END CERTIFICATE-----
# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
# Label: "Verisign Class 3 Public Primary Certification Authority"
# Serial: 80507572722862485515306429940691309246
# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
-----BEGIN CERTIFICATE-----
MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
-----END CERTIFICATE-----
# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
# Label: "GlobalSign Root CA - R3" # Label: "GlobalSign Root CA - R3"
@ -2181,3 +2137,60 @@ IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
-----END CERTIFICATE----- -----END CERTIFICATE-----
# Issuer: O=Digital Signature Trust Co., CN=DST Root CA X3
# Subject: O=Digital Signature Trust Co., CN=DST Root CA X3
# Label: "IdenTrust DST Root CA X3"
# Serial: 44AFB080D6A327BA893039862EF8406B
# MD5 Fingerprint: 41:03:52:DC:0F:F7:50:1B:16:F0:02:8E:BA:6F:45:C5
# SHA1 Fingerprint: DA:C9:02:4F:54:D8:F6:DF:94:93:5F:B1:73:26:38:CA:6A:D7:7C:13
# SHA256 Fingerprint: 06:87:26:03:31:A7:24:03:D9:09:F1:05:E6:9B:CF:0D:32:E1:BD:24:93:FF:C6:D9:20:6D:11:BC:D6:77:07:39
-----BEGIN CERTIFICATE-----
MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
-----END CERTIFICATE-----
# Issuer: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
# Subject: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US
# Serial: 33af1e6a711a9a0bb2864b11d09fae5
# MD5 Fingerprint: E4:A6:8A:C8:54:AC:52:42:46:0A:FD:72:48:1B:2A:44
# SHA1 Fingerprint: DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4
# SHA256 Fingerprint: CB:3C:CB:B7:60:31:E5:E0:13:8F:8D:D3:9A:23:F9:DE:47:FF:C3:5E:43:C1:14:4C:EA:27:D4:6A:5A:B1:CB:5F
-----BEGIN CERTIFICATE-----
MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
MrY=
-----END CERTIFICATE-----

@ -177,7 +177,7 @@ class socksocket(socket.socket):
auth = self.__proxy[4] + ":" + self.__proxy[5] auth = self.__proxy[4] + ":" + self.__proxy[5]
return "Proxy-Authorization: Basic " + base64.b64encode(auth) return "Proxy-Authorization: Basic " + base64.b64encode(auth)
def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None, headers=None):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets the proxy to be used. Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types proxytype - The type of the proxy to be used. Three types
@ -193,8 +193,9 @@ class socksocket(socket.socket):
The default is no authentication. The default is no authentication.
password - Password to authenticate with to the server. password - Password to authenticate with to the server.
Only relevant when username is also provided. Only relevant when username is also provided.
headers - Additional or modified headers for the proxy connect request.
""" """
self.__proxy = (proxytype, addr, port, rdns, username, password) self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
def __negotiatesocks5(self, destaddr, destport): def __negotiatesocks5(self, destaddr, destport):
"""__negotiatesocks5(self,destaddr,destport) """__negotiatesocks5(self,destaddr,destport)
@ -253,7 +254,7 @@ class socksocket(socket.socket):
if self.__proxy[3]: if self.__proxy[3]:
# Resolve remotely # Resolve remotely
ipaddr = None ipaddr = None
req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr.encode()
else: else:
# Resolve locally # Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
@ -365,8 +366,17 @@ class socksocket(socket.socket):
else: else:
addr = destaddr addr = destaddr
headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
headers += ["Host: ", destaddr, "\r\n"] wrote_host_header = False
if (self.__proxy[4] != None and self.__proxy[5] != None): wrote_auth_header = False
if self.__proxy[6] != None:
for key, val in self.__proxy[6].iteritems():
headers += [key, ": ", val, "\r\n"]
wrote_host_header = (key.lower() == "host")
wrote_auth_header = (key.lower() == "proxy-authorization")
if not wrote_host_header:
headers += ["Host: ", destaddr, "\r\n"]
if not wrote_auth_header:
if (self.__proxy[4] != None and self.__proxy[5] != None):
headers += [self.__getauthheader(), "\r\n"] headers += [self.__getauthheader(), "\r\n"]
headers.append("\r\n") headers.append("\r\n")
self.sendall("".join(headers).encode()) self.sendall("".join(headers).encode())

Loading…
Cancel
Save