@ -1,4 +1,4 @@
from __future__ import generators
from __future__ import print_function
"""
"""
httplib2
httplib2
@ -20,9 +20,10 @@ __contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
" Jonathan Feinberg " ,
" Jonathan Feinberg " ,
" Blair Zajac " ,
" Blair Zajac " ,
" Sam Ruby " ,
" Sam Ruby " ,
" Louis Nyffenegger " ]
" Louis Nyffenegger " ,
" Alex Yu " ]
# De-duplicated diff residue: keep the post-patch metadata values.
__license__ = "MIT"
__version__ = '0.11.3'
import re
import re
import sys
import sys
@ -64,31 +65,54 @@ except ImportError:
socks = None
socks = None
# Build the appropriate socket wrapper for ssl.
# ``ssl`` may be missing on stripped-down builds, so every attribute access
# is guarded and a non-validating fallback wrapper is installed when absent.
ssl = None
ssl_SSLError = None
ssl_CertificateError = None
try:
    import ssl  # python 2.6
except ImportError:
    pass

if ssl is not None:
    # Older ssl modules may lack these attributes; getattr keeps them None.
    ssl_SSLError = getattr(ssl, 'SSLError', None)
    ssl_CertificateError = getattr(ssl, 'CertificateError', None)


def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation,
                     ca_certs, ssl_version, hostname):
    """Wrap an already-connected socket in an SSL/TLS layer.

    Args:
        sock: plain, connected socket.
        key_file, cert_file: optional client key/certificate paths.
        disable_validation: when True, skip server certificate verification.
        ca_certs: optional CA bundle path used for verification.
        ssl_version: an ssl.PROTOCOL_* constant, or None for PROTOCOL_SSLv23.
        hostname: server hostname, used for SNI and hostname checking.

    Returns:
        The wrapped SSL socket object.
    """
    if disable_validation:
        cert_reqs = ssl.CERT_NONE
    else:
        cert_reqs = ssl.CERT_REQUIRED
    if ssl_version is None:
        # We should be specifying SSL version 3 or TLS v1, but the ssl module
        # doesn't expose the necessary knobs. So we need to go with the
        # default of SSLv23.
        ssl_version = ssl.PROTOCOL_SSLv23

    if hasattr(ssl, 'SSLContext'):  # Python 2.7.9 and newer
        context = ssl.SSLContext(ssl_version)
        context.verify_mode = cert_reqs
        # Only check the hostname when we are actually verifying the cert.
        context.check_hostname = (cert_reqs != ssl.CERT_NONE)
        if cert_file:
            context.load_cert_chain(cert_file, key_file)
        if ca_certs:
            context.load_verify_locations(ca_certs)
        return context.wrap_socket(sock, server_hostname=hostname)
    else:
        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
                               cert_reqs=cert_reqs, ca_certs=ca_certs,
                               ssl_version=ssl_version)


def _ssl_wrap_socket_unsupported(sock, key_file, cert_file, disable_validation,
                                 ca_certs, ssl_version, hostname):
    """Fallback used when the ``ssl`` module is unavailable.

    Certificate validation cannot be performed without ``ssl``, so this
    either raises or wraps the socket with the legacy socket.ssl API.
    """
    if not disable_validation:
        raise CertificateValidationUnsupported(
            "SSL certificate validation is not supported without "
            "the ssl module installed. To avoid this error, install "
            "the ssl module, or explicity disable validation.")
    ssl_sock = socket.ssl(sock, key_file, cert_file)
    return httplib.FakeSocket(sock, ssl_sock)

if ssl is None:
    _ssl_wrap_socket = _ssl_wrap_socket_unsupported
if sys . version_info > = ( 2 , 3 ) :
if sys . version_info > = ( 2 , 3 ) :
@ -122,6 +146,7 @@ if sys.version_info < (2,4):
seq . sort ( )
seq . sort ( )
return seq
return seq
# Python 2.3 support
# Python 2.3 support
def HTTPResponse__getheaders ( self ) :
def HTTPResponse__getheaders ( self ) :
""" Return list of (header, value) tuples. """
""" Return list of (header, value) tuples. """
@ -162,6 +187,8 @@ class CertificateHostnameMismatch(SSLHandshakeError):
self . host = host
self . host = host
self . cert = cert
self . cert = cert
class NotRunningAppEngineEnvironment(HttpLib2Error):
    # NOTE(review): presumably raised when App Engine-specific connection
    # classes are used outside the App Engine runtime — confirm with callers.
    pass
# Open Items:
# Open Items:
# -----------
# -----------
# Proxy support
# Proxy support
@ -197,6 +224,7 @@ except ImportError:
# Which headers are hop-by-hop headers by default
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = [ ' connection ' , ' keep-alive ' , ' proxy-authenticate ' , ' proxy-authorization ' , ' te ' , ' trailers ' , ' transfer-encoding ' , ' upgrade ' ]
HOP_BY_HOP = [ ' connection ' , ' keep-alive ' , ' proxy-authenticate ' , ' proxy-authorization ' , ' te ' , ' trailers ' , ' transfer-encoding ' , ' upgrade ' ]
def _get_end2end_headers ( response ) :
def _get_end2end_headers ( response ) :
hopbyhop = list ( HOP_BY_HOP )
hopbyhop = list ( HOP_BY_HOP )
hopbyhop . extend ( [ x . strip ( ) for x in response . get ( ' connection ' , ' ' ) . split ( ' , ' ) ] )
hopbyhop . extend ( [ x . strip ( ) for x in response . get ( ' connection ' , ' ' ) . split ( ' , ' ) ] )
@ -204,6 +232,7 @@ def _get_end2end_headers(response):
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")


def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    groups = URI.match(uri).groups()
    # Group layout: 1=scheme, 3=authority, 4=path, 6=query, 8=fragment.
    return (groups[1], groups[3], groups[4], groups[6], groups[8])
def urlnorm ( uri ) :
def urlnorm ( uri ) :
( scheme , authority , path , query , fragment ) = parse_uri ( uri )
( scheme , authority , path , query , fragment ) = parse_uri ( uri )
if not scheme or not authority :
if not scheme or not authority :
@ -232,6 +262,7 @@ def urlnorm(uri):
re_url_scheme = re . compile ( r ' ^ \ w+:// ' )
re_url_scheme = re . compile ( r ' ^ \ w+:// ' )
re_slash = re . compile ( r ' [?/:|]+ ' )
re_slash = re . compile ( r ' [?/:|]+ ' )
def safename ( filename ) :
def safename ( filename ) :
""" Return a filename suitable for the cache.
""" Return a filename suitable for the cache.
@ -260,12 +291,15 @@ def safename(filename):
return " , " . join ( ( filename , filemd5 ) )
return " , " . join ( ( filename , filemd5 ) )
NORMALIZE_SPACE = re . compile ( r ' (?: \ r \ n)?[ \ t]+ ' )
NORMALIZE_SPACE = re . compile ( r ' (?: \ r \ n)?[ \ t]+ ' )
def _normalize_headers ( headers ) :
def _normalize_headers ( headers ) :
return dict ( [ ( key . lower ( ) , NORMALIZE_SPACE . sub ( value , ' ' ) . strip ( ) ) for ( key , value ) in headers . iteritems ( ) ] )
return dict ( [ ( key . lower ( ) , NORMALIZE_SPACE . sub ( value , ' ' ) . strip ( ) ) for ( key , value ) in headers . iteritems ( ) ] )
def _parse_cache_control ( headers ) :
def _parse_cache_control ( headers ) :
retval = { }
retval = { }
if headers . has_key ( ' cache-control ' ) :
if ' cache-control ' in headers :
parts = headers [ ' cache-control ' ] . split ( ' , ' )
parts = headers [ ' cache-control ' ] . split ( ' , ' )
parts_with_args = [ tuple ( [ x . strip ( ) . lower ( ) for x in part . split ( " = " , 1 ) ] ) for part in parts if - 1 != part . find ( " = " ) ]
parts_with_args = [ tuple ( [ x . strip ( ) . lower ( ) for x in part . split ( " = " , 1 ) ] ) for part in parts if - 1 != part . find ( " = " ) ]
parts_wo_args = [ ( name . strip ( ) . lower ( ) , 1 ) for name in parts if - 1 == name . find ( " = " ) ]
parts_wo_args = [ ( name . strip ( ) . lower ( ) , 1 ) for name in parts if - 1 == name . find ( " = " ) ]
@ -290,7 +324,7 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
""" Returns a dictionary of dictionaries, one dict
""" Returns a dictionary of dictionaries, one dict
per auth_scheme . """
per auth_scheme . """
retval = { }
retval = { }
if header s. has_key ( headername ) :
if header name in headers :
try :
try :
authenticate = headers [ headername ] . strip ( )
authenticate = headers [ headername ] . strip ( )
@ -318,6 +352,7 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
return retval
return retval
# TODO: add current time as _entry_disposition argument to avoid sleep in tests
def _entry_disposition ( response_headers , request_headers ) :
def _entry_disposition ( response_headers , request_headers ) :
""" Determine freshness from the Date, Expires and Cache-Control headers.
""" Determine freshness from the Date, Expires and Cache-Control headers.
@ -350,26 +385,26 @@ def _entry_disposition(response_headers, request_headers):
cc = _parse_cache_control ( request_headers )
cc = _parse_cache_control ( request_headers )
cc_response = _parse_cache_control ( response_headers )
cc_response = _parse_cache_control ( response_headers )
if request_headers . has_key ( ' pragma ' ) and request_headers [ ' pragma ' ] . lower ( ) . find ( ' no-cache ' ) != - 1 :
if ' pragma ' in request_headers and request_headers [ ' pragma ' ] . lower ( ) . find ( ' no-cache ' ) != - 1 :
retval = " TRANSPARENT "
retval = " TRANSPARENT "
if ' cache-control ' not in request_headers :
if ' cache-control ' not in request_headers :
request_headers [ ' cache-control ' ] = ' no-cache '
request_headers [ ' cache-control ' ] = ' no-cache '
elif cc . has_key ( ' no-cache ' ) :
elif ' no-cache ' in cc :
retval = " TRANSPARENT "
retval = " TRANSPARENT "
elif cc_response . has_key ( ' no-cache ' ) :
elif ' no-cache ' in cc_response :
retval = " STALE "
retval = " STALE "
elif cc . has_key ( ' only-if-cached ' ) :
elif ' only-if-cached ' in cc :
retval = " FRESH "
retval = " FRESH "
elif response_headers . has_key ( ' date ' ) :
elif ' date ' in response_headers :
date = calendar . timegm ( email . Utils . parsedate_tz ( response_headers [ ' date ' ] ) )
date = calendar . timegm ( email . Utils . parsedate_tz ( response_headers [ ' date ' ] ) )
now = time . time ( )
now = time . time ( )
current_age = max ( 0 , now - date )
current_age = max ( 0 , now - date )
if cc_response . has_key ( ' max-age ' ) :
if ' max-age ' in cc_response :
try :
try :
freshness_lifetime = int ( cc_response [ ' max-age ' ] )
freshness_lifetime = int ( cc_response [ ' max-age ' ] )
except ValueError :
except ValueError :
freshness_lifetime = 0
freshness_lifetime = 0
elif response_headers . has_key ( ' expires ' ) :
elif ' expires ' in response_headers :
expires = email . Utils . parsedate_tz ( response_headers [ ' expires ' ] )
expires = email . Utils . parsedate_tz ( response_headers [ ' expires ' ] )
if None == expires :
if None == expires :
freshness_lifetime = 0
freshness_lifetime = 0
@ -377,12 +412,12 @@ def _entry_disposition(response_headers, request_headers):
freshness_lifetime = max ( 0 , calendar . timegm ( expires ) - date )
freshness_lifetime = max ( 0 , calendar . timegm ( expires ) - date )
else :
else :
freshness_lifetime = 0
freshness_lifetime = 0
if cc . has_key ( ' max-age ' ) :
if ' max-age ' in cc :
try :
try :
freshness_lifetime = int ( cc [ ' max-age ' ] )
freshness_lifetime = int ( cc [ ' max-age ' ] )
except ValueError :
except ValueError :
freshness_lifetime = 0
freshness_lifetime = 0
if cc . has_key ( ' min-fresh ' ) :
if ' min-fresh ' in cc :
try :
try :
min_fresh = int ( cc [ ' min-fresh ' ] )
min_fresh = int ( cc [ ' min-fresh ' ] )
except ValueError :
except ValueError :
@ -392,6 +427,7 @@ def _entry_disposition(response_headers, request_headers):
retval = " FRESH "
retval = " FRESH "
return retval
return retval
def _decompressContent ( response , new_content ) :
def _decompressContent ( response , new_content ) :
content = new_content
content = new_content
try :
try :
@ -400,21 +436,22 @@ def _decompressContent(response, new_content):
if encoding == ' gzip ' :
if encoding == ' gzip ' :
content = gzip . GzipFile ( fileobj = StringIO . StringIO ( new_content ) ) . read ( )
content = gzip . GzipFile ( fileobj = StringIO . StringIO ( new_content ) ) . read ( )
if encoding == ' deflate ' :
if encoding == ' deflate ' :
content = zlib . decompress ( content )
content = zlib . decompress ( content , - zlib . MAX_WBITS )
response [ ' content-length ' ] = str ( len ( content ) )
response [ ' content-length ' ] = str ( len ( content ) )
# Record the historical presence of the encoding in a way the won't interfere.
# Record the historical presence of the encoding in a way the won't interfere.
response [ ' -content-encoding ' ] = response [ ' content-encoding ' ]
response [ ' -content-encoding ' ] = response [ ' content-encoding ' ]
del response [ ' content-encoding ' ]
del response [ ' content-encoding ' ]
except IOError :
except ( IOError , zlib . error ) :
content = " "
content = " "
raise FailedToDecompressContent ( _ ( " Content purported to be compressed with %s but failed to decompress. " ) % response . get ( ' content-encoding ' ) , response , content )
raise FailedToDecompressContent ( _ ( " Content purported to be compressed with %s but failed to decompress. " ) % response . get ( ' content-encoding ' ) , response , content )
return content
return content
def _updateCache ( request_headers , response_headers , content , cache , cachekey ) :
def _updateCache ( request_headers , response_headers , content , cache , cachekey ) :
if cachekey :
if cachekey :
cc = _parse_cache_control ( request_headers )
cc = _parse_cache_control ( request_headers )
cc_response = _parse_cache_control ( response_headers )
cc_response = _parse_cache_control ( response_headers )
if cc . has_key ( ' no-store ' ) or cc_response . has_key ( ' no-store ' ) :
if ' no-store ' in cc or ' no-store ' in cc_response :
cache . delete ( cachekey )
cache . delete ( cachekey )
else :
else :
info = email . Message . Message ( )
info = email . Message . Message ( )
@ -496,7 +533,6 @@ class Authentication(object):
return False
return False
class BasicAuthentication ( Authentication ) :
class BasicAuthentication ( Authentication ) :
def __init__ ( self , credentials , host , request_uri , headers , response , content , http ) :
def __init__ ( self , credentials , host , request_uri , headers , response , content , http ) :
Authentication . __init__ ( self , credentials , host , request_uri , headers , response , content , http )
Authentication . __init__ ( self , credentials , host , request_uri , headers , response , content , http )
@ -550,7 +586,7 @@ class DigestAuthentication(Authentication):
self . challenge [ ' nc ' ] + = 1
self . challenge [ ' nc ' ] + = 1
def response(self, response, content):
    """DigestAuthentication method: update challenge state from a response.

    Returns True when the request should be retried (server flagged our
    nonce as stale); otherwise tracks nextnonce from Authentication-Info so
    the next request uses a fresh nonce, and returns False.
    NOTE(review): two lines elided in the mangled source (nc reset and
    'return True') were restored from the surrounding logic — confirm
    against upstream.
    """
    if 'authentication-info' not in response:
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
        if 'true' == challenge.get('stale'):
            self.challenge['nonce'] = challenge['nonce']
            self.challenge['nc'] = 1
            return True
    else:
        updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
        if 'nextnonce' in updated_challenge:
            self.challenge['nonce'] = updated_challenge['nextnonce']
            self.challenge['nc'] = 1
    return False
@ -649,6 +685,7 @@ class WsseAuthentication(Authentication):
cnonce ,
cnonce ,
iso_now )
iso_now )
class GoogleLoginAuthentication ( Authentication ) :
class GoogleLoginAuthentication ( Authentication ) :
def __init__ ( self , credentials , host , request_uri , headers , response , content , http ) :
def __init__ ( self , credentials , host , request_uri , headers , response , content , http ) :
from urllib import urlencode
from urllib import urlencode
@ -688,12 +725,13 @@ AUTH_SCHEME_CLASSES = {
AUTH_SCHEME_ORDER = [ " hmacdigest " , " googlelogin " , " digest " , " wsse " , " basic " ]
AUTH_SCHEME_ORDER = [ " hmacdigest " , " googlelogin " , " digest " , " wsse " , " basic " ]
class FileCache ( object ) :
class FileCache ( object ) :
""" Uses a local directory as a store for cached files.
""" Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
Not really safe to use if multiple threads or processes are going to
be running on the same cache .
be running on the same cache .
"""
"""
def __init__ ( self , cache , safe = safename ) : # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
def __init__ ( self , cache , safe = safename ) : # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self . cache = cache
self . cache = cache
self . safe = safe
self . safe = safe
if not os . path . exists ( cache ) :
if not os . path . exists ( cache ) :
@ -721,6 +759,7 @@ class FileCache(object):
if os . path . exists ( cacheFullPath ) :
if os . path . exists ( cacheFullPath ) :
os . remove ( cacheFullPath )
os . remove ( cacheFullPath )
class Credentials ( object ) :
class Credentials ( object ) :
def __init__ ( self ) :
def __init__ ( self ) :
self . credentials = [ ]
self . credentials = [ ]
@ -736,20 +775,23 @@ class Credentials(object):
if cdomain == " " or domain == cdomain :
if cdomain == " " or domain == cdomain :
yield ( name , password )
yield ( name , password )
class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert."""
    pass
class AllHosts(object):
    # Sentinel assigned to ProxyInfo.bypass_hosts (no_proxy='*') meaning the
    # proxy is bypassed for every host.
    pass
class ProxyInfo ( object ) :
class ProxyInfo ( object ) :
""" Collect information required to use a proxy. """
""" Collect information required to use a proxy. """
bypass_hosts = ( )
bypass_hosts = ( )
def __init__ ( self , proxy_type , proxy_host , proxy_port ,
def __init__ ( self , proxy_type , proxy_host , proxy_port ,
proxy_rdns = True , proxy_user = None , proxy_pass = None ):
proxy_rdns = True , proxy_user = None , proxy_pass = None , proxy_headers = None ):
"""
"""
Args :
Args :
proxy_type : The type of proxy server . This must be set to one of
proxy_type : The type of proxy server . This must be set to one of
@ -770,6 +812,8 @@ class ProxyInfo(object):
proxy_user : The username used to authenticate with the proxy server .
proxy_user : The username used to authenticate with the proxy server .
proxy_pass : The password used to authenticate with the proxy server .
proxy_pass : The password used to authenticate with the proxy server .
proxy_headers : Additional or modified headers for the proxy connect request .
"""
"""
self . proxy_type = proxy_type
self . proxy_type = proxy_type
self . proxy_host = proxy_host
self . proxy_host = proxy_host
@ -777,10 +821,11 @@ class ProxyInfo(object):
self . proxy_rdns = proxy_rdns
self . proxy_rdns = proxy_rdns
self . proxy_user = proxy_user
self . proxy_user = proxy_user
self . proxy_pass = proxy_pass
self . proxy_pass = proxy_pass
self . proxy_headers = proxy_headers
def astuple ( self ) :
def astuple ( self ) :
return ( self . proxy_type , self . proxy_host , self . proxy_port ,
return ( self . proxy_type , self . proxy_host , self . proxy_port ,
self . proxy_rdns , self . proxy_user , self . proxy_pass )
self . proxy_rdns , self . proxy_user , self . proxy_pass , self . proxy_headers )
def isgood ( self ) :
def isgood ( self ) :
return ( self . proxy_host != None ) and ( self . proxy_port != None )
return ( self . proxy_host != None ) and ( self . proxy_port != None )
@ -793,12 +838,20 @@ class ProxyInfo(object):
if self . bypass_hosts is AllHosts :
if self . bypass_hosts is AllHosts :
return True
return True
bypass = False
hostname = ' . ' + hostname . lstrip ( ' . ' )
for domain in self . bypass_hosts :
for skip_name in self . bypass_hosts :
if hostname . endswith ( domain ) :
# *.suffix
bypass = True
if skip_name . startswith ( ' . ' ) and hostname . endswith ( skip_name ) :
return True
# exact match
if hostname == ' . ' + skip_name :
return True
return False
return bypass
def __repr__ ( self ) :
return (
' <ProxyInfo type= {p.proxy_type} host:port= {p.proxy_host} : {p.proxy_port} rdns= {p.proxy_rdns} ' +
' user= {p.proxy_user} headers= {p.proxy_headers} > ' ) . format ( p = self )
def proxy_info_from_environment(method='http'):
    """
    Read proxy info from the environment variables.

    Returns a ProxyInfo built from the http_proxy/https_proxy environment
    variable for ``method``, or None when unset. no_proxy handling is
    delegated to proxy_info_from_url (noproxy=None means "read env vars").
    NOTE(review): the guard/env_var lines were elided in the mangled source
    and restored from the visible use of ``env_var`` below.
    """
    if method not in ['http', 'https']:
        return

    env_var = method + '_proxy'
    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
    if not url:
        return
    return proxy_info_from_url(url, method, None)
def proxy_info_from_url(url, method='http', noproxy=None):
    """
    Construct a ProxyInfo from a URL (such as http_proxy env var)

    NOTE(review): the credential/host-port parsing section was elided in the
    mangled source and restored from the surrounding logic — confirm.
    """
    url = urlparse.urlparse(url)
    username = None
    password = None
    port = None
    if '@' in url[1]:
        ident, host_port = url[1].split('@', 1)
        if ':' in ident:
            username, password = ident.split(':', 1)
        else:
            password = ident
    else:
        host_port = url[1]
    if ':' in host_port:
        host, port = host_port.split(':', 1)
    else:
        host = host_port
    if port:
        port = int(port)
    else:
        # Default port by scheme when the proxy URL does not specify one.
        port = dict(https=443, http=80)[method]

    proxy_type = 3  # socks.PROXY_TYPE_HTTP
    pi = ProxyInfo(
        proxy_type=proxy_type,
        proxy_host=host,
        proxy_port=port,
        proxy_user=username or None,
        proxy_pass=password or None,
        proxy_headers=None,
    )

    bypass_hosts = []
    # If not given an explicit noproxy value, respect values in env vars.
    if noproxy is None:
        noproxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
    # Special case: A single '*' character means all hosts should be bypassed.
    if noproxy == '*':
        bypass_hosts = AllHosts
    elif noproxy.strip():
        bypass_hosts = noproxy.split(',')
        bypass_hosts = filter(bool, bypass_hosts)  # To exclude empty string.

    pi.bypass_hosts = bypass_hosts
    return pi
class HTTPConnectionWithTimeout ( httplib . HTTPConnection ) :
class HTTPConnectionWithTimeout ( httplib . HTTPConnection ) :
"""
"""
@ -885,7 +943,7 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
msg = " getaddrinfo returns an empty list "
msg = " getaddrinfo returns an empty list "
if self . proxy_info and self . proxy_info . isgood ( ) :
if self . proxy_info and self . proxy_info . isgood ( ) :
use_proxy = True
use_proxy = True
proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass = self . proxy_info . astuple ( )
proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers = self . proxy_info . astuple ( )
host = proxy_host
host = proxy_host
port = proxy_port
port = proxy_port
@ -900,7 +958,7 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
try :
try :
if use_proxy :
if use_proxy :
self . sock = socks . socksocket ( af , socktype , proto )
self . sock = socks . socksocket ( af , socktype , proto )
self . sock . setproxy ( proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass )
self . sock . setproxy ( proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers )
else :
else :
self . sock = socket . socket ( af , socktype , proto )
self . sock = socket . socket ( af , socktype , proto )
self . sock . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , 1 )
self . sock . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , 1 )
@ -909,16 +967,18 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
self . sock . settimeout ( self . timeout )
self . sock . settimeout ( self . timeout )
# End of difference from httplib.
# End of difference from httplib.
if self . debuglevel > 0 :
if self . debuglevel > 0 :
print " connect: ( %s , %s ) ************ " % ( self . host , self . port )
print ( " connect: ( %s , %s ) ************ " % ( self . host , self . port ) )
if use_proxy :
if use_proxy :
print " proxy: %s ************ " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass ) )
print ( " proxy: %s ************ " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers ) ) )
if use_proxy :
self . sock . connect ( ( self . host , self . port ) + sa [ 2 : ] )
self . sock . connect ( ( self . host , self . port ) + sa [ 2 : ] )
except socket . error , msg :
else :
self . sock . connect ( sa )
except socket . error as msg :
if self . debuglevel > 0 :
if self . debuglevel > 0 :
print " connect fail: ( %s , %s ) " % ( self . host , self . port )
print ( " connect fail: ( %s , %s ) " % ( self . host , self . port ) )
if use_proxy :
if use_proxy :
print " proxy: %s " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass ) )
print ( " proxy: %s " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers ) ) )
if self . sock :
if self . sock :
self . sock . close ( )
self . sock . close ( )
self . sock = None
self . sock = None
@ -927,6 +987,7 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
if not self . sock :
if not self . sock :
raise socket . error , msg
raise socket . error , msg
class HTTPSConnectionWithTimeout ( httplib . HTTPSConnection ) :
class HTTPSConnectionWithTimeout ( httplib . HTTPSConnection ) :
"""
"""
This class allows communication via SSL .
This class allows communication via SSL .
@ -938,7 +999,8 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
"""
def __init__ ( self , host , port = None , key_file = None , cert_file = None ,
def __init__ ( self , host , port = None , key_file = None , cert_file = None ,
strict = None , timeout = None , proxy_info = None ,
strict = None , timeout = None , proxy_info = None ,
ca_certs = None , disable_ssl_certificate_validation = False ) :
ca_certs = None , disable_ssl_certificate_validation = False ,
ssl_version = None ) :
httplib . HTTPSConnection . __init__ ( self , host , port = port ,
httplib . HTTPSConnection . __init__ ( self , host , port = port ,
key_file = key_file ,
key_file = key_file ,
cert_file = cert_file , strict = strict )
cert_file = cert_file , strict = strict )
@ -949,6 +1011,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
self . ca_certs = ca_certs
self . ca_certs = ca_certs
self . disable_ssl_certificate_validation = \
self . disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
disable_ssl_certificate_validation
self . ssl_version = ssl_version
# The following two methods were adapted from https_wrapper.py, released
# The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at
# with the Google Appengine SDK at
@ -1007,7 +1070,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
msg = " getaddrinfo returns an empty list "
msg = " getaddrinfo returns an empty list "
if self . proxy_info and self . proxy_info . isgood ( ) :
if self . proxy_info and self . proxy_info . isgood ( ) :
use_proxy = True
use_proxy = True
proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass = self . proxy_info . astuple ( )
proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers = self . proxy_info . astuple ( )
host = proxy_host
host = proxy_host
port = proxy_port
port = proxy_port
@ -1023,21 +1086,26 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
if use_proxy :
if use_proxy :
sock = socks . socksocket ( family , socktype , proto )
sock = socks . socksocket ( family , socktype , proto )
sock . setproxy ( proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass )
sock . setproxy ( proxy_type , proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers )
else :
else :
sock = socket . socket ( family , socktype , proto )
sock = socket . socket ( family , socktype , proto )
sock . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , 1 )
sock . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , 1 )
if has_timeout ( self . timeout ) :
if has_timeout ( self . timeout ) :
sock . settimeout ( self . timeout )
sock . settimeout ( self . timeout )
sock . connect ( ( self . host , self . port ) )
if use_proxy :
sock . connect ( ( self . host , self . port ) + sockaddr [ : 2 ] )
else :
sock . connect ( sockaddr )
self . sock = _ssl_wrap_socket (
self . sock = _ssl_wrap_socket (
sock , self . key_file , self . cert_file ,
sock , self . key_file , self . cert_file ,
self . disable_ssl_certificate_validation , self . ca_certs )
self . disable_ssl_certificate_validation , self . ca_certs ,
self . ssl_version , self . host )
if self . debuglevel > 0 :
if self . debuglevel > 0 :
print " connect: ( %s , %s ) " % ( self . host , self . port )
print ( " connect: ( %s , %s ) " % ( self . host , self . port ) )
if use_proxy :
if use_proxy :
print " proxy: %s " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass ) )
print ( " proxy: %s " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers ) ) )
if not self . disable_ssl_certificate_validation :
if not self . disable_ssl_certificate_validation :
cert = self . sock . getpeercert ( )
cert = self . sock . getpeercert ( )
hostname = self . host . split ( ' : ' , 0 ) [ 0 ]
hostname = self . host . split ( ' : ' , 0 ) [ 0 ]
@ -1045,7 +1113,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
raise CertificateHostnameMismatch (
raise CertificateHostnameMismatch (
' Server presented certificate that does not match '
' Server presented certificate that does not match '
' host %s : %s ' % ( hostname , cert ) , hostname , cert )
' host %s : %s ' % ( hostname , cert ) , hostname , cert )
except ssl_SSLError , e :
except ( ssl_SSLError , ssl_CertificateError , CertificateHostnameMismatch ) as e :
if sock :
if sock :
sock . close ( )
sock . close ( )
if self . sock :
if self . sock :
@ -1055,17 +1123,17 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
# to get at more detailed error information, in particular
# to get at more detailed error information, in particular
# whether the error is due to certificate validation or
# whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch).
# something else (such as SSL protocol mismatch).
if e . errno == ssl . SSL_ERROR_SSL :
if getattr ( e , ' errno ' , None ) == ssl . SSL_ERROR_SSL :
raise SSLHandshakeError ( e )
raise SSLHandshakeError ( e )
else :
else :
raise
raise
except ( socket . timeout , socket . gaierror ) :
except ( socket . timeout , socket . gaierror ) :
raise
raise
except socket . error , msg :
except socket . error as msg :
if self . debuglevel > 0 :
if self . debuglevel > 0 :
print " connect fail: ( %s , %s ) " % ( self . host , self . port )
print ( " connect fail: ( %s , %s ) " % ( self . host , self . port ) )
if use_proxy :
if use_proxy :
print " proxy: %s " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass ) )
print ( " proxy: %s " % str ( ( proxy_host , proxy_port , proxy_rdns , proxy_user , proxy_pass , proxy_headers ) ) )
if self . sock :
if self . sock :
self . sock . close ( )
self . sock . close ( )
self . sock = None
self . sock = None
@ -1079,63 +1147,73 @@ SCHEME_TO_CONNECTION = {
' https ' : HTTPSConnectionWithTimeout
' https ' : HTTPSConnectionWithTimeout
}
}
def _new_fixed_fetch ( validate_certificate ) :
def fixed_fetch ( url , payload = None , method = " GET " , headers = { } ,
allow_truncated = False , follow_redirects = True ,
deadline = None ) :
if deadline is None :
deadline = socket . getdefaulttimeout ( )
return fetch ( url , payload = payload , method = method , headers = headers ,
allow_truncated = allow_truncated ,
follow_redirects = follow_redirects , deadline = deadline ,
validate_certificate = validate_certificate )
return fixed_fetch
class AppEngineHttpConnection ( httplib . HTTPConnection ) :
""" Use httplib on App Engine, but compensate for its weirdness.
The parameters key_file , cert_file , proxy_info , ca_certs ,
disable_ssl_certificate_validation , and ssl_version are all dropped on
the ground .
"""
def __init__ ( self , host , port = None , key_file = None , cert_file = None ,
strict = None , timeout = None , proxy_info = None , ca_certs = None ,
disable_ssl_certificate_validation = False ,
ssl_version = None ) :
httplib . HTTPConnection . __init__ ( self , host , port = port ,
strict = strict , timeout = timeout )
class AppEngineHttpsConnection ( httplib . HTTPSConnection ) :
""" Same as AppEngineHttpConnection, but for HTTPS URIs.
The parameters proxy_info , ca_certs , disable_ssl_certificate_validation ,
and ssl_version are all dropped on the ground .
"""
def __init__ ( self , host , port = None , key_file = None , cert_file = None ,
strict = None , timeout = None , proxy_info = None , ca_certs = None ,
disable_ssl_certificate_validation = False ,
ssl_version = None ) :
httplib . HTTPSConnection . __init__ ( self , host , port = port ,
key_file = key_file ,
cert_file = cert_file , strict = strict ,
timeout = timeout )
self . _fetch = _new_fixed_fetch (
not disable_ssl_certificate_validation )
# Use a different connection object for Google App Engine
# Use a different connection object for Google App Engine
try :
try :
try :
server_software = os . environ . get ( ' SERVER_SOFTWARE ' )
from google . appengine . api import apiproxy_stub_map
if not server_software :
if apiproxy_stub_map . apiproxy . GetStub ( ' urlfetch ' ) is None :
raise NotRunningAppEngineEnvironment ( )
raise ImportError # Bail out; we're not actually running on App Engine.
elif not ( server_software . startswith ( ' Google App Engine/ ' ) or
from google . appengine . api . urlfetch import fetch
server_software . startswith ( ' Development/ ' ) ) :
from google . appengine . api . urlfetch import InvalidURLError
raise NotRunningAppEngineEnvironment ( )
except ( ImportError , AttributeError ) :
from google3 . apphosting . api import apiproxy_stub_map
from google . appengine . api import apiproxy_stub_map
if apiproxy_stub_map . apiproxy . GetStub ( ' urlfetch ' ) is None :
if apiproxy_stub_map . apiproxy . GetStub ( ' urlfetch ' ) is None :
raise ImportError # Bail out; we're not actually running on App Engine.
raise ImportError # Bail out; we're not actually running on App Engine.
from google3 . apphosting . api . urlfetch import fetch
from google . appengine . api . urlfetch import fetch
from google3 . apphosting . api . urlfetch import InvalidURLError
from google . appengine . api . urlfetch import InvalidURLError
def _new_fixed_fetch ( validate_certificate ) :
def fixed_fetch ( url , payload = None , method = " GET " , headers = { } ,
allow_truncated = False , follow_redirects = True ,
deadline = None ) :
if deadline is None :
deadline = socket . getdefaulttimeout ( ) or 5
return fetch ( url , payload = payload , method = method , headers = headers ,
allow_truncated = allow_truncated ,
follow_redirects = follow_redirects , deadline = deadline ,
validate_certificate = validate_certificate )
return fixed_fetch
class AppEngineHttpConnection ( httplib . HTTPConnection ) :
""" Use httplib on App Engine, but compensate for its weirdness.
The parameters key_file , cert_file , proxy_info , ca_certs , and
disable_ssl_certificate_validation are all dropped on the ground .
"""
def __init__ ( self , host , port = None , key_file = None , cert_file = None ,
strict = None , timeout = None , proxy_info = None , ca_certs = None ,
disable_ssl_certificate_validation = False ) :
httplib . HTTPConnection . __init__ ( self , host , port = port ,
strict = strict , timeout = timeout )
class AppEngineHttpsConnection ( httplib . HTTPSConnection ) :
""" Same as AppEngineHttpConnection, but for HTTPS URIs. """
def __init__ ( self , host , port = None , key_file = None , cert_file = None ,
strict = None , timeout = None , proxy_info = None , ca_certs = None ,
disable_ssl_certificate_validation = False ) :
httplib . HTTPSConnection . __init__ ( self , host , port = port ,
key_file = key_file ,
cert_file = cert_file , strict = strict ,
timeout = timeout )
self . _fetch = _new_fixed_fetch (
not disable_ssl_certificate_validation )
# Update the connection classes to use the Googel App Engine specific ones.
# Update the connection classes to use the Googel App Engine specific ones.
SCHEME_TO_CONNECTION = {
SCHEME_TO_CONNECTION = {
' http ' : AppEngineHttpConnection ,
' http ' : AppEngineHttpConnection ,
' https ' : AppEngineHttpsConnection
' https ' : AppEngineHttpsConnection
}
}
except ( ImportError , AttributeError ):
except ( ImportError , AttributeError , NotRunningAppEngineEnvironment ) :
pass
pass
@ -1155,7 +1233,8 @@ class Http(object):
"""
"""
def __init__ ( self , cache = None , timeout = None ,
def __init__ ( self , cache = None , timeout = None ,
proxy_info = proxy_info_from_environment ,
proxy_info = proxy_info_from_environment ,
ca_certs = None , disable_ssl_certificate_validation = False ) :
ca_certs = None , disable_ssl_certificate_validation = False ,
ssl_version = None ) :
""" If ' cache ' is a string then it is used as a directory name for
""" If ' cache ' is a string then it is used as a directory name for
a disk cache . Otherwise it must be an object that supports the
a disk cache . Otherwise it must be an object that supports the
same interface as FileCache .
same interface as FileCache .
@ -1178,11 +1257,14 @@ class Http(object):
If disable_ssl_certificate_validation is true , SSL cert validation will
If disable_ssl_certificate_validation is true , SSL cert validation will
not be performed .
not be performed .
By default , ssl . PROTOCOL_SSLv23 will be used for the ssl version .
"""
"""
self . proxy_info = proxy_info
self . proxy_info = proxy_info
self . ca_certs = ca_certs
self . ca_certs = ca_certs
self . disable_ssl_certificate_validation = \
self . disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
disable_ssl_certificate_validation
self . ssl_version = ssl_version
# Map domain name to an httplib connection
# Map domain name to an httplib connection
self . connections = { }
self . connections = { }
@ -1243,7 +1325,7 @@ class Http(object):
challenges = _parse_www_authenticate ( response , ' www-authenticate ' )
challenges = _parse_www_authenticate ( response , ' www-authenticate ' )
for cred in self . credentials . iter ( host ) :
for cred in self . credentials . iter ( host ) :
for scheme in AUTH_SCHEME_ORDER :
for scheme in AUTH_SCHEME_ORDER :
if challenges. has_key ( scheme ) :
if scheme in challenges :
yield AUTH_SCHEME_CLASSES [ scheme ] ( cred , host , request_uri , headers , response , content , self )
yield AUTH_SCHEME_CLASSES [ scheme ] ( cred , host , request_uri , headers , response , content , self )
def add_credentials ( self , name , password , domain = " " ) :
def add_credentials ( self , name , password , domain = " " ) :
@ -1279,15 +1361,16 @@ class Http(object):
except ssl_SSLError :
except ssl_SSLError :
conn . close ( )
conn . close ( )
raise
raise
except socket . error , e :
except socket . error as e :
err = 0
err = 0
if hasattr ( e , ' args ' ) :
if hasattr ( e , ' args ' ) :
err = getattr ( e , ' args ' ) [ 0 ]
err = getattr ( e , ' args ' ) [ 0 ]
else :
else :
err = e . errno
err = e . errno
if err == errno . ECONNREFUSED : # Connection refused
raise
if err in ( errno . ENETUNREACH , errno . EADDRNOTAVAIL ) and i < RETRIES :
if err in ( errno . ENETUNREACH , errno . EADDRNOTAVAIL ) and i < RETRIES :
continue # retry on potentially transient socket errors
continue # retry on potentially transient socket errors
raise
except httplib . HTTPException :
except httplib . HTTPException :
# Just because the server closed the connection doesn't apparently mean
# Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response.
# that the server didn't send a response.
@ -1370,29 +1453,29 @@ class Http(object):
# Pick out the location header and basically start from the beginning
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
# remembering first to strip the ETag header and decrement our 'depth'
if redirections :
if redirections :
if not response . has_key ( ' location ' ) and response . status != 300 :
if ' location ' not in response and response . status != 300 :
raise RedirectMissingLocation ( _ ( " Redirected but the response is missing a Location: header. " ) , response , content )
raise RedirectMissingLocation ( _ ( " Redirected but the response is missing a Location: header. " ) , response , content )
# Fix-up relative redirects (which violate an RFC 2616 MUST)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
if response . has_key ( ' location ' ) :
if ' location ' in response :
location = response [ ' location ' ]
location = response [ ' location ' ]
( scheme , authority , path , query , fragment ) = parse_uri ( location )
( scheme , authority , path , query , fragment ) = parse_uri ( location )
if authority == None :
if authority == None :
response [ ' location ' ] = urlparse . urljoin ( absolute_uri , location )
response [ ' location ' ] = urlparse . urljoin ( absolute_uri , location )
if response . status == 301 and method in [ " GET " , " HEAD " ] :
if response . status == 301 and method in [ " GET " , " HEAD " ] :
response [ ' -x-permanent-redirect-url ' ] = response [ ' location ' ]
response [ ' -x-permanent-redirect-url ' ] = response [ ' location ' ]
if not response . has_key ( ' content-location ' ) :
if ' content-location ' not in response :
response [ ' content-location ' ] = absolute_uri
response [ ' content-location ' ] = absolute_uri
_updateCache ( headers , response , content , self . cache , cachekey )
_updateCache ( headers , response , content , self . cache , cachekey )
if headers . has_key ( ' if-none-match ' ) :
if ' if-none-match ' in headers :
del headers [ ' if-none-match ' ]
del headers [ ' if-none-match ' ]
if headers . has_key ( ' if-modified-since ' ) :
if ' if-modified-since ' in headers :
del headers [ ' if-modified-since ' ]
del headers [ ' if-modified-since ' ]
if ' authorization ' in headers and not self . forward_authorization_headers :
if ' authorization ' in headers and not self . forward_authorization_headers :
del headers [ ' authorization ' ]
del headers [ ' authorization ' ]
if response . has_key ( ' location ' ) :
if ' location ' in response :
location = response [ ' location ' ]
location = response [ ' location ' ]
old_response = copy . deepcopy ( response )
old_response = copy . deepcopy ( response )
if not old_response . has_key ( ' content-location ' ) :
if ' content-location ' not in old_response :
old_response [ ' content-location ' ] = absolute_uri
old_response [ ' content-location ' ] = absolute_uri
redirect_method = method
redirect_method = method
if response . status in [ 302 , 303 ] :
if response . status in [ 302 , 303 ] :
@ -1407,7 +1490,7 @@ class Http(object):
raise RedirectLimit ( " Redirected more times than rediection_limit allows. " , response , content )
raise RedirectLimit ( " Redirected more times than rediection_limit allows. " , response , content )
elif response . status in [ 200 , 203 ] and method in [ " GET " , " HEAD " ] :
elif response . status in [ 200 , 203 ] and method in [ " GET " , " HEAD " ] :
# Don't cache 206's since we aren't going to handle byte range requests
# Don't cache 206's since we aren't going to handle byte range requests
if not response . has_key ( ' content-location ' ) :
if ' content-location ' not in response :
response [ ' content-location ' ] = absolute_uri
response [ ' content-location ' ] = absolute_uri
_updateCache ( headers , response , content , self . cache , cachekey )
_updateCache ( headers , response , content , self . cache , cachekey )
@ -1449,7 +1532,7 @@ class Http(object):
else :
else :
headers = self . _normalize_headers ( headers )
headers = self . _normalize_headers ( headers )
if not headers . has_key ( ' user-agent ' ) :
if ' user-agent ' not in headers :
headers [ ' user-agent ' ] = " Python-httplib2/ %s (gzip) " % __version__
headers [ ' user-agent ' ] = " Python-httplib2/ %s (gzip) " % __version__
uri = iri2uri ( uri )
uri = iri2uri ( uri )
@ -1477,14 +1560,16 @@ class Http(object):
proxy_info = proxy_info ,
proxy_info = proxy_info ,
ca_certs = self . ca_certs ,
ca_certs = self . ca_certs ,
disable_ssl_certificate_validation =
disable_ssl_certificate_validation =
self . disable_ssl_certificate_validation )
self . disable_ssl_certificate_validation ,
ssl_version = self . ssl_version )
else :
else :
conn = self . connections [ conn_key ] = connection_type (
conn = self . connections [ conn_key ] = connection_type (
authority , timeout = self . timeout ,
authority , timeout = self . timeout ,
proxy_info = proxy_info ,
proxy_info = proxy_info ,
ca_certs = self . ca_certs ,
ca_certs = self . ca_certs ,
disable_ssl_certificate_validation =
disable_ssl_certificate_validation =
self . disable_ssl_certificate_validation )
self . disable_ssl_certificate_validation ,
ssl_version = self . ssl_version )
else :
else :
conn = self . connections [ conn_key ] = connection_type (
conn = self . connections [ conn_key ] = connection_type (
authority , timeout = self . timeout ,
authority , timeout = self . timeout ,
@ -1518,7 +1603,7 @@ class Http(object):
else :
else :
cachekey = None
cachekey = None
if method in self . optimistic_concurrency_methods and self . cache and info . has_key ( ' etag ' ) and not self . ignore_etag and ' if-match ' not in headers :
if method in self . optimistic_concurrency_methods and self . cache and ' etag ' in info and not self . ignore_etag and ' if-match ' not in headers :
# http://www.w3.org/1999/04/Editing/
# http://www.w3.org/1999/04/Editing/
headers [ ' if-match ' ] = info [ ' etag ' ]
headers [ ' if-match ' ] = info [ ' etag ' ]
@ -1539,7 +1624,7 @@ class Http(object):
break
break
if cached_value and method in [ " GET " , " HEAD " ] and self . cache and ' range ' not in headers :
if cached_value and method in [ " GET " , " HEAD " ] and self . cache and ' range ' not in headers :
if info . has_key ( ' -x-permanent-redirect-url ' ) :
if ' -x-permanent-redirect-url ' in info :
# Should cached permanent redirects be counted in our redirection count? For now, yes.
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections < = 0 :
if redirections < = 0 :
raise RedirectLimit ( " Redirected more times than rediection_limit allows. " , { } , " " )
raise RedirectLimit ( " Redirected more times than rediection_limit allows. " , { } , " " )
@ -1569,9 +1654,9 @@ class Http(object):
return ( response , content )
return ( response , content )
if entry_disposition == " STALE " :
if entry_disposition == " STALE " :
if info . has_key ( ' etag ' ) and not self . ignore_etag and not ' if-none-match ' in headers :
if ' etag ' in info and not self . ignore_etag and not ' if-none-match ' in headers :
headers [ ' if-none-match ' ] = info [ ' etag ' ]
headers [ ' if-none-match ' ] = info [ ' etag ' ]
if info . has_key ( ' last-modified ' ) and not ' last-modified ' in headers :
if ' last-modified ' in info and not ' last-modified ' in headers :
headers [ ' if-modified-since ' ] = info [ ' last-modified ' ]
headers [ ' if-modified-since ' ] = info [ ' last-modified ' ]
elif entry_disposition == " TRANSPARENT " :
elif entry_disposition == " TRANSPARENT " :
pass
pass
@ -1601,13 +1686,13 @@ class Http(object):
content = new_content
content = new_content
else :
else :
cc = _parse_cache_control ( headers )
cc = _parse_cache_control ( headers )
if cc . has_key ( ' only-if-cached ' ) :
if ' only-if-cached ' in cc :
info [ ' status ' ] = ' 504 '
info [ ' status ' ] = ' 504 '
response = Response ( info )
response = Response ( info )
content = " "
content = " "
else :
else :
( response , content ) = self . _request ( conn , authority , uri , request_uri , method , body , headers , redirections , cachekey )
( response , content ) = self . _request ( conn , authority , uri , request_uri , method , body , headers , redirections , cachekey )
except Exception , e :
except Exception as e :
if self . force_exception_to_status_code :
if self . force_exception_to_status_code :
if isinstance ( e , HttpLib2ErrorWithResponse ) :
if isinstance ( e , HttpLib2ErrorWithResponse ) :
response = e . response
response = e . response
@ -1688,9 +1773,8 @@ class Response(dict):
self . status = int ( self . get ( ' status ' , self . status ) )
self . status = int ( self . get ( ' status ' , self . status ) )
self . reason = self . get ( ' reason ' , self . reason )
self . reason = self . get ( ' reason ' , self . reason )
def __getattr__ ( self , name ) :
def __getattr__ ( self , name ) :
if name == ' dict ' :
if name == ' dict ' :
return self
return self
else :
else :
raise AttributeError , name
raise AttributeError (name )