author     Haibo Huang <hhb@google.com>  2019-02-01 23:51:22 -0800
committer  Haibo Huang <hhb@google.com>  2019-02-20 21:55:29 +0000
commit     993dd23223e5e13e810289bf2ddc25ae59287630 (patch)
tree       554d1e572b72a17543956dbb9baaab3cee24411d
parent     636f3149dd1af444c613da333ff067d83cf9c3b3 (diff)
parent     86c3b4ea8614cbbddd86fc556781f2f79cd7c2ac (diff)
Upgrade python/httplib2 to v0.12.0
Test: build
Exempt-From-Owner-Approval: owner can not +2
Change-Id: I7dcbc632d38175ef3e8a60584b085b98fbd9a004
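The headline change in 0.12.0 is CA bundle discovery: the hard-coded cacerts.txt path is replaced by httplib2.certs.where(), which honors the HTTPLIB2_CA_CERTS environment variable and falls back to certifi when it is installed (see the CHANGELOG hunk and the new tests/test_cacerts_from_env.py below). A minimal sketch of the new lookup, assuming a hypothetical bundle at ./corp-roots.pem:

import os

# Assumption for illustration: a private CA bundle at this path.
# The path should point at an existing PEM file.
os.environ["HTTPLIB2_CA_CERTS"] = "./corp-roots.pem"

# Set the variable before importing: httplib2 resolves CA_CERTS once,
# at import time, via certs.where() (env var, then certifi if
# installed, then the cacerts.txt bundled with httplib2).
import httplib2

print(httplib2.CA_CERTS)  # -> ./corp-roots.pem

h = httplib2.Http()  # ca_certs=None defaults to the resolved CA_CERTS
resp, content = h.request("https://example.org/")
print(resp.status)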
-rw-r--r--  .gitignore  58
-rw-r--r--  .travis.yml  1
-rw-r--r--  CHANGELOG  19
-rw-r--r--  MANIFEST.in  1
-rw-r--r--  METADATA  13
-rw-r--r--  Makefile  4
-rwxr-xr-x  doc/conf.py  262
-rw-r--r--  index.html  2
-rw-r--r--  python2/httplib2/__init__.py  1471
-rw-r--r--  python2/httplib2/certs.py  42
-rw-r--r--  python2/httplib2/iri2uri.py  57
-rw-r--r--  python2/httplib2/socks.py  170
-rw-r--r--  python2/httplib2/test/functional/test_proxies.py  52
-rw-r--r--  python2/httplib2/test/miniserver.py  15
-rw-r--r--  python2/httplib2/test/smoke_test.py  8
-rw-r--r--  python2/httplib2/test/test_no_socket.py  9
-rw-r--r--  python2/httplib2/test/test_ssl_context.py  23
-rwxr-xr-x  python2/httplib2test.py  1236
-rwxr-xr-x  python2/httplib2test_appengine.py  108
-rwxr-xr-x  python2/ssl_protocol_test.py  100
-rw-r--r--  python3/httplib2/__init__.py  1373
-rw-r--r--  python3/httplib2/certs.py  42
-rw-r--r--  python3/httplib2/iri2uri.py  234
-rw-r--r--  python3/httplib2/socks.py  182
-rwxr-xr-x  python3/httplib2test.py  3569
-rw-r--r--  requirements-test.txt  1
-rwxr-xr-x  setup.py  77
-rw-r--r--  tests/__init__.py  348
-rw-r--r--  tests/test_auth.py  329
-rw-r--r--  tests/test_cacerts_from_env.py  72
-rw-r--r--  tests/test_cache.py  317
-rw-r--r--  tests/test_encoding.py  62
-rw-r--r--  tests/test_external.py  41
-rw-r--r--  tests/test_http.py  411
-rw-r--r--  tests/test_other.py  100
-rw-r--r--  tests/test_proxy.py  142
-rw-r--r--  tests/test_uri.py  141
37 files changed, 6636 insertions, 4456 deletions
diff --git a/.gitignore b/.gitignore
index 0a800f2..af48444 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,42 +1,30 @@
-*.py[cod]
-venv*/
-
-# C extensions
-*.so
-
-# Packages
*.egg
*.egg-info
-dist
-build
-eggs
-parts
+*.mo
+*.py[cod]
+*.so
+.DS_Store
+.cache/
+.coverage
+.hg
+.installed.cfg
+.mr.developer.cfg
+.project
+.pydevproject
+.pytest_cache/*
+.tox
+_httplib2_test_cache/*
bin
-var
-sdist
+build
+default.profraw
develop-eggs
-.installed.cfg
+dist
+eggs
lib
lib64
-
-# Installer logs
-pip-log.txt
-
-# Unit test / coverage reports
-.coverage
-.tox
nosetests.xml
-
-# Translations
-*.mo
-
-# Mr Developer
-.mr.developer.cfg
-.project
-.pydevproject
-
-# Mercurial
-.hg
-
-# httplib2
-.cache
+parts
+pip-log.txt
+sdist
+var
+venv*/
diff --git a/.travis.yml b/.travis.yml
index 259a27f..8e0a165 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -13,7 +13,6 @@ env:
- pip_install_common='pip>=9.0 setuptools>=36.2 wheel>=0.30'
python:
- 2.7
- - 3.3
- 3.4
- 3.5
- 3.6
diff --git a/CHANGELOG b/CHANGELOG
index c04f914..fd29f9f 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,22 @@
+0.12.0
+
+ Drop support for Python 3.3
+
+ ca_certs from environment HTTPLIB2_CA_CERTS or certifi
+ https://github.com/httplib2/httplib2/pull/117
+
+ PROXY_TYPE_HTTP with non-empty user/pass raised TypeError: bytes required
+ https://github.com/httplib2/httplib2/pull/115
+
+ Revert http:443->https workaround
+ https://github.com/httplib2/httplib2/issues/112
+
+ eliminate connection pool read race
+ https://github.com/httplib2/httplib2/pull/110
+
+ cache: stronger safename
+ https://github.com/httplib2/httplib2/pull/101
+
0.11.3
No changes, just reupload of 0.11.2 after fixing automatic release conditions in Travis.
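Of the fixes listed in the CHANGELOG hunk above, the PROXY_TYPE_HTTP one (#115) is easiest to see from the calling side: constructing an HTTP proxy with a username and password used to crash while the bundled socks module assembled the proxy handshake. A sketch of the call this release repairs, with placeholder proxy coordinates and credentials:

import httplib2
from httplib2 import socks

# Placeholder endpoint and credentials, for illustration only.
proxy_info = httplib2.ProxyInfo(
    proxy_type=socks.PROXY_TYPE_HTTP,
    proxy_host="proxy.example.com",
    proxy_port=8080,
    proxy_user="alice",
    proxy_pass="s3cret",
)

h = httplib2.Http(proxy_info=proxy_info)
# Before 0.12.0 this raised "TypeError: bytes required" while the
# credentials were encoded for the proxy; with the fix the request
# goes through the proxy as expected.
resp, content = h.request("http://example.org/")
print(resp.status)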
diff --git a/MANIFEST.in b/MANIFEST.in
index b8bb4a9..12c4cc7 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,4 @@
recursive-include python2 *.py *.txt
recursive-include python3 *.py *.txt
include python2/httplib2/test/*.txt
+include requirements*.txt
diff --git a/METADATA b/METADATA
index 57f8ac2..367afc4 100644
--- a/METADATA
+++ b/METADATA
@@ -1,8 +1,5 @@
name: "httplib2"
-description:
- "httplib2 is a comprehensive HTTP client library, httplib2.py supports "
- "many features left out of other HTTP libraries."
-
+description: "httplib2 is a comprehensive HTTP client library, httplib2.py supports many features left out of other HTTP libraries."
third_party {
url {
type: HOMEPAGE
@@ -12,6 +9,10 @@ third_party {
type: GIT
value: "https://github.com/httplib2/httplib2/"
}
- version: "0.11.3"
- last_upgrade_date { year: 2018 month: 6 day: 4 }
+ version: "v0.12.0"
+ last_upgrade_date {
+ year: 2019
+ month: 2
+ day: 1
+ }
}
diff --git a/Makefile b/Makefile
index ed8c013..96fabaa 100644
--- a/Makefile
+++ b/Makefile
@@ -5,11 +5,11 @@ tests:
-cd python2 && python2.6 httplib2test_appengine.py
cd python2 && python2.7 httplib2test.py
cd python2 && python2.7 httplib2test_appengine.py
- cd python3 && python3.2 httplib2test.py
+ cd python3 && python3 httplib2test.py
VERSION = $(shell python setup.py --version)
INLINE_VERSION = $(shell cd python2; python -c "import httplib2;print httplib2.__version__")
-INLINE_VERSION_3 = $(shell cd python3; ~/bin/python3.2 -c "import httplib2;print(httplib2.__version__)")
+INLINE_VERSION_3 = $(shell cd python3; ~/bin/python3 -c "import httplib2;print(httplib2.__version__)")
DST = dist/httplib2-$(VERSION)
release:
diff --git a/doc/conf.py b/doc/conf.py
index b5622b4..e6eda6d 100755
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -1,132 +1,130 @@
-# -*- coding: utf-8 -*-
-#
-# httplib2 documentation build configuration file, created by
-# sphinx-quickstart on Thu Mar 27 16:07:14 2008.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
-#
-# All configuration values have a default value; values that are commented out
-# serve to show the default value.
-
-import sys
-
-# If your extensions are in another directory, add it here.
-#sys.path.append('some/directory')
-
-# General configuration
-# ---------------------
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-#extensions = []
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['.templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General substitutions.
-project = 'httplib2'
-copyright = '2008, Joe Gregorio'
-
-# The default replacements for |version| and |release|, also used in various
-# other places throughout the built documents.
-#
-# The short X.Y version.
-version = '0.4'
-# The full version, including alpha/beta/rc tags.
-release = '0.4'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-today_fmt = '%B %d, %Y'
-
-# List of documents that shouldn't be included in the build.
-#unused_docs = []
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-
-# Options for HTML output
-# -----------------------
-
-# The style sheet to use for HTML and HTML Help pages. A file of that name
-# must exist either in Sphinx' static/ path, or in one of the custom paths
-# given in html_static_path.
-html_style = 'default.css'
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['.static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Content template for the index page.
-#html_index = ''
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_use_modindex = True
-
-# If true, the reST sources are included in the HTML build as _sources/<name>.
-#html_copy_source = True
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'httplib2doc'
-
-
-# Options for LaTeX output
-# ------------------------
-
-# The paper size ('letter' or 'a4').
-#latex_paper_size = 'letter'
-
-# The font size ('10pt', '11pt' or '12pt').
-#latex_font_size = '10pt'
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, document class [howto/manual]).
-#latex_documents = []
-
-# Additional stuff for the LaTeX preamble.
-#latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_use_modindex = True
+# -*- coding: utf-8 -*-
+#
+# httplib2 documentation build configuration file, created by
+# sphinx-quickstart on Thu Mar 27 16:07:14 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import sys
+
+# If your extensions are in another directory, add it here.
+# sys.path.append('some/directory')
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+# extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = [".templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The master toctree document.
+master_doc = "index"
+
+# General substitutions.
+project = "httplib2"
+copyright = "2008, Joe Gregorio"
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+#
+# The short X.Y version.
+version = "0.4"
+# The full version, including alpha/beta/rc tags.
+release = "0.4"
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = "%B %d, %Y"
+
+# List of documents that shouldn't be included in the build.
+# unused_docs = []
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+html_style = "default.css"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = [".static"]
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = "%b %d, %Y"
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Content template for the index page.
+# html_index = ''
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_use_modindex = True
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+# html_copy_source = True
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "httplib2doc"
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+# latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+# latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+# latex_documents = []
+
+# Additional stuff for the LaTeX preamble.
+# latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_use_modindex = True
diff --git a/index.html b/index.html
index 5a98539..8b8eeb9 100644
--- a/index.html
+++ b/index.html
@@ -112,7 +112,7 @@ must not be used when handling this request.
<h3>Requirements</h3>
-<p>Requires Python 2.3 or later. Does not require
+<p>Requires Python 2.7, 3.4, or later. Does not require
any libraries beyond what is found in the core library.</p>
<h3>Download/Installation</h3>
diff --git a/python2/httplib2/__init__.py b/python2/httplib2/__init__.py
index 18b013d..b89f2fe 100644
--- a/python2/httplib2/__init__.py
+++ b/python2/httplib2/__init__.py
@@ -1,55 +1,53 @@
-from __future__ import print_function
-"""
-httplib2
-
-A caching http interface that supports ETags and gzip
-to conserve bandwidth.
-
-Requires Python 2.3 or later
-
-Changelog:
-2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
+"""Small, fast HTTP client library for Python.
+Features persistent connections, cache, and Google App Engine Standard
+Environment support.
"""
+from __future__ import print_function
+
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
- "James Antill",
- "Xavier Verges Farrero",
- "Jonathan Feinberg",
- "Blair Zajac",
- "Sam Ruby",
- "Louis Nyffenegger",
- "Alex Yu"]
+__contributors__ = [
+ "Thomas Broyer (t.broyer@ltgt.net)",
+ "James Antill",
+ "Xavier Verges Farrero",
+ "Jonathan Feinberg",
+ "Blair Zajac",
+ "Sam Ruby",
+ "Louis Nyffenegger",
+ "Alex Yu",
+]
__license__ = "MIT"
-__version__ = '0.11.3'
+__version__ = '0.12.0'
-import re
-import sys
+import base64
+import calendar
+import copy
import email
-import email.Utils
-import email.Message
import email.FeedParser
-import StringIO
+import email.Message
+import email.Utils
+import errno
import gzip
-import zlib
import httplib
-import urlparse
-import urllib
-import base64
import os
-import copy
-import calendar
-import time
import random
-import errno
+import re
+import StringIO
+import sys
+import time
+import urllib
+import urlparse
+import zlib
+
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
+
_sha = sha.new
_md5 = md5.new
import hmac
@@ -73,12 +71,13 @@ try:
except ImportError:
pass
if ssl is not None:
- ssl_SSLError = getattr(ssl, 'SSLError', None)
- ssl_CertificateError = getattr(ssl, 'CertificateError', None)
+ ssl_SSLError = getattr(ssl, "SSLError", None)
+ ssl_CertificateError = getattr(ssl, "CertificateError", None)
-def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation,
- ca_certs, ssl_version, hostname):
+def _ssl_wrap_socket(
+ sock, key_file, cert_file, disable_validation, ca_certs, ssl_version, hostname
+):
if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
@@ -86,54 +85,70 @@ def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation,
if ssl_version is None:
ssl_version = ssl.PROTOCOL_SSLv23
- if hasattr(ssl, 'SSLContext'): # Python 2.7.9
+ if hasattr(ssl, "SSLContext"): # Python 2.7.9
context = ssl.SSLContext(ssl_version)
context.verify_mode = cert_reqs
- context.check_hostname = (cert_reqs != ssl.CERT_NONE)
+ context.check_hostname = cert_reqs != ssl.CERT_NONE
if cert_file:
context.load_cert_chain(cert_file, key_file)
if ca_certs:
context.load_verify_locations(ca_certs)
return context.wrap_socket(sock, server_hostname=hostname)
else:
- return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
- cert_reqs=cert_reqs, ca_certs=ca_certs,
- ssl_version=ssl_version)
-
-
-def _ssl_wrap_socket_unsupported(sock, key_file, cert_file, disable_validation,
- ca_certs, ssl_version, hostname):
+ return ssl.wrap_socket(
+ sock,
+ keyfile=key_file,
+ certfile=cert_file,
+ cert_reqs=cert_reqs,
+ ca_certs=ca_certs,
+ ssl_version=ssl_version,
+ )
+
+
+def _ssl_wrap_socket_unsupported(
+ sock, key_file, cert_file, disable_validation, ca_certs, ssl_version, hostname
+):
if not disable_validation:
raise CertificateValidationUnsupported(
- "SSL certificate validation is not supported without "
- "the ssl module installed. To avoid this error, install "
- "the ssl module, or explicity disable validation.")
+ "SSL certificate validation is not supported without "
+ "the ssl module installed. To avoid this error, install "
+ "the ssl module, or explicity disable validation."
+ )
ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
+
if ssl is None:
_ssl_wrap_socket = _ssl_wrap_socket_unsupported
-
-if sys.version_info >= (2,3):
+if sys.version_info >= (2, 3):
from iri2uri import iri2uri
else:
+
def iri2uri(uri):
return uri
-def has_timeout(timeout): # python 2.6
- if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
- return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
- return (timeout is not None)
-__all__ = [
- 'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
- 'RedirectLimit', 'FailedToDecompressContent',
- 'UnimplementedDigestAuthOptionError',
- 'UnimplementedHmacDigestAuthOptionError',
- 'debuglevel', 'ProxiesUnavailableError']
+def has_timeout(timeout): # python 2.6
+ if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"):
+ return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT
+ return timeout is not None
+__all__ = [
+ "Http",
+ "Response",
+ "ProxyInfo",
+ "HttpLib2Error",
+ "RedirectMissingLocation",
+ "RedirectLimit",
+ "FailedToDecompressContent",
+ "UnimplementedDigestAuthOptionError",
+ "UnimplementedHmacDigestAuthOptionError",
+ "debuglevel",
+ "ProxiesUnavailableError",
+]
+
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
@@ -141,7 +156,8 @@ debuglevel = 0
RETRIES = 2
# Python 2.3 support
-if sys.version_info < (2,4):
+if sys.version_info < (2, 4):
+
def sorted(seq):
seq.sort()
return seq
@@ -154,11 +170,15 @@ def HTTPResponse__getheaders(self):
raise httplib.ResponseNotReady()
return self.msg.items()
-if not hasattr(httplib.HTTPResponse, 'getheaders'):
+
+if not hasattr(httplib.HTTPResponse, "getheaders"):
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
+
# All exceptions raised here derive from HttpLib2Error
-class HttpLib2Error(Exception): pass
+class HttpLib2Error(Exception):
+ pass
+
# Some exceptions can be caught and optionally
# be turned back into responses.
@@ -168,26 +188,65 @@ class HttpLib2ErrorWithResponse(HttpLib2Error):
self.content = content
HttpLib2Error.__init__(self, desc)
-class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
-class RedirectLimit(HttpLib2ErrorWithResponse): pass
-class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
-class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-
-class MalformedHeader(HttpLib2Error): pass
-class RelativeURIError(HttpLib2Error): pass
-class ServerNotFoundError(HttpLib2Error): pass
-class ProxiesUnavailableError(HttpLib2Error): pass
-class CertificateValidationUnsupported(HttpLib2Error): pass
-class SSLHandshakeError(HttpLib2Error): pass
-class NotSupportedOnThisPlatform(HttpLib2Error): pass
+
+class RedirectMissingLocation(HttpLib2ErrorWithResponse):
+ pass
+
+
+class RedirectLimit(HttpLib2ErrorWithResponse):
+ pass
+
+
+class FailedToDecompressContent(HttpLib2ErrorWithResponse):
+ pass
+
+
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
+ pass
+
+
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
+ pass
+
+
+class MalformedHeader(HttpLib2Error):
+ pass
+
+
+class RelativeURIError(HttpLib2Error):
+ pass
+
+
+class ServerNotFoundError(HttpLib2Error):
+ pass
+
+
+class ProxiesUnavailableError(HttpLib2Error):
+ pass
+
+
+class CertificateValidationUnsupported(HttpLib2Error):
+ pass
+
+
+class SSLHandshakeError(HttpLib2Error):
+ pass
+
+
+class NotSupportedOnThisPlatform(HttpLib2Error):
+ pass
+
+
class CertificateHostnameMismatch(SSLHandshakeError):
def __init__(self, desc, host, cert):
HttpLib2Error.__init__(self, desc)
self.host = host
self.cert = cert
-class NotRunningAppEngineEnvironment(HttpLib2Error): pass
+
+class NotRunningAppEngineEnvironment(HttpLib2Error):
+ pass
+
# Open Items:
# -----------
@@ -204,32 +263,34 @@ class NotRunningAppEngineEnvironment(HttpLib2Error): pass
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
-
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
-try:
- # Users can optionally provide a module that tells us where the CA_CERTS
- # are located.
- import ca_certs_locater
- CA_CERTS = ca_certs_locater.get()
-except ImportError:
- # Default CA certificates file bundled with httplib2.
- CA_CERTS = os.path.join(
- os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
+from httplib2 import certs
+CA_CERTS = certs.where()
# Which headers are hop-by-hop headers by default
-HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
+HOP_BY_HOP = [
+ "connection",
+ "keep-alive",
+ "proxy-authenticate",
+ "proxy-authorization",
+ "te",
+ "trailers",
+ "transfer-encoding",
+ "upgrade",
+]
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
- hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
+ hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")])
return [header for header in response.keys() if header not in hopbyhop]
+
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
@@ -259,53 +320,62 @@ def urlnorm(uri):
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
-re_url_scheme = re.compile(r'^\w+://')
-re_slash = re.compile(r'[?/:|]+')
+re_url_scheme = re.compile(r"^\w+://")
+re_unsafe = re.compile(r"[^\w\-_.()=!]+")
def safename(filename):
"""Return a filename suitable for the cache.
-
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
-
- try:
- if re_url_scheme.match(filename):
- if isinstance(filename,str):
- filename = filename.decode('utf-8')
- filename = filename.encode('idna')
- else:
- filename = filename.encode('idna')
- except UnicodeError:
- pass
- if isinstance(filename,unicode):
- filename=filename.encode('utf-8')
- filemd5 = _md5(filename).hexdigest()
+ if isinstance(filename, str):
+ filename_bytes = filename
+ filename = filename.decode("utf-8")
+ else:
+ filename_bytes = filename.encode("utf-8")
+ filemd5 = _md5(filename_bytes).hexdigest()
filename = re_url_scheme.sub("", filename)
- filename = re_slash.sub(",", filename)
+ filename = re_unsafe.sub("", filename)
+
+ # limit length of filename (vital for Windows)
+ # https://github.com/httplib2/httplib2/pull/74
+ # C:\Users\ <username> \AppData\Local\Temp\ <safe_filename> , <md5>
+ # 9 chars + max 104 chars + 20 chars + x + 1 + 32 = max 259 chars
+ # Thus max safe filename x = 93 chars. Let it be 90 to make a round sum:
+ filename = filename[:90]
- # limit length of filename
- if len(filename)>200:
- filename=filename[:200]
return ",".join((filename, filemd5))
-NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
+
+NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+")
def _normalize_headers(headers):
- return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()])
+ return dict(
+ [
+ (key.lower(), NORMALIZE_SPACE.sub(value, " ").strip())
+ for (key, value) in headers.iteritems()
+ ]
+ )
def _parse_cache_control(headers):
retval = {}
- if 'cache-control' in headers:
- parts = headers['cache-control'].split(',')
- parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
- parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
+ if "cache-control" in headers:
+ parts = headers["cache-control"].split(",")
+ parts_with_args = [
+ tuple([x.strip().lower() for x in part.split("=", 1)])
+ for part in parts
+ if -1 != part.find("=")
+ ]
+ parts_wo_args = [
+ (name.strip().lower(), 1) for name in parts if -1 == name.find("=")
+ ]
retval = dict(parts_with_args + parts_wo_args)
return retval
+
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
@@ -317,10 +387,16 @@ USE_WWW_AUTH_STRICT_PARSING = 0
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
-WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
-WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
-UNQUOTE_PAIRS = re.compile(r'\\(.)')
-def _parse_www_authenticate(headers, headername='www-authenticate'):
+WWW_AUTH_STRICT = re.compile(
+ r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$"
+)
+WWW_AUTH_RELAXED = re.compile(
+ r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$"
+)
+UNQUOTE_PAIRS = re.compile(r"\\(.)")
+
+
+def _parse_www_authenticate(headers, headername="www-authenticate"):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
@@ -328,11 +404,13 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
try:
authenticate = headers[headername].strip()
- www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+ www_auth = (
+ USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+ )
while authenticate:
# Break off the scheme at the beginning of the line
- if headername == 'authentication-info':
- (auth_scheme, the_rest) = ('digest', authenticate)
+ if headername == "authentication-info":
+ (auth_scheme, the_rest) = ("digest", authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
@@ -342,7 +420,9 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
- auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
+ auth_params[key.lower()] = UNQUOTE_PAIRS.sub(
+ r"\1", value
+ ) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
@@ -385,41 +465,44 @@ def _entry_disposition(response_headers, request_headers):
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
- if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
+ if (
+ "pragma" in request_headers
+ and request_headers["pragma"].lower().find("no-cache") != -1
+ ):
retval = "TRANSPARENT"
- if 'cache-control' not in request_headers:
- request_headers['cache-control'] = 'no-cache'
- elif 'no-cache' in cc:
+ if "cache-control" not in request_headers:
+ request_headers["cache-control"] = "no-cache"
+ elif "no-cache" in cc:
retval = "TRANSPARENT"
- elif 'no-cache' in cc_response:
+ elif "no-cache" in cc_response:
retval = "STALE"
- elif 'only-if-cached' in cc:
+ elif "only-if-cached" in cc:
retval = "FRESH"
- elif 'date' in response_headers:
- date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
+ elif "date" in response_headers:
+ date = calendar.timegm(email.Utils.parsedate_tz(response_headers["date"]))
now = time.time()
current_age = max(0, now - date)
- if 'max-age' in cc_response:
+ if "max-age" in cc_response:
try:
- freshness_lifetime = int(cc_response['max-age'])
+ freshness_lifetime = int(cc_response["max-age"])
except ValueError:
freshness_lifetime = 0
- elif 'expires' in response_headers:
- expires = email.Utils.parsedate_tz(response_headers['expires'])
+ elif "expires" in response_headers:
+ expires = email.Utils.parsedate_tz(response_headers["expires"])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
- if 'max-age' in cc:
+ if "max-age" in cc:
try:
- freshness_lifetime = int(cc['max-age'])
+ freshness_lifetime = int(cc["max-age"])
except ValueError:
freshness_lifetime = 0
- if 'min-fresh' in cc:
+ if "min-fresh" in cc:
try:
- min_fresh = int(cc['min-fresh'])
+ min_fresh = int(cc["min-fresh"])
except ValueError:
min_fresh = 0
current_age += min_fresh
@@ -431,19 +514,24 @@ def _entry_disposition(response_headers, request_headers):
def _decompressContent(response, new_content):
content = new_content
try:
- encoding = response.get('content-encoding', None)
- if encoding in ['gzip', 'deflate']:
- if encoding == 'gzip':
+ encoding = response.get("content-encoding", None)
+ if encoding in ["gzip", "deflate"]:
+ if encoding == "gzip":
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
- if encoding == 'deflate':
+ if encoding == "deflate":
content = zlib.decompress(content, -zlib.MAX_WBITS)
- response['content-length'] = str(len(content))
+ response["content-length"] = str(len(content))
# Record the historical presence of the encoding in a way the won't interfere.
- response['-content-encoding'] = response['content-encoding']
- del response['content-encoding']
+ response["-content-encoding"] = response["content-encoding"]
+ del response["content-encoding"]
except (IOError, zlib.error):
content = ""
- raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
+ raise FailedToDecompressContent(
+ _("Content purported to be compressed with %s but failed to decompress.")
+ % response.get("content-encoding"),
+ response,
+ content,
+ )
return content
@@ -451,21 +539,21 @@ def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
- if 'no-store' in cc or 'no-store' in cc_response:
+ if "no-store" in cc or "no-store" in cc_response:
cache.delete(cachekey)
else:
info = email.Message.Message()
for key, value in response_headers.iteritems():
- if key not in ['status','content-encoding','transfer-encoding']:
+ if key not in ["status", "content-encoding", "transfer-encoding"]:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
- vary = response_headers.get('vary', None)
+ vary = response_headers.get("vary", None)
if vary:
- vary_headers = vary.lower().replace(' ', '').split(',')
+ vary_headers = vary.lower().replace(" ", "").split(",")
for header in vary_headers:
- key = '-varied-%s' % header
+ key = "-varied-%s" % header
try:
info[key] = request_headers[header]
except KeyError:
@@ -475,7 +563,7 @@ def _updateCache(request_headers, response_headers, content, cache, cachekey):
if status == 304:
status = 200
- status_header = 'status: %d\r\n' % status
+ status_header = "status: %d\r\n" % status
header_str = info.as_string()
@@ -484,12 +572,19 @@ def _updateCache(request_headers, response_headers, content, cache, cachekey):
cache.set(cachekey, text)
+
def _cnonce():
- dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
+ dig = _md5(
+ "%s:%s"
+ % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])
+ ).hexdigest()
return dig[:16]
+
def _wsse_username_token(cnonce, iso_now, password):
- return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
+ return base64.b64encode(
+ _sha("%s%s%s" % (cnonce, iso_now, password)).digest()
+ ).strip()
# For credentials we need two things, first
@@ -500,8 +595,11 @@ def _wsse_username_token(cnonce, iso_now, password):
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
+
class Authentication(object):
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
@@ -510,7 +608,7 @@ class Authentication(object):
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
- return request_uri[len(self.path):].count("/")
+ return request_uri[len(self.path) :].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
@@ -534,105 +632,169 @@ class Authentication(object):
class BasicAuthentication(Authentication):
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
+ headers["authorization"] = (
+ "Basic " + base64.b64encode("%s:%s" % self.credentials).strip()
+ )
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
- challenge = _parse_www_authenticate(response, 'www-authenticate')
- self.challenge = challenge['digest']
- qop = self.challenge.get('qop', 'auth')
- self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
- if self.challenge['qop'] is None:
- raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
- self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
- if self.challenge['algorithm'] != 'MD5':
- raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
- self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
- self.challenge['nc'] = 1
-
- def request(self, method, request_uri, headers, content, cnonce = None):
+
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
+ challenge = _parse_www_authenticate(response, "www-authenticate")
+ self.challenge = challenge["digest"]
+ qop = self.challenge.get("qop", "auth")
+ self.challenge["qop"] = (
+ ("auth" in [x.strip() for x in qop.split()]) and "auth" or None
+ )
+ if self.challenge["qop"] is None:
+ raise UnimplementedDigestAuthOptionError(
+ _("Unsupported value for qop: %s." % qop)
+ )
+ self.challenge["algorithm"] = self.challenge.get("algorithm", "MD5").upper()
+ if self.challenge["algorithm"] != "MD5":
+ raise UnimplementedDigestAuthOptionError(
+ _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+ )
+ self.A1 = "".join(
+ [
+ self.credentials[0],
+ ":",
+ self.challenge["realm"],
+ ":",
+ self.credentials[1],
+ ]
+ )
+ self.challenge["nc"] = 1
+
+ def request(self, method, request_uri, headers, content, cnonce=None):
"""Modify the request headers"""
H = lambda x: _md5(x).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
- self.challenge['cnonce'] = cnonce or _cnonce()
- request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
- self.challenge['nonce'],
- '%08x' % self.challenge['nc'],
- self.challenge['cnonce'],
- self.challenge['qop'], H(A2)))
- headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
- self.credentials[0],
- self.challenge['realm'],
- self.challenge['nonce'],
- request_uri,
- self.challenge['algorithm'],
- request_digest,
- self.challenge['qop'],
- self.challenge['nc'],
- self.challenge['cnonce'])
- if self.challenge.get('opaque'):
- headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
- self.challenge['nc'] += 1
+ self.challenge["cnonce"] = cnonce or _cnonce()
+ request_digest = '"%s"' % KD(
+ H(self.A1),
+ "%s:%s:%s:%s:%s"
+ % (
+ self.challenge["nonce"],
+ "%08x" % self.challenge["nc"],
+ self.challenge["cnonce"],
+ self.challenge["qop"],
+ H(A2),
+ ),
+ )
+ headers["authorization"] = (
+ 'Digest username="%s", realm="%s", nonce="%s", '
+ 'uri="%s", algorithm=%s, response=%s, qop=%s, '
+ 'nc=%08x, cnonce="%s"'
+ ) % (
+ self.credentials[0],
+ self.challenge["realm"],
+ self.challenge["nonce"],
+ request_uri,
+ self.challenge["algorithm"],
+ request_digest,
+ self.challenge["qop"],
+ self.challenge["nc"],
+ self.challenge["cnonce"],
+ )
+ if self.challenge.get("opaque"):
+ headers["authorization"] += ', opaque="%s"' % self.challenge["opaque"]
+ self.challenge["nc"] += 1
def response(self, response, content):
- if 'authentication-info' not in response:
- challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
- if 'true' == challenge.get('stale'):
- self.challenge['nonce'] = challenge['nonce']
- self.challenge['nc'] = 1
+ if "authentication-info" not in response:
+ challenge = _parse_www_authenticate(response, "www-authenticate").get(
+ "digest", {}
+ )
+ if "true" == challenge.get("stale"):
+ self.challenge["nonce"] = challenge["nonce"]
+ self.challenge["nc"] = 1
return True
else:
- updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
+ updated_challenge = _parse_www_authenticate(
+ response, "authentication-info"
+ ).get("digest", {})
- if 'nextnonce' in updated_challenge:
- self.challenge['nonce'] = updated_challenge['nextnonce']
- self.challenge['nc'] = 1
+ if "nextnonce" in updated_challenge:
+ self.challenge["nonce"] = updated_challenge["nextnonce"]
+ self.challenge["nc"] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
+
__author__ = "Thomas Broyer (t.broyer@ltgt.net)"
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
- challenge = _parse_www_authenticate(response, 'www-authenticate')
- self.challenge = challenge['hmacdigest']
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
+ challenge = _parse_www_authenticate(response, "www-authenticate")
+ self.challenge = challenge["hmacdigest"]
# TODO: self.challenge['domain']
- self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
- if self.challenge['reason'] not in ['unauthorized', 'integrity']:
- self.challenge['reason'] = 'unauthorized'
- self.challenge['salt'] = self.challenge.get('salt', '')
- if not self.challenge.get('snonce'):
- raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
- self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
- if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
- raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
- self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
- if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
- raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
- if self.challenge['algorithm'] == 'HMAC-MD5':
+ self.challenge["reason"] = self.challenge.get("reason", "unauthorized")
+ if self.challenge["reason"] not in ["unauthorized", "integrity"]:
+ self.challenge["reason"] = "unauthorized"
+ self.challenge["salt"] = self.challenge.get("salt", "")
+ if not self.challenge.get("snonce"):
+ raise UnimplementedHmacDigestAuthOptionError(
+ _("The challenge doesn't contain a server nonce, or this one is empty.")
+ )
+ self.challenge["algorithm"] = self.challenge.get("algorithm", "HMAC-SHA-1")
+ if self.challenge["algorithm"] not in ["HMAC-SHA-1", "HMAC-MD5"]:
+ raise UnimplementedHmacDigestAuthOptionError(
+ _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+ )
+ self.challenge["pw-algorithm"] = self.challenge.get("pw-algorithm", "SHA-1")
+ if self.challenge["pw-algorithm"] not in ["SHA-1", "MD5"]:
+ raise UnimplementedHmacDigestAuthOptionError(
+ _(
+ "Unsupported value for pw-algorithm: %s."
+ % self.challenge["pw-algorithm"]
+ )
+ )
+ if self.challenge["algorithm"] == "HMAC-MD5":
self.hashmod = _md5
else:
self.hashmod = _sha
- if self.challenge['pw-algorithm'] == 'MD5':
+ if self.challenge["pw-algorithm"] == "MD5":
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
- self.key = "".join([self.credentials[0], ":",
- self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
- ":", self.challenge['realm']])
+ self.key = "".join(
+ [
+ self.credentials[0],
+ ":",
+ self.pwhashmod.new(
+ "".join([self.credentials[1], self.challenge["salt"]])
+ )
+ .hexdigest()
+ .lower(),
+ ":",
+ self.challenge["realm"],
+ ]
+ )
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
@@ -640,23 +802,38 @@ class HmacDigestAuthentication(Authentication):
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
- created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
+ created = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
- request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
- request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
- headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
- self.credentials[0],
- self.challenge['realm'],
- self.challenge['snonce'],
- cnonce,
- request_uri,
- created,
- request_digest,
- keylist)
+ request_digest = "%s:%s:%s:%s:%s" % (
+ method,
+ request_uri,
+ cnonce,
+ self.challenge["snonce"],
+ headers_val,
+ )
+ request_digest = (
+ hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
+ )
+ headers["authorization"] = (
+ 'HMACDigest username="%s", realm="%s", snonce="%s",'
+ ' cnonce="%s", uri="%s", created="%s", '
+ 'response="%s", headers="%s"'
+ ) % (
+ self.credentials[0],
+ self.challenge["realm"],
+ self.challenge["snonce"],
+ cnonce,
+ request_uri,
+ created,
+ request_digest,
+ keylist,
+ )
def response(self, response, content):
- challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
- if challenge.get('reason') in ['integrity', 'stale']:
+ challenge = _parse_www_authenticate(response, "www-authenticate").get(
+ "hmacdigest", {}
+ )
+ if challenge.get("reason") in ["integrity", "stale"]:
return True
return False
@@ -669,50 +846,69 @@ class WsseAuthentication(Authentication):
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'WSSE profile="UsernameToken"'
+ headers["authorization"] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
- headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
- self.credentials[0],
- password_digest,
- cnonce,
- iso_now)
+ headers["X-WSSE"] = (
+ 'UsernameToken Username="%s", PasswordDigest="%s", '
+ 'Nonce="%s", Created="%s"'
+ ) % (self.credentials[0], password_digest, cnonce, iso_now)
class GoogleLoginAuthentication(Authentication):
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
from urllib import urlencode
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
- challenge = _parse_www_authenticate(response, 'www-authenticate')
- service = challenge['googlelogin'].get('service', 'xapi')
+
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
+ challenge = _parse_www_authenticate(response, "www-authenticate")
+ service = challenge["googlelogin"].get("service", "xapi")
# Bloggger actually returns the service in the challenge
# For the rest we guess based on the URI
- if service == 'xapi' and request_uri.find("calendar") > 0:
+ if service == "xapi" and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
- #elif request_uri.find("spreadsheets") > 0:
+ # elif request_uri.find("spreadsheets") > 0:
# service = "wise"
- auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
- resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
- lines = content.split('\n')
+ auth = dict(
+ Email=credentials[0],
+ Passwd=credentials[1],
+ service=service,
+ source=headers["user-agent"],
+ )
+ resp, content = self.http.request(
+ "https://www.google.com/accounts/ClientLogin",
+ method="POST",
+ body=urlencode(auth),
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ )
+ lines = content.split("\n")
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
- self.Auth = d['Auth']
+ self.Auth = d["Auth"]
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
+ headers["authorization"] = "GoogleLogin Auth=" + self.Auth
AUTH_SCHEME_CLASSES = {
@@ -720,7 +916,7 @@ AUTH_SCHEME_CLASSES = {
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
- "googlelogin": GoogleLoginAuthentication
+ "googlelogin": GoogleLoginAuthentication,
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
@@ -731,7 +927,10 @@ class FileCache(object):
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
- def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
+
+ def __init__(
+ self, cache, safe=safename
+ ): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
@@ -779,6 +978,7 @@ class Credentials(object):
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
+
pass
@@ -788,32 +988,35 @@ class AllHosts(object):
class ProxyInfo(object):
"""Collect information required to use a proxy."""
- bypass_hosts = ()
- def __init__(self, proxy_type, proxy_host, proxy_port,
- proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None):
- """
- Args:
- proxy_type: The type of proxy server. This must be set to one of
- socks.PROXY_TYPE_XXX constants. For example:
+ bypass_hosts = ()
- p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
- proxy_host='localhost', proxy_port=8000)
+ def __init__(
+ self,
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns=True,
+ proxy_user=None,
+ proxy_pass=None,
+ proxy_headers=None,
+ ):
+ """Args:
+ proxy_type: The type of proxy server. This must be set to one of
+ socks.PROXY_TYPE_XXX constants. For example: p =
+ ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost',
+ proxy_port=8000)
proxy_host: The hostname or IP address of the proxy server.
-
proxy_port: The port that the proxy server is running on.
-
proxy_rdns: If True (default), DNS queries will not be performed
locally, and instead, handed to the proxy to resolve. This is useful
- if the network does not allow resolution of non-local names. In
+ if the network does not allow resolution of non-local names. In
httplib2 0.9 and earlier, this defaulted to False.
-
proxy_user: The username used to authenticate with the proxy server.
-
proxy_pass: The password used to authenticate with the proxy server.
-
- proxy_headers: Additional or modified headers for the proxy connect request.
+ proxy_headers: Additional or modified headers for the proxy connect
+ request.
"""
self.proxy_type = proxy_type
self.proxy_host = proxy_host
@@ -824,8 +1027,15 @@ class ProxyInfo(object):
self.proxy_headers = proxy_headers
def astuple(self):
- return (self.proxy_type, self.proxy_host, self.proxy_port,
- self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers)
+ return (
+ self.proxy_type,
+ self.proxy_host,
+ self.proxy_port,
+ self.proxy_rdns,
+ self.proxy_user,
+ self.proxy_pass,
+ self.proxy_headers,
+ )
def isgood(self):
return (self.proxy_host != None) and (self.proxy_port != None)
@@ -838,54 +1048,54 @@ class ProxyInfo(object):
if self.bypass_hosts is AllHosts:
return True
- hostname = '.' + hostname.lstrip('.')
+ hostname = "." + hostname.lstrip(".")
for skip_name in self.bypass_hosts:
# *.suffix
- if skip_name.startswith('.') and hostname.endswith(skip_name):
+ if skip_name.startswith(".") and hostname.endswith(skip_name):
return True
# exact match
- if hostname == '.' + skip_name:
+ if hostname == "." + skip_name:
return True
return False
def __repr__(self):
return (
- '<ProxyInfo type={p.proxy_type} host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}' +
- ' user={p.proxy_user} headers={p.proxy_headers}>').format(p=self)
+ "<ProxyInfo type={p.proxy_type} "
+ "host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}"
+ + " user={p.proxy_user} headers={p.proxy_headers}>"
+ ).format(p=self)
-def proxy_info_from_environment(method='http'):
- """
- Read proxy info from the environment variables.
+def proxy_info_from_environment(method="http"):
+ """Read proxy info from the environment variables.
"""
- if method not in ['http', 'https']:
+ if method not in ["http", "https"]:
return
- env_var = method + '_proxy'
+ env_var = method + "_proxy"
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
return
return proxy_info_from_url(url, method, None)
-def proxy_info_from_url(url, method='http', noproxy=None):
- """
- Construct a ProxyInfo from a URL (such as http_proxy env var)
+def proxy_info_from_url(url, method="http", noproxy=None):
+ """Construct a ProxyInfo from a URL (such as http_proxy env var)
"""
url = urlparse.urlparse(url)
username = None
password = None
port = None
- if '@' in url[1]:
- ident, host_port = url[1].split('@', 1)
- if ':' in ident:
- username, password = ident.split(':', 1)
+ if "@" in url[1]:
+ ident, host_port = url[1].split("@", 1)
+ if ":" in ident:
+ username, password = ident.split(":", 1)
else:
password = ident
else:
host_port = url[1]
- if ':' in host_port:
- host, port = host_port.split(':', 1)
+ if ":" in host_port:
+ host, port = host_port.split(":", 1)
else:
host = host_port
@@ -896,23 +1106,23 @@ def proxy_info_from_url(url, method='http', noproxy=None):
proxy_type = 3 # socks.PROXY_TYPE_HTTP
pi = ProxyInfo(
- proxy_type = proxy_type,
- proxy_host = host,
- proxy_port = port,
- proxy_user = username or None,
- proxy_pass = password or None,
- proxy_headers = None,
+ proxy_type=proxy_type,
+ proxy_host=host,
+ proxy_port=port,
+ proxy_user=username or None,
+ proxy_pass=password or None,
+ proxy_headers=None,
)
bypass_hosts = []
# If not given an explicit noproxy value, respect values in env vars.
if noproxy is None:
- noproxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
+ noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", ""))
# Special case: A single '*' character means all hosts should be bypassed.
- if noproxy == '*':
+ if noproxy == "*":
bypass_hosts = AllHosts
elif noproxy.strip():
- bypass_hosts = noproxy.split(',')
+ bypass_hosts = noproxy.split(",")
bypass_hosts = filter(bool, bypass_hosts) # To exclude empty string.
pi.bypass_hosts = bypass_hosts
@@ -920,8 +1130,7 @@ def proxy_info_from_url(url, method='http', noproxy=None):
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
- """
- HTTPConnection subclass that supports timeouts
+ """HTTPConnection subclass that supports timeouts
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
@@ -939,11 +1148,14 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
# Mostly verbatim from httplib.py.
if self.proxy_info and socks is None:
raise ProxiesUnavailableError(
- 'Proxy support missing but proxy use was requested!')
+ "Proxy support missing but proxy use was requested!"
+ )
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
- proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = self.proxy_info.astuple()
+ proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+ self.proxy_info.astuple()
+ )
host = proxy_host
port = proxy_port
@@ -958,7 +1170,15 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
try:
if use_proxy:
self.sock = socks.socksocket(af, socktype, proto)
- self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)
+ self.sock.setproxy(
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
else:
self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@@ -969,7 +1189,19 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
if self.debuglevel > 0:
print("connect: (%s, %s) ************" % (self.host, self.port))
if use_proxy:
- print("proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+ print(
+ "proxy: %s ************"
+ % str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
if use_proxy:
self.sock.connect((self.host, self.port) + sa[2:])
else:
@@ -978,39 +1210,59 @@ class HTTPConnectionWithTimeout(httplib.HTTPConnection):
if self.debuglevel > 0:
print("connect fail: (%s, %s)" % (self.host, self.port))
if use_proxy:
- print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+ print(
+ "proxy: %s"
+ % str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
- raise socket.error, msg
+ raise socket.error(msg)
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
- """
- This class allows communication via SSL.
+ """This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None,
- ca_certs=None, disable_ssl_certificate_validation=False,
- ssl_version=None):
- httplib.HTTPSConnection.__init__(self, host, port=port,
- key_file=key_file,
- cert_file=cert_file, strict=strict)
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ ssl_version=None,
+ ):
+ httplib.HTTPSConnection.__init__(
+ self, host, port=port, key_file=key_file, cert_file=cert_file, strict=strict
+ )
self.timeout = timeout
self.proxy_info = proxy_info
if ca_certs is None:
ca_certs = CA_CERTS
self.ca_certs = ca_certs
- self.disable_ssl_certificate_validation = \
- disable_ssl_certificate_validation
+ self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
self.ssl_version = ssl_version
# The following two methods were adapted from https_wrapper.py, released
@@ -1041,12 +1293,10 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
Returns:
list: A list of valid host globs.
"""
- if 'subjectAltName' in cert:
- return [x[1] for x in cert['subjectAltName']
- if x[0].lower() == 'dns']
+ if "subjectAltName" in cert:
+ return [x[1] for x in cert["subjectAltName"] if x[0].lower() == "dns"]
else:
- return [x[0][1] for x in cert['subject']
- if x[0][0].lower() == 'commonname']
+ return [x[0][1] for x in cert["subject"] if x[0][0].lower() == "commonname"]
def _ValidateCertificateHostname(self, cert, hostname):
"""Validates that a given hostname is valid for an SSL certificate.
@@ -1059,8 +1309,8 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
hosts = self._GetValidHostsForCert(cert)
for host in hosts:
- host_re = host.replace('.', '\.').replace('*', '[^.]*')
- if re.search('^%s$' % (host_re,), hostname, re.I):
+ host_re = host.replace(".", "\.").replace("*", "[^.]*")
+ if re.search("^%s$" % (host_re,), hostname, re.I):
return True
return False
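
The glob-to-regex conversion above is easiest to see with a concrete value;
a standalone sketch using a hypothetical certificate entry:

    import re

    cert_host = "*.example.com"  # hypothetical subjectAltName DNS entry
    host_re = cert_host.replace(".", "\\.").replace("*", "[^.]*")
    # host_re is now '[^.]*\.example\.com': '*' spans a single DNS label only.
    assert re.search("^%s$" % host_re, "www.example.com", re.I)
    assert not re.search("^%s$" % host_re, "a.b.example.com", re.I)
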
@@ -1070,7 +1320,9 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
- proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = self.proxy_info.astuple()
+ proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+ self.proxy_info.astuple()
+ )
host = proxy_host
port = proxy_port
@@ -1086,7 +1338,15 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
if use_proxy:
sock = socks.socksocket(family, socktype, proto)
- sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)
+ sock.setproxy(
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
else:
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@@ -1098,22 +1358,46 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
sock.connect((self.host, self.port) + sockaddr[:2])
else:
sock.connect(sockaddr)
- self.sock =_ssl_wrap_socket(
- sock, self.key_file, self.cert_file,
- self.disable_ssl_certificate_validation, self.ca_certs,
- self.ssl_version, self.host)
+ self.sock = _ssl_wrap_socket(
+ sock,
+ self.key_file,
+ self.cert_file,
+ self.disable_ssl_certificate_validation,
+ self.ca_certs,
+ self.ssl_version,
+ self.host,
+ )
if self.debuglevel > 0:
print("connect: (%s, %s)" % (self.host, self.port))
if use_proxy:
- print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+ print(
+ "proxy: %s"
+ % str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
if not self.disable_ssl_certificate_validation:
cert = self.sock.getpeercert()
- hostname = self.host.split(':', 0)[0]
+ hostname = self.host.split(":", 1)[0]
if not self._ValidateCertificateHostname(cert, hostname):
raise CertificateHostnameMismatch(
- 'Server presented certificate that does not match '
- 'host %s: %s' % (hostname, cert), hostname, cert)
- except (ssl_SSLError, ssl_CertificateError, CertificateHostnameMismatch) as e:
+ "Server presented certificate that does not match "
+ "host %s: %s" % (hostname, cert),
+ hostname,
+ cert,
+ )
+ except (
+ ssl_SSLError,
+ ssl_CertificateError,
+ CertificateHostnameMismatch,
+ ) as e:
if sock:
sock.close()
if self.sock:
@@ -1123,7 +1407,7 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
# to get at more detailed error information, in particular
# whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch).
- if getattr(e, 'errno', None) == ssl.SSL_ERROR_SSL:
+ if getattr(e, "errno", None) == ssl.SSL_ERROR_SSL:
raise SSLHandshakeError(e)
else:
raise
@@ -1133,31 +1417,56 @@ class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
if self.debuglevel > 0:
print("connect fail: (%s, %s)" % (self.host, self.port))
if use_proxy:
- print("proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers)))
+ print(
+ "proxy: %s"
+ % str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
- raise socket.error, msg
+ raise socket.error(msg)
+
SCHEME_TO_CONNECTION = {
- 'http': HTTPConnectionWithTimeout,
- 'https': HTTPSConnectionWithTimeout
+ "http": HTTPConnectionWithTimeout,
+ "https": HTTPSConnectionWithTimeout,
}
def _new_fixed_fetch(validate_certificate):
- def fixed_fetch(url, payload=None, method="GET", headers={},
- allow_truncated=False, follow_redirects=True,
- deadline=None):
- if deadline is None:
- deadline = socket.getdefaulttimeout()
- return fetch(url, payload=payload, method=method, headers=headers,
- allow_truncated=allow_truncated,
- follow_redirects=follow_redirects, deadline=deadline,
- validate_certificate=validate_certificate)
+
+ def fixed_fetch(
+ url,
+ payload=None,
+ method="GET",
+ headers={},
+ allow_truncated=False,
+ follow_redirects=True,
+ deadline=None,
+ ):
+ return fetch(
+ url,
+ payload=payload,
+ method=method,
+ headers=headers,
+ allow_truncated=allow_truncated,
+ follow_redirects=follow_redirects,
+ deadline=deadline,
+ validate_certificate=validate_certificate,
+ )
+
return fixed_fetch
@@ -1168,12 +1477,23 @@ class AppEngineHttpConnection(httplib.HTTPConnection):
disable_ssl_certificate_validation, and ssl_version are all dropped on
the ground.
"""
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None, ca_certs=None,
- disable_ssl_certificate_validation=False,
- ssl_version=None):
- httplib.HTTPConnection.__init__(self, host, port=port,
- strict=strict, timeout=timeout)
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ ssl_version=None,
+ ):
+ httplib.HTTPConnection.__init__(
+ self, host, port=port, strict=strict, timeout=timeout
+ )
class AppEngineHttpsConnection(httplib.HTTPSConnection):
@@ -1182,38 +1502,58 @@ class AppEngineHttpsConnection(httplib.HTTPSConnection):
The parameters proxy_info, ca_certs, disable_ssl_certificate_validation,
and ssl_version are all dropped on the ground.
"""
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None, ca_certs=None,
- disable_ssl_certificate_validation=False,
- ssl_version=None):
- httplib.HTTPSConnection.__init__(self, host, port=port,
- key_file=key_file,
- cert_file=cert_file, strict=strict,
- timeout=timeout)
- self._fetch = _new_fixed_fetch(
- not disable_ssl_certificate_validation)
-
-# Use a different connection object for Google App Engine
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ ssl_version=None,
+ ):
+ httplib.HTTPSConnection.__init__(
+ self,
+ host,
+ port=port,
+ key_file=key_file,
+ cert_file=cert_file,
+ strict=strict,
+ timeout=timeout,
+ )
+ self._fetch = _new_fixed_fetch(not disable_ssl_certificate_validation)
+
+
+# Use a different connection object for Google App Engine Standard Environment.
+def is_gae_instance():
+ server_software = os.environ.get('SERVER_SOFTWARE', '')
+ if (server_software.startswith('Google App Engine/') or
+ server_software.startswith('Development/') or
+ server_software.startswith('testutil/')):
+ return True
+ return False
+
+
try:
- server_software = os.environ.get('SERVER_SOFTWARE')
- if not server_software:
- raise NotRunningAppEngineEnvironment()
- elif not (server_software.startswith('Google App Engine/') or
- server_software.startswith('Development/')):
+ if not is_gae_instance():
raise NotRunningAppEngineEnvironment()
from google.appengine.api import apiproxy_stub_map
- if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
- raise ImportError # Bail out; we're not actually running on App Engine.
+ if apiproxy_stub_map.apiproxy.GetStub("urlfetch") is None:
+ raise ImportError
+
from google.appengine.api.urlfetch import fetch
- from google.appengine.api.urlfetch import InvalidURLError
# Update the connection classes to use the Google App Engine-specific ones.
SCHEME_TO_CONNECTION = {
- 'http': AppEngineHttpConnection,
- 'https': AppEngineHttpsConnection
+ "http": AppEngineHttpConnection,
+ "https": AppEngineHttpsConnection,
}
-except (ImportError, AttributeError, NotRunningAppEngineEnvironment):
+except (ImportError, NotRunningAppEngineEnvironment):
pass
@@ -1231,10 +1571,16 @@ class Http(object):
and more.
"""
- def __init__(self, cache=None, timeout=None,
- proxy_info=proxy_info_from_environment,
- ca_certs=None, disable_ssl_certificate_validation=False,
- ssl_version=None):
+
+ def __init__(
+ self,
+ cache=None,
+ timeout=None,
+ proxy_info=proxy_info_from_environment,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ ssl_version=None,
+ ):
"""If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
@@ -1262,8 +1608,7 @@ class Http(object):
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
- self.disable_ssl_certificate_validation = \
- disable_ssl_certificate_validation
+ self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
self.ssl_version = ssl_version
# Map domain name to an httplib connection
@@ -1308,10 +1653,10 @@ class Http(object):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
- if 'request' in state_dict:
- del state_dict['request']
- if 'connections' in state_dict:
- del state_dict['connections']
+ if "request" in state_dict:
+ del state_dict["request"]
+ if "connections" in state_dict:
+ del state_dict["connections"]
return state_dict
def __setstate__(self, state):
@@ -1322,11 +1667,13 @@ class Http(object):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
- challenges = _parse_www_authenticate(response, 'www-authenticate')
+ challenges = _parse_www_authenticate(response, "www-authenticate")
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if scheme in challenges:
- yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
+ yield AUTH_SCHEME_CLASSES[scheme](
+ cred, host, request_uri, headers, response, content, self
+ )
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
@@ -1350,7 +1697,7 @@ class Http(object):
while i < RETRIES:
i += 1
try:
- if hasattr(conn, 'sock') and conn.sock is None:
+ if hasattr(conn, "sock") and conn.sock is None:
conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
@@ -1363,8 +1710,8 @@ class Http(object):
raise
except socket.error as e:
err = 0
- if hasattr(e, 'args'):
- err = getattr(e, 'args')[0]
+ if hasattr(e, "args"):
+ err = getattr(e, "args")[0]
else:
err = e.errno
if err == errno.ECONNREFUSED: # Connection refused
@@ -1374,15 +1721,15 @@ class Http(object):
except httplib.HTTPException:
# Just because the server closed the connection apparently doesn't mean
# that the server didn't send a response.
- if hasattr(conn, 'sock') and conn.sock is None:
- if i < RETRIES-1:
+ if hasattr(conn, "sock") and conn.sock is None:
+ if i < RETRIES - 1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
- if i < RETRIES-1:
+ if i < RETRIES - 1:
conn.close()
conn.connect()
continue
@@ -1402,7 +1749,7 @@ class Http(object):
conn.close()
raise
except (socket.error, httplib.HTTPException):
- if i < RETRIES-1:
+ if i < RETRIES - 1:
conn.close()
conn.connect()
continue
@@ -1421,77 +1768,121 @@ class Http(object):
break
return (response, content)
-
- def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
+ def _request(
+ self,
+ conn,
+ host,
+ absolute_uri,
+ request_uri,
+ method,
+ body,
+ headers,
+ redirections,
+ cachekey,
+ ):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
- auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
+ auths = [
+ (auth.depth(request_uri), auth)
+ for auth in self.authorizations
+ if auth.inscope(host, request_uri)
+ ]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
- (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+ (response, content) = self._conn_request(
+ conn, request_uri, method, body, headers
+ )
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
- (response, content) = self._conn_request(conn, request_uri, method, body, headers )
+ (response, content) = self._conn_request(
+ conn, request_uri, method, body, headers
+ )
response._stale_digest = 1
if response.status == 401:
- for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
+ for authorization in self._auth_from_challenge(
+ host, request_uri, headers, response, content
+ ):
authorization.request(method, request_uri, headers, body)
- (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
+ (response, content) = self._conn_request(
+ conn, request_uri, method, body, headers
+ )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
- if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
+ if (
+ self.follow_all_redirects
+ or (method in ["GET", "HEAD"])
+ or response.status == 303
+ ):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
- if 'location' not in response and response.status != 300:
- raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
+ if "location" not in response and response.status != 300:
+ raise RedirectMissingLocation(
+ _(
+ "Redirected but the response is missing a Location: header."
+ ),
+ response,
+ content,
+ )
# Fix-up relative redirects (which violate an RFC 2616 MUST)
- if 'location' in response:
- location = response['location']
+ if "location" in response:
+ location = response["location"]
(scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None:
- response['location'] = urlparse.urljoin(absolute_uri, location)
+ response["location"] = urlparse.urljoin(
+ absolute_uri, location
+ )
if response.status == 301 and method in ["GET", "HEAD"]:
- response['-x-permanent-redirect-url'] = response['location']
- if 'content-location' not in response:
- response['content-location'] = absolute_uri
+ response["-x-permanent-redirect-url"] = response["location"]
+ if "content-location" not in response:
+ response["content-location"] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
- if 'if-none-match' in headers:
- del headers['if-none-match']
- if 'if-modified-since' in headers:
- del headers['if-modified-since']
- if 'authorization' in headers and not self.forward_authorization_headers:
- del headers['authorization']
- if 'location' in response:
- location = response['location']
+ if "if-none-match" in headers:
+ del headers["if-none-match"]
+ if "if-modified-since" in headers:
+ del headers["if-modified-since"]
+ if (
+ "authorization" in headers
+ and not self.forward_authorization_headers
+ ):
+ del headers["authorization"]
+ if "location" in response:
+ location = response["location"]
old_response = copy.deepcopy(response)
- if 'content-location' not in old_response:
- old_response['content-location'] = absolute_uri
+ if "content-location" not in old_response:
+ old_response["content-location"] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
body = None
(response, content) = self.request(
- location, method=redirect_method,
- body=body, headers=headers,
- redirections=redirections - 1)
+ location,
+ method=redirect_method,
+ body=body,
+ headers=headers,
+ redirections=redirections - 1,
+ )
response.previous = old_response
else:
- raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
+ raise RedirectLimit(
+ "Redirected more times than rediection_limit allows.",
+ response,
+ content,
+ )
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
- if 'content-location' not in response:
- response['content-location'] = absolute_uri
+ if "content-location" not in response:
+ response["content-location"] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
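
The redirect bookkeeping above is visible from the caller's side; a minimal
sketch (the URL is illustrative):

    import httplib2

    h = httplib2.Http()
    # 302/303 responses are re-issued as GET; the redirect response itself is
    # kept on response.previous with content-location filled in.
    resp, content = h.request("http://example.com/old", redirections=5)
    if resp.previous is not None:
        print(resp.previous.status, resp.previous["content-location"])
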
@@ -1499,12 +1890,19 @@ class Http(object):
def _normalize_headers(self, headers):
return _normalize_headers(headers)
-# Need to catch and rebrand some exceptions
-# Then need to optionally turn all exceptions into status codes
-# including all socket.* and httplib.* exceptions.
-
-
- def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
+ # Need to catch and rebrand some exceptions
+ # Then need to optionally turn all exceptions into status codes
+ # including all socket.* and httplib.* exceptions.
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ ):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin with either
@@ -1532,57 +1930,55 @@ class Http(object):
else:
headers = self._normalize_headers(headers)
- if 'user-agent' not in headers:
- headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
+ if "user-agent" not in headers:
+ headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
- domain_port = authority.split(":")[0:2]
- if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
- scheme = 'https'
- authority = domain_port[0]
proxy_info = self._get_proxy_info(scheme, authority)
- conn_key = scheme+":"+authority
- if conn_key in self.connections:
- conn = self.connections[conn_key]
- else:
+ conn_key = scheme + ":" + authority
+ conn = self.connections.get(conn_key)
+ if conn is None:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
- if scheme == 'https':
+ if scheme == "https":
if certs:
conn = self.connections[conn_key] = connection_type(
- authority, key_file=certs[0][0],
- cert_file=certs[0][1], timeout=self.timeout,
- proxy_info=proxy_info,
- ca_certs=self.ca_certs,
- disable_ssl_certificate_validation=
- self.disable_ssl_certificate_validation,
- ssl_version=self.ssl_version)
+ authority,
+ key_file=certs[0][0],
+ cert_file=certs[0][1],
+ timeout=self.timeout,
+ proxy_info=proxy_info,
+ ca_certs=self.ca_certs,
+ disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+ ssl_version=self.ssl_version,
+ )
else:
conn = self.connections[conn_key] = connection_type(
- authority, timeout=self.timeout,
- proxy_info=proxy_info,
- ca_certs=self.ca_certs,
- disable_ssl_certificate_validation=
- self.disable_ssl_certificate_validation,
- ssl_version=self.ssl_version)
+ authority,
+ timeout=self.timeout,
+ proxy_info=proxy_info,
+ ca_certs=self.ca_certs,
+ disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+ ssl_version=self.ssl_version,
+ )
else:
conn = self.connections[conn_key] = connection_type(
- authority, timeout=self.timeout,
- proxy_info=proxy_info)
+ authority, timeout=self.timeout, proxy_info=proxy_info
+ )
conn.set_debuglevel(debuglevel)
- if 'range' not in headers and 'accept-encoding' not in headers:
- headers['accept-encoding'] = 'gzip, deflate'
+ if "range" not in headers and "accept-encoding" not in headers:
+ headers["accept-encoding"] = "gzip, deflate"
info = email.Message.Message()
cached_value = None
if self.cache:
- cachekey = defrag_uri.encode('utf-8')
+ cachekey = defrag_uri.encode("utf-8")
cached_value = self.cache.get(cachekey)
if cached_value:
# info = email.message_from_string(cached_value)
@@ -1591,7 +1987,7 @@ class Http(object):
# to fix the non-existent bug not fixed in this
# bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
try:
- info, content = cached_value.split('\r\n\r\n', 1)
+ info, content = cached_value.split("\r\n\r\n", 1)
feedparser = email.FeedParser.FeedParser()
feedparser.feed(info)
info = feedparser.close()
@@ -1603,9 +1999,15 @@ class Http(object):
else:
cachekey = None
- if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers:
+ if (
+ method in self.optimistic_concurrency_methods
+ and self.cache
+ and "etag" in info
+ and not self.ignore_etag
+ and "if-match" not in headers
+ ):
# http://www.w3.org/1999/04/Editing/
- headers['if-match'] = info['etag']
+ headers["if-match"] = info["etag"]
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
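
A sketch of the optimistic-concurrency path above, assuming the default
optimistic_concurrency_methods (PUT/PATCH) and a server that returns a
cacheable response with an ETag (URL and cache directory are illustrative):

    import httplib2

    h = httplib2.Http(".cache")
    h.request("http://example.com/doc")  # primes the cache, stores the etag
    # The cached etag is replayed automatically as an If-Match header:
    resp, content = h.request("http://example.com/doc", method="PUT", body="new")
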
@@ -1613,24 +2015,36 @@ class Http(object):
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
- if method in ['GET', 'HEAD'] and 'vary' in info:
- vary = info['vary']
- vary_headers = vary.lower().replace(' ', '').split(',')
+ if method in ["GET", "HEAD"] and "vary" in info:
+ vary = info["vary"]
+ vary_headers = vary.lower().replace(" ", "").split(",")
for header in vary_headers:
- key = '-varied-%s' % header
+ key = "-varied-%s" % header
value = info[key]
if headers.get(header, None) != value:
cached_value = None
break
- if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
- if '-x-permanent-redirect-url' in info:
+ if (
+ cached_value
+ and method in ["GET", "HEAD"]
+ and self.cache
+ and "range" not in headers
+ ):
+ if "-x-permanent-redirect-url" in info:
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
- raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
+ raise RedirectLimit(
+ "Redirected more times than rediection_limit allows.",
+ {},
+ "",
+ )
(response, new_content) = self.request(
- info['-x-permanent-redirect-url'], method='GET',
- headers=headers, redirections=redirections - 1)
+ info["-x-permanent-redirect-url"],
+ method="GET",
+ headers=headers,
+ redirections=redirections - 1,
+ )
response.previous = Response(info)
response.previous.fromcache = True
else:
@@ -1646,7 +2060,7 @@ class Http(object):
if entry_disposition == "FRESH":
if not cached_value:
- info['status'] = '504'
+ info["status"] = "504"
content = ""
response = Response(info)
if cached_value:
@@ -1654,14 +2068,28 @@ class Http(object):
return (response, content)
if entry_disposition == "STALE":
- if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
- headers['if-none-match'] = info['etag']
- if 'last-modified' in info and not 'last-modified' in headers:
- headers['if-modified-since'] = info['last-modified']
+ if (
+ "etag" in info
+ and not self.ignore_etag
+ and not "if-none-match" in headers
+ ):
+ headers["if-none-match"] = info["etag"]
+ if "last-modified" in info and not "last-modified" in headers:
+ headers["if-modified-since"] = info["last-modified"]
elif entry_disposition == "TRANSPARENT":
pass
- (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+ (response, new_content) = self._request(
+ conn,
+ authority,
+ uri,
+ request_uri,
+ method,
+ body,
+ headers,
+ redirections,
+ cachekey,
+ )
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
@@ -1674,7 +2102,9 @@ class Http(object):
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
- _updateCache(headers, merged_response, content, self.cache, cachekey)
+ _updateCache(
+ headers, merged_response, content, self.cache, cachekey
+ )
response = merged_response
response.status = 200
response.fromcache = True
@@ -1686,12 +2116,22 @@ class Http(object):
content = new_content
else:
cc = _parse_cache_control(headers)
- if 'only-if-cached' in cc:
- info['status'] = '504'
+ if "only-if-cached" in cc:
+ info["status"] = "504"
response = Response(info)
content = ""
else:
- (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+ (response, content) = self._request(
+ conn,
+ authority,
+ uri,
+ request_uri,
+ method,
+ body,
+ headers,
+ redirections,
+ cachekey,
+ )
except Exception as e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
@@ -1701,24 +2141,27 @@ class Http(object):
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = "Request Timeout"
- response = Response({
- "content-type": "text/plain",
- "status": "408",
- "content-length": len(content)
- })
+ response = Response(
+ {
+ "content-type": "text/plain",
+ "status": "408",
+ "content-length": len(content),
+ }
+ )
response.reason = "Request Timeout"
else:
content = str(e)
- response = Response({
- "content-type": "text/plain",
- "status": "400",
- "content-length": len(content)
- })
+ response = Response(
+ {
+ "content-type": "text/plain",
+ "status": "400",
+ "content-length": len(content),
+ }
+ )
response.reason = "Bad Request"
else:
raise
-
return (response, content)
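
The exception-to-status mapping above can be exercised as follows (a sketch;
the unroutable address is illustrative):

    import httplib2

    h = httplib2.Http(timeout=1)
    h.force_exception_to_status_code = True
    # Timeouts come back as a synthesized 408 and other errors as 400,
    # instead of propagating as exceptions.
    resp, content = h.request("http://10.255.255.1/")
    print(resp.status, resp.reason)
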
def _get_proxy_info(self, scheme, authority):
@@ -1730,8 +2173,7 @@ class Http(object):
if callable(proxy_info):
proxy_info = proxy_info(scheme)
- if (hasattr(proxy_info, 'applies_to')
- and not proxy_info.applies_to(hostname)):
+ if hasattr(proxy_info, "applies_to") and not proxy_info.applies_to(hostname):
proxy_info = None
return proxy_info
@@ -1741,13 +2183,14 @@ class Response(dict):
"""Is this response from our local cache"""
fromcache = False
+ """HTTP protocol version used by server.
- """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
+ 10 for HTTP/1.0, 11 for HTTP/1.1.
+ """
version = 11
"Status code returned by server. "
status = 200
-
"""Reason phrase returned by server."""
reason = "Ok"
@@ -1760,21 +2203,21 @@ class Response(dict):
for key, value in info.getheaders():
self[key.lower()] = value
self.status = info.status
- self['status'] = str(self.status)
+ self["status"] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.Message.Message):
for key, value in info.items():
self[key.lower()] = value
- self.status = int(self['status'])
+ self.status = int(self["status"])
else:
for key, value in info.iteritems():
self[key.lower()] = value
- self.status = int(self.get('status', self.status))
- self.reason = self.get('reason', self.reason)
+ self.status = int(self.get("status", self.status))
+ self.reason = self.get("reason", self.reason)
def __getattr__(self, name):
- if name == 'dict':
+ if name == "dict":
return self
else:
raise AttributeError(name)
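
The Response reformatting above preserves its dual dict/attribute interface;
a minimal usage sketch (URL illustrative):

    import httplib2

    resp, content = httplib2.Http().request("http://example.com/")
    # Response is a dict of lower-cased headers plus status attributes;
    # the 'status' key mirrors the .status attribute as a string.
    print(resp.status, resp.reason, resp.fromcache)
    print(resp["content-type"], resp["status"])
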
diff --git a/python2/httplib2/certs.py b/python2/httplib2/certs.py
new file mode 100644
index 0000000..59d1ffc
--- /dev/null
+++ b/python2/httplib2/certs.py
@@ -0,0 +1,42 @@
+"""Utilities for certificate management."""
+
+import os
+
+certifi_available = False
+certifi_where = None
+try:
+ from certifi import where as certifi_where
+ certifi_available = True
+except ImportError:
+ pass
+
+custom_ca_locater_available = False
+custom_ca_locater_where = None
+try:
+ from ca_certs_locater import get as custom_ca_locater_where
+ custom_ca_locater_available = True
+except ImportError:
+ pass
+
+
+BUILTIN_CA_CERTS = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "cacerts.txt"
+)
+
+
+def where():
+ env = os.environ.get("HTTPLIB2_CA_CERTS")
+ if env is not None:
+ if os.path.isfile(env):
+ return env
+ else:
+ raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS not a valid file")
+ if custom_ca_locater_available:
+ return custom_ca_locater_where()
+ if certifi_available:
+ return certifi_where()
+ return BUILTIN_CA_CERTS
+
+
+if __name__ == "__main__":
+ print(where())
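
A sketch of the lookup order implemented by where() above (the override path
is illustrative):

    import os
    from httplib2.certs import where

    # Order: HTTPLIB2_CA_CERTS env var (must name an existing file, else
    # RuntimeError), then ca_certs_locater, then certifi, then the bundled
    # cacerts.txt.
    os.environ["HTTPLIB2_CA_CERTS"] = "/etc/ssl/certs/ca-certificates.crt"
    print(where())
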
diff --git a/python2/httplib2/iri2uri.py b/python2/httplib2/iri2uri.py
index d88c91f..0a978a7 100644
--- a/python2/httplib2/iri2uri.py
+++ b/python2/httplib2/iri2uri.py
@@ -1,20 +1,13 @@
-"""
-iri2uri
+"""Converts an IRI to a URI."""
-Converts an IRI to a URI.
-
-"""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"
-__history__ = """
-"""
import urlparse
-
# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
@@ -50,6 +43,7 @@ escape_range = [
(0x100000, 0x10FFFD),
]
+
def encode(c):
retval = c
i = ord(c)
@@ -57,7 +51,7 @@ def encode(c):
if i < low:
break
if i >= low and i <= high:
- retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
+ retval = "".join(["%%%2X" % ord(o) for o in c.encode("utf-8")])
break
return retval
@@ -66,9 +60,9 @@ def iri2uri(uri):
"""Convert an IRI to a URI. Note that IRIs must be
passed in as unicode strings. That is, do not utf-8 encode
the IRI before passing it into the function."""
- if isinstance(uri ,unicode):
+ if isinstance(uri, unicode):
(scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
- authority = authority.encode('idna')
+ authority = authority.encode("idna")
# For each character in 'ucschar' or 'iprivate'
# 1. encode as utf-8
# 2. then %-encode each octet of that utf-8
@@ -76,11 +70,11 @@ def iri2uri(uri):
uri = "".join([encode(c) for c in uri])
return uri
+
if __name__ == "__main__":
import unittest
class Test(unittest.TestCase):
-
def test_uris(self):
"""Test that URIs are invariant under the transformation."""
invariant = [
@@ -91,20 +85,39 @@ if __name__ == "__main__":
u"news:comp.infosystems.www.servers.unix",
u"tel:+1-816-555-1212",
u"telnet://192.0.2.16:80/",
- u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
+ u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ ]
for uri in invariant:
self.assertEqual(uri, iri2uri(uri))
def test_iri(self):
- """ Test that the right type of escaping is done for each part of the URI."""
- self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
- self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
- self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
+ """Test that the right type of escaping is done for each part of the URI."""
+ self.assertEqual(
+ "http://xn--o3h.com/%E2%98%84",
+ iri2uri(u"http://\N{COMET}.com/\N{COMET}"),
+ )
+ self.assertEqual(
+ "http://bitworking.org/?fred=%E2%98%84",
+ iri2uri(u"http://bitworking.org/?fred=\N{COMET}"),
+ )
+ self.assertEqual(
+ "http://bitworking.org/#%E2%98%84",
+ iri2uri(u"http://bitworking.org/#\N{COMET}"),
+ )
self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
- self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
- self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
- self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
+ self.assertEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
+ )
+ self.assertEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
+ )
+ self.assertNotEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(
+ u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
+ ),
+ )
unittest.main()
-
-
diff --git a/python2/httplib2/socks.py b/python2/httplib2/socks.py
index dbbe511..5cef776 100644
--- a/python2/httplib2/socks.py
+++ b/python2/httplib2/socks.py
@@ -1,4 +1,5 @@
"""SocksiPy - Python SOCKS module.
+
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
@@ -24,20 +25,14 @@ OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
-"""
-
-"""
-
-Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
-for use in PyLoris (http://pyloris.sourceforge.net/)
+Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for
+use in PyLoris (http://pyloris.sourceforge.net/).
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
-mainly to merge bug fixes found in Sourceforge
-
+mainly to merge bug fixes found in Sourceforge.
"""
import base64
@@ -45,8 +40,8 @@ import socket
import struct
import sys
-if getattr(socket, 'socket', None) is None:
- raise ImportError('socket.socket missing, proxy support unusable')
+if getattr(socket, "socket", None) is None:
+ raise ImportError("socket.socket missing, proxy support unusable")
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
@@ -56,21 +51,42 @@ PROXY_TYPE_HTTP_NO_TUNNEL = 4
_defaultproxy = None
_orgsocket = socket.socket
-class ProxyError(Exception): pass
-class GeneralProxyError(ProxyError): pass
-class Socks5AuthError(ProxyError): pass
-class Socks5Error(ProxyError): pass
-class Socks4Error(ProxyError): pass
-class HTTPError(ProxyError): pass
-_generalerrors = ("success",
+class ProxyError(Exception):
+ pass
+
+
+class GeneralProxyError(ProxyError):
+ pass
+
+
+class Socks5AuthError(ProxyError):
+ pass
+
+
+class Socks5Error(ProxyError):
+ pass
+
+
+class Socks4Error(ProxyError):
+ pass
+
+
+class HTTPError(ProxyError):
+ pass
+
+
+_generalerrors = (
+ "success",
"invalid data",
"not connected",
"not available",
"bad proxy type",
- "bad input")
+ "bad input",
+)
-_socks5errors = ("succeeded",
+_socks5errors = (
+ "succeeded",
"general SOCKS server failure",
"connection not allowed by ruleset",
"Network unreachable",
@@ -79,21 +95,30 @@ _socks5errors = ("succeeded",
"TTL expired",
"Command not supported",
"Address type not supported",
- "Unknown error")
+ "Unknown error",
+)
-_socks5autherrors = ("succeeded",
+_socks5autherrors = (
+ "succeeded",
"authentication is required",
"all offered authentication methods were rejected",
"unknown username or invalid password",
- "unknown error")
+ "unknown error",
+)
-_socks4errors = ("request granted",
+_socks4errors = (
+ "request granted",
"request rejected or failed",
"request rejected because SOCKS server cannot connect to identd on the client",
- "request rejected because the client program and identd report different user-ids",
- "unknown error")
+ "request rejected because the client program and identd report different "
+ "user-ids",
+ "unknown error",
+)
-def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
+
+def setdefaultproxy(
+ proxytype=None, addr=None, port=None, rdns=True, username=None, password=None
+):
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed.
@@ -101,11 +126,14 @@ def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=No
global _defaultproxy
_defaultproxy = (proxytype, addr, port, rdns, username, password)
+
def wrapmodule(module):
"""wrapmodule(module)
+
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
- This will only work on modules that import socket directly into the namespace;
+ This will only work on modules that import socket directly into the
+ namespace;
most of the Python Standard Library falls into this category.
"""
if _defaultproxy != None:
@@ -113,6 +141,7 @@ def wrapmodule(module):
else:
raise GeneralProxyError((4, "no proxy specified"))
+
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
@@ -120,7 +149,9 @@ class socksocket(socket.socket):
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
- def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
+ def __init__(
+ self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None
+ ):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy != None:
self.__proxy = _defaultproxy
@@ -137,8 +168,9 @@ class socksocket(socket.socket):
"""
data = self.recv(count)
while len(data) < count:
- d = self.recv(count-len(data))
- if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
+ d = self.recv(count - len(data))
+ if not d:
+ raise GeneralProxyError((0, "connection closed unexpectedly"))
data = data + d
return data
@@ -167,7 +199,7 @@ class socksocket(socket.socket):
hdrs.remove(endpt)
host = host.split(" ")[1]
endpt = endpt.split(" ")
- if (self.__proxy[4] != None and self.__proxy[5] != None):
+ if self.__proxy[4] != None and self.__proxy[5] != None:
hdrs.insert(0, self.__getauthheader())
hdrs.insert(0, "Host: %s" % host)
hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
@@ -177,8 +209,18 @@ class socksocket(socket.socket):
auth = self.__proxy[4] + ":" + self.__proxy[5]
return "Proxy-Authorization: Basic " + base64.b64encode(auth)
- def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None, headers=None):
+ def setproxy(
+ self,
+ proxytype=None,
+ addr=None,
+ port=None,
+ rdns=True,
+ username=None,
+ password=None,
+ headers=None,
+ ):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+
Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
@@ -193,7 +235,8 @@ class socksocket(socket.socket):
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
- headers - Additional or modified headers for the proxy connect request.
+ headers - Additional or modified headers for the proxy connect
+ request.
"""
self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
@@ -202,15 +245,15 @@ class socksocket(socket.socket):
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
- if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
+ if (self.__proxy[4] != None) and (self.__proxy[5] != None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
- self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
+ self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
- self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
+ self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00))
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
@@ -224,7 +267,13 @@ class socksocket(socket.socket):
elif chosenauth[1:2] == chr(0x02).encode():
# Okay, we need to perform a basic username/password
# authentication.
- self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
+ self.sendall(
+ chr(0x01).encode()
+ + chr(len(self.__proxy[4]))
+ + self.__proxy[4]
+ + chr(len(self.__proxy[5]))
+ + self.__proxy[5]
+ )
authstat = self.__recvall(2)
if authstat[0:1] != chr(0x01).encode():
# Bad response
@@ -243,7 +292,7 @@ class socksocket(socket.socket):
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
- req = struct.pack('BBB', 0x05, 0x01, 0x00)
+ req = struct.pack("BBB", 0x05, 0x01, 0x00)
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
@@ -254,7 +303,12 @@ class socksocket(socket.socket):
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
- req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr.encode()
+ req = (
+ req
+ + chr(0x03).encode()
+ + chr(len(destaddr)).encode()
+ + destaddr.encode()
+ )
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
@@ -269,7 +323,7 @@ class socksocket(socket.socket):
elif resp[1:2] != chr(0x00).encode():
# Connection failed
self.close()
- if ord(resp[1:2])<=8:
+ if ord(resp[1:2]) <= 8:
raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
else:
raise Socks5Error((9, _socks5errors[9]))
@@ -281,7 +335,7 @@ class socksocket(socket.socket):
boundaddr = self.__recvall(ord(resp[4:5]))
else:
self.close()
- raise GeneralProxyError((1,_generalerrors[1]))
+ raise GeneralProxyError((1, _generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr != None:
@@ -308,7 +362,7 @@ class socksocket(socket.socket):
"""
return self.__proxypeername
- def __negotiatesocks4(self,destaddr,destport):
+ def __negotiatesocks4(self, destaddr, destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
@@ -340,7 +394,7 @@ class socksocket(socket.socket):
if resp[0:1] != chr(0x00).encode():
# Bad data
self.close()
- raise GeneralProxyError((1,_generalerrors[1]))
+ raise GeneralProxyError((1, _generalerrors[1]))
if resp[1:2] != chr(0x5A).encode():
# Server returned an error
self.close()
@@ -350,7 +404,10 @@ class socksocket(socket.socket):
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
- self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
+ self.__proxysockname = (
+ socket.inet_ntoa(resp[4:]),
+ struct.unpack(">H", resp[2:4])[0],
+ )
if rmtrslv != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
@@ -365,18 +422,18 @@ class socksocket(socket.socket):
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
- headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
+ headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
wrote_host_header = False
wrote_auth_header = False
if self.__proxy[6] != None:
for key, val in self.__proxy[6].iteritems():
headers += [key, ": ", val, "\r\n"]
- wrote_host_header = (key.lower() == "host")
- wrote_auth_header = (key.lower() == "proxy-authorization")
+ wrote_host_header = key.lower() == "host"
+ wrote_auth_header = key.lower() == "proxy-authorization"
if not wrote_host_header:
headers += ["Host: ", destaddr, "\r\n"]
if not wrote_auth_header:
- if (self.__proxy[4] != None and self.__proxy[5] != None):
+ if self.__proxy[4] != None and self.__proxy[5] != None:
headers += [self.__getauthheader(), "\r\n"]
headers.append("\r\n")
self.sendall("".join(headers).encode())
@@ -409,7 +466,12 @@ class socksocket(socket.socket):
To select the proxy server use setproxy().
"""
# Do a minimal input check first
- if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
+ if (
+ (not type(destpair) in (list, tuple))
+ or (len(destpair) < 2)
+ or (not isinstance(destpair[0], basestring))
+ or (type(destpair[1]) != int)
+ ):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] != None:
@@ -423,23 +485,23 @@ class socksocket(socket.socket):
portnum = self.__proxy[2]
else:
portnum = 1080
- _orgsocket.connect(self,(self.__proxy[1], portnum))
+ _orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
- _orgsocket.connect(self,(self.__proxy[1], portnum))
+ _orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
- _orgsocket.connect(self,(self.__proxy[1],portnum))
+ _orgsocket.connect(self, (self.__proxy[1], portnum))
if destpair[1] == 443:
- self.__negotiatehttp(destpair[0],destpair[1])
+ self.__negotiatehttp(destpair[0], destpair[1])
else:
self.__httptunnel = False
elif self.__proxy[0] == None:
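
For context, the module-level entry points above are typically used like this
(the proxy host and the wrapped module are illustrative):

    import httplib2.socks as socks

    # Every socksocket created after this uses the SOCKS5 proxy by default.
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "proxy.internal", 1080)

    # wrapmodule only affects modules that do `import socket` directly.
    import urllib
    socks.wrapmodule(urllib)
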
diff --git a/python2/httplib2/test/functional/test_proxies.py b/python2/httplib2/test/functional/test_proxies.py
index e11369d..939140d 100644
--- a/python2/httplib2/test/functional/test_proxies.py
+++ b/python2/httplib2/test/functional/test_proxies.py
@@ -27,35 +27,35 @@ LogLevel Info
class FunctionalProxyHttpTest(unittest.TestCase):
def setUp(self):
if not socks:
- raise nose.SkipTest('socks module unavailable')
+ raise nose.SkipTest("socks module unavailable")
if not subprocess:
- raise nose.SkipTest('subprocess module unavailable')
+ raise nose.SkipTest("subprocess module unavailable")
# start a short-lived miniserver so we can get a likely port
# for the proxy
- self.httpd, self.proxyport = miniserver.start_server(
- miniserver.ThisDirHandler)
+ self.httpd, self.proxyport = miniserver.start_server(miniserver.ThisDirHandler)
self.httpd.shutdown()
- self.httpd, self.port = miniserver.start_server(
- miniserver.ThisDirHandler)
+ self.httpd, self.port = miniserver.start_server(miniserver.ThisDirHandler)
self.pidfile = tempfile.mktemp()
self.logfile = tempfile.mktemp()
fd, self.conffile = tempfile.mkstemp()
- f = os.fdopen(fd, 'w')
- our_cfg = tinyproxy_cfg % {'user': os.getlogin(),
- 'pidfile': self.pidfile,
- 'port': self.proxyport,
- 'logfile': self.logfile}
+ f = os.fdopen(fd, "w")
+ our_cfg = tinyproxy_cfg % {
+ "user": os.getlogin(),
+ "pidfile": self.pidfile,
+ "port": self.proxyport,
+ "logfile": self.logfile,
+ }
f.write(our_cfg)
f.close()
try:
# TODO use subprocess.check_call when 2.4 is dropped
- ret = subprocess.call(['tinyproxy', '-c', self.conffile])
+ ret = subprocess.call(["tinyproxy", "-c", self.conffile])
self.assertEqual(0, ret)
except OSError as e:
if e.errno == errno.ENOENT:
- raise nose.SkipTest('tinyproxy not available')
+ raise nose.SkipTest("tinyproxy not available")
raise
def tearDown(self):
@@ -65,25 +65,23 @@ class FunctionalProxyHttpTest(unittest.TestCase):
os.kill(pid, signal.SIGTERM)
except OSError as e:
if e.errno == errno.ESRCH:
- print('\n\n\nTinyProxy Failed to start, log follows:')
+ print("\n\n\nTinyProxy Failed to start, log follows:")
print(open(self.logfile).read())
- print('end tinyproxy log\n\n\n')
+ print("end tinyproxy log\n\n\n")
raise
- map(os.unlink, (self.pidfile,
- self.logfile,
- self.conffile))
+ map(os.unlink, (self.pidfile, self.logfile, self.conffile))
def testSimpleProxy(self):
- proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP,
- 'localhost', self.proxyport)
+ proxy_info = httplib2.ProxyInfo(
+ socks.PROXY_TYPE_HTTP, "localhost", self.proxyport
+ )
client = httplib2.Http(proxy_info=proxy_info)
- src = 'miniserver.py'
- response, body = client.request('http://localhost:%d/%s' %
- (self.port, src))
+ src = "miniserver.py"
+ response, body = client.request("http://localhost:%d/%s" % (self.port, src))
self.assertEqual(response.status, 200)
self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read())
lf = open(self.logfile).read()
- expect = ('Established connection to host "127.0.0.1" '
- 'using file descriptor')
- self.assertTrue(expect in lf,
- 'tinyproxy did not proxy a request for miniserver')
+ expect = 'Established connection to host "127.0.0.1" ' "using file descriptor"
+ self.assertTrue(
+ expect in lf, "tinyproxy did not proxy a request for miniserver"
+ )
diff --git a/python2/httplib2/test/miniserver.py b/python2/httplib2/test/miniserver.py
index f72ecca..47c3ee5 100644
--- a/python2/httplib2/test/miniserver.py
+++ b/python2/httplib2/test/miniserver.py
@@ -12,8 +12,8 @@ logger = logging.getLogger(__name__)
class ThisDirHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def translate_path(self, path):
- path = path.split('?', 1)[0].split('#', 1)[0]
- return os.path.join(HERE, *filter(None, path.split('/')))
+ path = path.split("?", 1)[0].split("#", 1)[0]
+ return os.path.join(HERE, *filter(None, path.split("/")))
def log_message(self, s, *args):
# output via logging so nose can catch it
@@ -38,12 +38,13 @@ class ShutdownServer(SocketServer.TCPServer):
SocketServer.TCPServer.server_bind(self)
if self.__use_tls:
import ssl
- self.socket = ssl.wrap_socket(self.socket,
- os.path.join(os.path.dirname(__file__), 'server.key'),
- os.path.join(os.path.dirname(__file__), 'server.pem'),
- True
- )
+ self.socket = ssl.wrap_socket(
+ self.socket,
+ os.path.join(os.path.dirname(__file__), "server.key"),
+ os.path.join(os.path.dirname(__file__), "server.pem"),
+ True,
+ )
def serve_forever(self, poll_interval=0.1):
"""Handle one request at a time until shutdown.
diff --git a/python2/httplib2/test/smoke_test.py b/python2/httplib2/test/smoke_test.py
index 9f1e6f0..25e9cf2 100644
--- a/python2/httplib2/test/smoke_test.py
+++ b/python2/httplib2/test/smoke_test.py
@@ -8,16 +8,14 @@ from httplib2.test import miniserver
class HttpSmokeTest(unittest.TestCase):
def setUp(self):
- self.httpd, self.port = miniserver.start_server(
- miniserver.ThisDirHandler)
+ self.httpd, self.port = miniserver.start_server(miniserver.ThisDirHandler)
def tearDown(self):
self.httpd.shutdown()
def testGetFile(self):
client = httplib2.Http()
- src = 'miniserver.py'
- response, body = client.request('http://localhost:%d/%s' %
- (self.port, src))
+ src = "miniserver.py"
+ response, body = client.request("http://localhost:%d/%s" % (self.port, src))
self.assertEqual(response.status, 200)
self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read())
diff --git a/python2/httplib2/test/test_no_socket.py b/python2/httplib2/test/test_no_socket.py
index 66ba056..d251cbc 100644
--- a/python2/httplib2/test/test_no_socket.py
+++ b/python2/httplib2/test/test_no_socket.py
@@ -8,6 +8,7 @@ import unittest
import httplib2
+
class MissingSocketTest(unittest.TestCase):
def setUp(self):
self._oldsocks = httplib2.socks
@@ -17,8 +18,8 @@ class MissingSocketTest(unittest.TestCase):
httplib2.socks = self._oldsocks
def testProxyDisabled(self):
- proxy_info = httplib2.ProxyInfo('blah',
- 'localhost', 0)
+ proxy_info = httplib2.ProxyInfo("blah", "localhost", 0)
client = httplib2.Http(proxy_info=proxy_info)
- self.assertRaises(httplib2.ProxiesUnavailableError,
- client.request, 'http://localhost:-1/')
+ self.assertRaises(
+ httplib2.ProxiesUnavailableError, client.request, "http://localhost:-1/"
+ )
diff --git a/python2/httplib2/test/test_ssl_context.py b/python2/httplib2/test/test_ssl_context.py
index 5cf9efb..43504dc 100644
--- a/python2/httplib2/test/test_ssl_context.py
+++ b/python2/httplib2/test/test_ssl_context.py
@@ -10,15 +10,14 @@ import unittest
import httplib2
from httplib2.test import miniserver
-
logger = logging.getLogger(__name__)
class KeepAliveHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ """Request handler that keeps the HTTP connection open, so that the test can inspect the resulting SSL connection object
+
"""
- Request handler that keeps the HTTP connection open, so that the test can
- inspect the resulting SSL connection object
- """
+
def do_GET(self):
self.send_response(200)
self.send_header("Content-Length", "0")
@@ -40,7 +39,7 @@ class HttpsContextTest(unittest.TestCase):
else:
return
- self.ca_certs_path = os.path.join(os.path.dirname(__file__), 'server.pem')
+ self.ca_certs_path = os.path.join(os.path.dirname(__file__), "server.pem")
self.httpd, self.port = miniserver.start_server(KeepAliveHandler, True)
def tearDown(self):
@@ -50,16 +49,16 @@ class HttpsContextTest(unittest.TestCase):
client = httplib2.Http(ca_certs=self.ca_certs_path)
# Establish connection to local server
- client.request('https://localhost:%d/' % (self.port))
+ client.request("https://localhost:%d/" % (self.port))
# Verify that connection uses a TLS context with the correct hostname
- conn = client.connections['https:localhost:%d' % self.port]
+ conn = client.connections["https:localhost:%d" % self.port]
self.assertIsInstance(conn.sock, ssl.SSLSocket)
- self.assertTrue(hasattr(conn.sock, 'context'))
+ self.assertTrue(hasattr(conn.sock, "context"))
self.assertIsInstance(conn.sock.context, ssl.SSLContext)
self.assertTrue(conn.sock.context.check_hostname)
- self.assertEqual(conn.sock.server_hostname, 'localhost')
+ self.assertEqual(conn.sock.server_hostname, "localhost")
self.assertEqual(conn.sock.context.verify_mode, ssl.CERT_REQUIRED)
self.assertEqual(conn.sock.context.protocol, ssl.PROTOCOL_SSLv23)
@@ -72,15 +71,15 @@ class HttpsContextTest(unittest.TestCase):
# which was also added to original patch.
# url host is intentionally different, we provoke ssl hostname mismatch error
- url = 'https://127.0.0.1:%d/' % (self.port,)
+ url = "https://127.0.0.1:%d/" % (self.port,)
http = httplib2.Http(ca_certs=self.ca_certs_path, proxy_info=None)
def once():
try:
http.request(url)
- assert False, 'expected certificate hostname mismatch error'
+ assert False, "expected certificate hostname mismatch error"
except Exception as e:
- print('%s errno=%s' % (repr(e), getattr(e, 'errno', None)))
+ print("%s errno=%s" % (repr(e), getattr(e, "errno", None)))
once()
once()
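A rough standalone version of the context assertions above, assuming a local HTTPS server on port 8443 whose self-signed certificate sits in server.pem (both placeholders):

    import ssl
    import httplib2

    client = httplib2.Http(ca_certs="server.pem")
    client.request("https://localhost:8443/")
    conn = client.connections["https:localhost:8443"]  # pooled, still open
    assert isinstance(conn.sock, ssl.SSLSocket)
    assert conn.sock.context.check_hostname            # hostname checking enabled
    assert conn.sock.server_hostname == "localhost"    # verification target
    assert conn.sock.context.verify_mode == ssl.CERT_REQUIRED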
diff --git a/python2/httplib2test.py b/python2/httplib2test.py
index 82faabc..3999622 100755
--- a/python2/httplib2test.py
+++ b/python2/httplib2test.py
@@ -1,27 +1,19 @@
#!/usr/bin/env python2.4
-"""
-httplib2test
-
-A set of unit tests for httplib2.py.
-
-Requires Python 2.4 or later
-"""
+"""A set of unit tests for httplib2.py."""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__license__ = "MIT"
-__history__ = """ """
__version__ = "0.1 ($Rev: 118 $)"
-
-import StringIO
import base64
import httplib
import httplib2
import os
import pickle
import socket
+import StringIO
import sys
import time
import unittest
@@ -33,13 +25,13 @@ except ImportError:
pass
# Python 2.3 support
-if not hasattr(unittest.TestCase, 'assertTrue'):
+if not hasattr(unittest.TestCase, "assertTrue"):
unittest.TestCase.assertTrue = unittest.TestCase.failUnless
unittest.TestCase.assertFalse = unittest.TestCase.failIf
# The test resources base uri
-base = 'http://bitworking.org/projects/httplib2/test/'
-#base = 'http://localhost/projects/httplib2/test/'
+base = "http://bitworking.org/projects/httplib2/test/"
+# base = 'http://localhost/projects/httplib2/test/'
cacheDirName = ".cache"
@@ -64,49 +56,109 @@ class CredentialsTest(unittest.TestCase):
class ParserTest(unittest.TestCase):
def testFromStd66(self):
- self.assertEqual( ('http', 'example.com', '', None, None ), httplib2.parse_uri("http://example.com"))
- self.assertEqual( ('https', 'example.com', '', None, None ), httplib2.parse_uri("https://example.com"))
- self.assertEqual( ('https', 'example.com:8080', '', None, None ), httplib2.parse_uri("https://example.com:8080"))
- self.assertEqual( ('http', 'example.com', '/', None, None ), httplib2.parse_uri("http://example.com/"))
- self.assertEqual( ('http', 'example.com', '/path', None, None ), httplib2.parse_uri("http://example.com/path"))
- self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', None ), httplib2.parse_uri("http://example.com/path?a=1&b=2"))
- self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', 'fred' ), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"))
- self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', 'fred' ), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"))
+ self.assertEqual(
+ ("http", "example.com", "", None, None),
+ httplib2.parse_uri("http://example.com"),
+ )
+ self.assertEqual(
+ ("https", "example.com", "", None, None),
+ httplib2.parse_uri("https://example.com"),
+ )
+ self.assertEqual(
+ ("https", "example.com:8080", "", None, None),
+ httplib2.parse_uri("https://example.com:8080"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/", None, None),
+ httplib2.parse_uri("http://example.com/"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", None, None),
+ httplib2.parse_uri("http://example.com/path"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", "a=1&b=2", None),
+ httplib2.parse_uri("http://example.com/path?a=1&b=2"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", "a=1&b=2", "fred"),
+ httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", "a=1&b=2", "fred"),
+ httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"),
+ )
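As the assertions spell out, httplib2.parse_uri splits a URI into an RFC 3986 (STD 66) five-tuple of (scheme, authority, path, query, fragment); a quick interactive check:

    >>> import httplib2
    >>> httplib2.parse_uri("http://example.com/path?a=1&b=2#fred")
    ('http', 'example.com', '/path', 'a=1&b=2', 'fred')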
class UrlNormTest(unittest.TestCase):
def test(self):
- self.assertEqual( "http://example.org/", httplib2.urlnorm("http://example.org")[-1])
- self.assertEqual( "http://example.org/", httplib2.urlnorm("http://EXAMple.org")[-1])
- self.assertEqual( "http://example.org/?=b", httplib2.urlnorm("http://EXAMple.org?=b")[-1])
- self.assertEqual( "http://example.org/mypath?a=b", httplib2.urlnorm("http://EXAMple.org/mypath?a=b")[-1])
- self.assertEqual( "http://localhost:80/", httplib2.urlnorm("http://localhost:80")[-1])
- self.assertEqual( httplib2.urlnorm("http://localhost:80/"), httplib2.urlnorm("HTTP://LOCALHOST:80"))
+ self.assertEqual(
+ "http://example.org/", httplib2.urlnorm("http://example.org")[-1]
+ )
+ self.assertEqual(
+ "http://example.org/", httplib2.urlnorm("http://EXAMple.org")[-1]
+ )
+ self.assertEqual(
+ "http://example.org/?=b", httplib2.urlnorm("http://EXAMple.org?=b")[-1]
+ )
+ self.assertEqual(
+ "http://example.org/mypath?a=b",
+ httplib2.urlnorm("http://EXAMple.org/mypath?a=b")[-1],
+ )
+ self.assertEqual(
+ "http://localhost:80/", httplib2.urlnorm("http://localhost:80")[-1]
+ )
+ self.assertEqual(
+ httplib2.urlnorm("http://localhost:80/"),
+ httplib2.urlnorm("HTTP://LOCALHOST:80"),
+ )
try:
httplib2.urlnorm("/")
self.fail("Non-absolute URIs should raise an exception")
except httplib2.RelativeURIError:
pass
+
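The [-1] indexing above works because urlnorm returns a four-tuple of (scheme, authority, request-URI, defragmented URI), with scheme and host lowercased and an empty path normalized to "/":

    >>> httplib2.urlnorm("HTTP://EXAMple.org")
    ('http', 'example.org', '/', 'http://example.org/')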
class UrlSafenameTest(unittest.TestCase):
def test(self):
# Test that different URIs end up generating different safe names
- self.assertEqual( "example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f", httplib2.safename("http://example.org/fred/?a=b"))
- self.assertEqual( "example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b", httplib2.safename("http://example.org/fred?/a=b"))
- self.assertEqual( "www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968", httplib2.safename("http://www.example.org/fred?/a=b"))
- self.assertEqual( httplib2.safename(httplib2.urlnorm("http://www")[-1]), httplib2.safename(httplib2.urlnorm("http://WWW")[-1]))
- self.assertEqual( "www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d", httplib2.safename("https://www.example.org/fred?/a=b"))
- self.assertNotEqual( httplib2.safename("http://www"), httplib2.safename("https://www"))
+ self.assertEqual(
+ "example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f",
+ httplib2.safename("http://example.org/fred/?a=b"),
+ )
+ self.assertEqual(
+ "example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b",
+ httplib2.safename("http://example.org/fred?/a=b"),
+ )
+ self.assertEqual(
+ "www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968",
+ httplib2.safename("http://www.example.org/fred?/a=b"),
+ )
+ self.assertEqual(
+ httplib2.safename(httplib2.urlnorm("http://www")[-1]),
+ httplib2.safename(httplib2.urlnorm("http://WWW")[-1]),
+ )
+ self.assertEqual(
+ "www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d",
+ httplib2.safename("https://www.example.org/fred?/a=b"),
+ )
+ self.assertNotEqual(
+ httplib2.safename("http://www"), httplib2.safename("https://www")
+ )
# Test the max length limits
uri = "http://" + ("w" * 200) + ".org"
uri2 = "http://" + ("w" * 201) + ".org"
- self.assertNotEqual( httplib2.safename(uri2), httplib2.safename(uri))
+ self.assertNotEqual(httplib2.safename(uri2), httplib2.safename(uri))
# Max length should be 200 + 1 (",") + 32
self.assertEqual(233, len(httplib2.safename(uri2)))
self.assertEqual(233, len(httplib2.safename(uri)))
# Unicode
- if sys.version_info >= (2,3):
- self.assertEqual( "xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193", httplib2.safename(u"http://\u2304.org/fred/?a=b"))
+ if sys.version_info >= (2, 3):
+ self.assertEqual(
+ "xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193",
+ httplib2.safename(u"http://\u2304.org/fred/?a=b"),
+ )
+
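The 233 in the length assertions is 200 + 1 + 32: the sanitized name is capped at 200 characters, then a comma and the 32-character hex MD5 of the full URI are appended. A sketch of the boundary case:

    >>> len(httplib2.safename("http://" + "w" * 300 + ".org"))
    233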
class _MyResponse(StringIO.StringIO):
def __init__(self, body, **kwargs):
@@ -120,8 +172,16 @@ class _MyResponse(StringIO.StringIO):
class _MyHTTPConnection(object):
"This class is just a mock of httplib.HTTPConnection used for testing"
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None):
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ):
self.host = host
self.port = port
self.timeout = timeout
@@ -144,13 +204,22 @@ class _MyHTTPConnection(object):
def getresponse(self):
return _MyResponse("the body", status="200")
+
class _MyHTTPBadStatusConnection(object):
"Mock of httplib.HTTPConnection that raises BadStatusLine."
num_calls = 0
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None):
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ):
self.host = host
self.port = port
self.timeout = timeout
@@ -178,43 +247,48 @@ class _MyHTTPBadStatusConnection(object):
class HttpTest(unittest.TestCase):
def setUp(self):
if os.path.exists(cacheDirName):
- [os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)]
+            for file in os.listdir(cacheDirName):
+                os.remove(os.path.join(cacheDirName, file))
if sys.version_info < (2, 6):
disable_cert_validation = True
else:
disable_cert_validation = False
self.http = httplib2.Http(
- cacheDirName,
- disable_ssl_certificate_validation=disable_cert_validation)
+ cacheDirName, disable_ssl_certificate_validation=disable_cert_validation
+ )
self.http.clear_credentials()
def testIPv6NoSSL(self):
try:
- self.http.request("http://[::1]/")
+ self.http.request("http://[::1]/")
except socket.gaierror:
- self.fail("should get the address family right for IPv6")
+ self.fail("should get the address family right for IPv6")
except socket.error:
- # Even if IPv6 isn't installed on a machine it should just raise socket.error
- pass
+ # Even if IPv6 isn't installed on a machine it should just raise socket.error
+ pass
def testIPv6SSL(self):
try:
- self.http.request("https://[::1]/")
+ self.http.request("https://[::1]/")
except socket.gaierror:
- self.fail("should get the address family right for IPv6")
+ self.fail("should get the address family right for IPv6")
except httplib2.CertificateHostnameMismatch:
- # We connected and verified that the certificate doesn't match
- # the name. Good enough.
- pass
+ # We connected and verified that the certificate doesn't match
+ # the name. Good enough.
+ pass
except socket.error:
- # Even if IPv6 isn't installed on a machine it should just raise socket.error
- pass
+ # Even if IPv6 isn't installed on a machine it should just raise socket.error
+ pass
def testConnectionType(self):
self.http.force_exception_to_status_code = False
- response, content = self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection)
- self.assertEqual(response['content-location'], "http://bitworking.org")
+ response, content = self.http.request(
+ "http://bitworking.org", connection_type=_MyHTTPConnection
+ )
+ self.assertEqual(response["content-location"], "http://bitworking.org")
self.assertEqual(content, "the body")
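The connection_type keyword used here is a general injection point: request() instantiates whatever HTTPConnection-compatible class it is handed, which is how these tests swap in mocks for real sockets. Reusing the _MyHTTPConnection mock defined above:

    import httplib2

    http = httplib2.Http()
    response, content = http.request(
        "http://bitworking.org", connection_type=_MyHTTPConnection
    )
    assert content == "the body"  # canned body from the mock; no network I/O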
def testBadStatusLineRetry(self):
@@ -222,8 +296,9 @@ class HttpTest(unittest.TestCase):
httplib2.RETRIES = 1
self.http.force_exception_to_status_code = False
try:
- response, content = self.http.request("http://bitworking.org",
- connection_type=_MyHTTPBadStatusConnection)
+ response, content = self.http.request(
+ "http://bitworking.org", connection_type=_MyHTTPBadStatusConnection
+ )
except httplib.BadStatusLine:
self.assertEqual(2, _MyHTTPBadStatusConnection.num_calls)
httplib2.RETRIES = old_retries
@@ -232,7 +307,9 @@ class HttpTest(unittest.TestCase):
self.http.force_exception_to_status_code = False
try:
self.http.request("http://fred.bitworking.org/")
- self.fail("An httplib2.ServerNotFoundError Exception must be thrown on an unresolvable server.")
+ self.fail(
+ "An httplib2.ServerNotFoundError Exception must be thrown on an unresolvable server."
+ )
except httplib2.ServerNotFoundError:
pass
@@ -240,15 +317,15 @@ class HttpTest(unittest.TestCase):
self.http.force_exception_to_status_code = True
(response, content) = self.http.request("http://fred.bitworking.org/")
- self.assertEqual(response['content-type'], 'text/plain')
+ self.assertEqual(response["content-type"], "text/plain")
self.assertTrue(content.startswith("Unable to find"))
self.assertEqual(response.status, 400)
def testGetConnectionRefused(self):
self.http.force_exception_to_status_code = False
try:
- self.http.request("http://localhost:7777/")
- self.fail("An socket.error exception must be thrown on Connection Refused.")
+ self.http.request("http://localhost:7777/")
+ self.fail("An socket.error exception must be thrown on Connection Refused.")
except socket.error:
pass
@@ -256,32 +333,35 @@ class HttpTest(unittest.TestCase):
self.http.force_exception_to_status_code = True
(response, content) = self.http.request("http://localhost:7777/")
- self.assertEqual(response['content-type'], 'text/plain')
- self.assertTrue("Connection refused" in content
- or "actively refused" in content,
- "Unexpected status %(content)s" % vars())
+ self.assertEqual(response["content-type"], "text/plain")
+ self.assertTrue(
+ "Connection refused" in content or "actively refused" in content,
+ "Unexpected status %(content)s" % vars(),
+ )
self.assertEqual(response.status, 400)
def testGetIRI(self):
- if sys.version_info >= (2,3):
- uri = urlparse.urljoin(base, u"reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}")
+ if sys.version_info >= (2, 3):
+ uri = urlparse.urljoin(
+ base, u"reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}"
+ )
(response, content) = self.http.request(uri, "GET")
d = self.reflector(content)
- self.assertTrue('QUERY_STRING' in d)
- self.assertTrue(d['QUERY_STRING'].find('%D0%82') > 0)
+ self.assertTrue("QUERY_STRING" in d)
+ self.assertTrue(d["QUERY_STRING"].find("%D0%82") > 0)
def testGetIsDefaultMethod(self):
# Test that GET is the default method
uri = urlparse.urljoin(base, "methods/method_reflector.cgi")
(response, content) = self.http.request(uri)
- self.assertEqual(response['x-method'], "GET")
+ self.assertEqual(response["x-method"], "GET")
def testDifferentMethods(self):
# Test that all methods can be used
uri = urlparse.urljoin(base, "methods/method_reflector.cgi")
for method in ["GET", "PUT", "DELETE", "POST"]:
(response, content) = self.http.request(uri, method, body=" ")
- self.assertEqual(response['x-method'], method)
+ self.assertEqual(response["x-method"], method)
def testHeadRead(self):
# Test that we don't try to read the response of a HEAD request
@@ -305,14 +385,18 @@ class HttpTest(unittest.TestCase):
# Test that can do a GET with cache and 'only-if-cached'
uri = urlparse.urljoin(base, "304/test_etag.txt")
(response, content) = self.http.request(uri, "GET")
- (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
self.assertEqual(response.fromcache, True)
self.assertEqual(response.status, 200)
def testGetOnlyIfCachedCacheMiss(self):
# Test that can do a GET with no cache with 'only-if-cached'
uri = urlparse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
self.assertEqual(response.fromcache, False)
self.assertEqual(response.status, 504)
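only-if-cached tells httplib2 to answer from its cache or not at all: on a miss it synthesizes a 504 locally instead of touching the network. Roughly (URL illustrative):

    import httplib2

    http = httplib2.Http(".cache")
    response, content = http.request(
        "http://example.com/resource",
        "GET",
        headers={"cache-control": "only-if-cached"},
    )
    # on a cache miss: response.status == 504 and response.fromcache is False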
@@ -323,7 +407,9 @@ class HttpTest(unittest.TestCase):
# test can't really be guaranteed to pass.
http = httplib2.Http()
uri = urlparse.urljoin(base, "304/test_etag.txt")
- (response, content) = http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
+ (response, content) = http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
self.assertEqual(response.fromcache, False)
self.assertEqual(response.status, 504)
@@ -338,7 +424,9 @@ class HttpTest(unittest.TestCase):
# Test that the default user-agent can be over-ridden
uri = urlparse.urljoin(base, "user-agent/test.cgi")
- (response, content) = self.http.request(uri, "GET", headers={'User-Agent': 'fred/1.0'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"User-Agent": "fred/1.0"}
+ )
self.assertEqual(response.status, 200)
self.assertTrue(content.startswith("fred/1.0"))
@@ -371,7 +459,7 @@ class HttpTest(unittest.TestCase):
uri = urlparse.urljoin(base, "300/without-location-header.asis")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 300)
- self.assertTrue(response['content-type'].startswith("text/html"))
+ self.assertTrue(response["content-type"].startswith("text/html"))
self.assertEqual(response.previous, None)
def testGet301(self):
@@ -381,15 +469,15 @@ class HttpTest(unittest.TestCase):
destination = urlparse.urljoin(base, "302/final-destination.txt")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
- self.assertTrue('content-location' in response)
- self.assertEqual(response['content-location'], destination)
+ self.assertTrue("content-location" in response)
+ self.assertEqual(response["content-location"], destination)
self.assertEqual(content, "This is the final destination.\n")
self.assertEqual(response.previous.status, 301)
self.assertEqual(response.previous.fromcache, False)
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
- self.assertEqual(response['content-location'], destination)
+ self.assertEqual(response["content-location"], destination)
self.assertEqual(content, "This is the final destination.\n")
self.assertEqual(response.previous.status, 301)
self.assertEqual(response.previous.fromcache, True)
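Each followed redirect leaves the intermediate response on response.previous, so the whole chain stays inspectable after the fact. A sketch of walking it (URL is a placeholder):

    import httplib2

    http = httplib2.Http(".cache")
    response, content = http.request("http://example.com/old-location", "GET")
    hop = response.previous
    while hop is not None:
        print("%s %s" % (hop.status, hop.get("location")))  # e.g. 301 + target
        hop = hop.previous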
@@ -412,7 +500,6 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 301)
-
def testGet302(self):
# Test that we automatically follow 302 redirects
# and that we DO NOT cache the 302 response
@@ -420,7 +507,7 @@ class HttpTest(unittest.TestCase):
destination = urlparse.urljoin(base, "302/final-destination.txt")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
- self.assertEqual(response['content-location'], destination)
+ self.assertEqual(response["content-location"], destination)
self.assertEqual(content, "This is the final destination.\n")
self.assertEqual(response.previous.status, 302)
self.assertEqual(response.previous.fromcache, False)
@@ -429,11 +516,11 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True)
- self.assertEqual(response['content-location'], destination)
+ self.assertEqual(response["content-location"], destination)
self.assertEqual(content, "This is the final destination.\n")
self.assertEqual(response.previous.status, 302)
self.assertEqual(response.previous.fromcache, False)
- self.assertEqual(response.previous['content-location'], uri)
+ self.assertEqual(response.previous["content-location"], uri)
uri = urlparse.urljoin(base, "302/twostep.asis")
@@ -452,7 +539,7 @@ class HttpTest(unittest.TestCase):
uri = urlparse.urljoin(base, "302/twostep.asis")
try:
- (response, content) = self.http.request(uri, "GET", redirections = 1)
+ (response, content) = self.http.request(uri, "GET", redirections=1)
self.fail("This should not happen")
except httplib2.RedirectLimit:
pass
@@ -462,10 +549,10 @@ class HttpTest(unittest.TestCase):
# Re-run the test with out the exceptions
self.http.force_exception_to_status_code = True
- (response, content) = self.http.request(uri, "GET", redirections = 1)
+ (response, content) = self.http.request(uri, "GET", redirections=1)
self.assertEqual(response.status, 500)
self.assertTrue(response.reason.startswith("Redirected more"))
- self.assertEqual("302", response['status'])
+ self.assertEqual("302", response["status"])
self.assertTrue(content.startswith("<html>"))
self.assertTrue(response.previous != None)
@@ -488,7 +575,7 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 500)
self.assertTrue(response.reason.startswith("Redirected but"))
- self.assertEqual("302", response['status'])
+ self.assertEqual("302", response["status"])
self.assertTrue(content.startswith("This is content"))
def testGet301ViaHttps(self):
@@ -499,7 +586,9 @@ class HttpTest(unittest.TestCase):
def testGetViaHttps(self):
# Test that we can handle HTTPS
- (response, content) = self.http.request("https://www.google.com/adsense/", "GET")
+ (response, content) = self.http.request(
+ "https://www.google.com/adsense/", "GET"
+ )
self.assertEqual(200, response.status)
def testGetViaHttpsSpecViolationOnLocation(self):
@@ -514,30 +603,34 @@ class HttpTest(unittest.TestCase):
def testSslCertValidationDoubleDots(self):
pass
# No longer a valid test.
- #if sys.version_info >= (2, 6):
+ # if sys.version_info >= (2, 6):
# Test that we get match a double dot cert
- #try:
+ # try:
# self.http.request("https://www.appspot.com/", "GET")
- #except httplib2.CertificateHostnameMismatch:
+ # except httplib2.CertificateHostnameMismatch:
# self.fail('cert with *.*.appspot.com should not raise an exception.')
def testSslHostnameValidation(self):
- pass
+ pass
# No longer a valid test.
- #if sys.version_info >= (2, 6):
- # The SSL server at google.com:443 returns a certificate for
- # 'www.google.com', which results in a host name mismatch.
- # Note that this test only works because the ssl module and httplib2
- # do not support SNI; for requests specifying a server name of
- # 'google.com' via SNI, a matching cert would be returned.
+ # if sys.version_info >= (2, 6):
+ # The SSL server at google.com:443 returns a certificate for
+ # 'www.google.com', which results in a host name mismatch.
+ # Note that this test only works because the ssl module and httplib2
+ # do not support SNI; for requests specifying a server name of
+ # 'google.com' via SNI, a matching cert would be returned.
# self.assertRaises(httplib2.CertificateHostnameMismatch,
# self.http.request, "https://google.com/", "GET")
def testSslCertValidationWithoutSslModuleFails(self):
if sys.version_info < (2, 6):
http = httplib2.Http(disable_ssl_certificate_validation=False)
- self.assertRaises(httplib2.CertificateValidationUnsupported,
- http.request, "https://www.google.com/", "GET")
+ self.assertRaises(
+ httplib2.CertificateValidationUnsupported,
+ http.request,
+ "https://www.google.com/",
+ "GET",
+ )
def testGetViaHttpsKeyCert(self):
# At this point I can only test
@@ -555,17 +648,20 @@ class HttpTest(unittest.TestCase):
except:
pass
self.assertEqual(http.connections["https:bitworking.org"].key_file, "akeyfile")
- self.assertEqual(http.connections["https:bitworking.org"].cert_file, "acertfile")
+ self.assertEqual(
+ http.connections["https:bitworking.org"].cert_file, "acertfile"
+ )
try:
(response, content) = http.request("https://notthere.bitworking.org", "GET")
except:
pass
- self.assertEqual(http.connections["https:notthere.bitworking.org"].key_file, None)
- self.assertEqual(http.connections["https:notthere.bitworking.org"].cert_file, None)
-
-
-
+ self.assertEqual(
+ http.connections["https:notthere.bitworking.org"].key_file, None
+ )
+ self.assertEqual(
+ http.connections["https:notthere.bitworking.org"].cert_file, None
+ )
def testGet303(self):
# Do a follow-up GET on a Location: header
@@ -587,36 +683,46 @@ class HttpTest(unittest.TestCase):
def test303ForDifferentMethods(self):
# Test that all methods can be used
uri = urlparse.urljoin(base, "303/redirect-to-reflector.cgi")
- for (method, method_on_303) in [("PUT", "GET"), ("DELETE", "GET"), ("POST", "GET"), ("GET", "GET"), ("HEAD", "GET")]:
+ for (method, method_on_303) in [
+ ("PUT", "GET"),
+ ("DELETE", "GET"),
+ ("POST", "GET"),
+ ("GET", "GET"),
+ ("HEAD", "GET"),
+ ]:
(response, content) = self.http.request(uri, method, body=" ")
- self.assertEqual(response['x-method'], method_on_303)
+ self.assertEqual(response["x-method"], method_on_303)
def test303AndForwardAuthorizationHeader(self):
# Test that all methods can be used
uri = urlparse.urljoin(base, "303/redirect-to-header-reflector.cgi")
- headers = {'authorization': 'Bearer foo'}
- response, content = self.http.request(uri, 'GET', body=" ",
- headers=headers)
+ headers = {"authorization": "Bearer foo"}
+ response, content = self.http.request(uri, "GET", body=" ", headers=headers)
# self.assertTrue('authorization' not in content)
self.http.follow_all_redirects = True
self.http.forward_authorization_headers = True
- response, content = self.http.request(uri, 'GET', body=" ",
- headers=headers)
+ response, content = self.http.request(uri, "GET", body=" ", headers=headers)
# Oh, how I wish Apache didn't eat the Authorization header.
# self.assertTrue('authorization' in content)
def testGet304(self):
# Test that we use ETags properly to validate our cache
uri = urlparse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
(response, content) = self.http.request(uri, "GET")
- (response, content) = self.http.request(uri, "GET", headers = {'cache-control': 'must-revalidate'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "must-revalidate"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True)
- cache_file_name = os.path.join(cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1]))
+ cache_file_name = os.path.join(
+ cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1])
+ )
f = open(cache_file_name, "r")
status_line = f.readline()
f.close()
@@ -627,57 +733,83 @@ class HttpTest(unittest.TestCase):
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "GET", headers = {'range': 'bytes=0-0'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"range": "bytes=0-0"}
+ )
self.assertEqual(response.status, 206)
self.assertEqual(response.fromcache, False)
def testGetIgnoreEtag(self):
# Test that we can forcibly ignore ETags
uri = urlparse.urljoin(base, "reflector/reflector.cgi")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
+ )
d = self.reflector(content)
- self.assertTrue('HTTP_IF_NONE_MATCH' in d)
+ self.assertTrue("HTTP_IF_NONE_MATCH" in d)
self.http.ignore_etag = True
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0'})
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
+ )
d = self.reflector(content)
self.assertEqual(response.fromcache, False)
- self.assertFalse('HTTP_IF_NONE_MATCH' in d)
+ self.assertFalse("HTTP_IF_NONE_MATCH" in d)
def testOverrideEtag(self):
# Test that we can forcibly ignore ETags
uri = urlparse.urljoin(base, "reflector/reflector.cgi")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
+ )
d = self.reflector(content)
- self.assertTrue('HTTP_IF_NONE_MATCH' in d)
- self.assertNotEqual(d['HTTP_IF_NONE_MATCH'], "fred")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0', 'if-none-match': 'fred'})
+ self.assertTrue("HTTP_IF_NONE_MATCH" in d)
+ self.assertNotEqual(d["HTTP_IF_NONE_MATCH"], "fred")
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={
+ "accept-encoding": "identity",
+ "cache-control": "max-age=0",
+ "if-none-match": "fred",
+ },
+ )
d = self.reflector(content)
- self.assertTrue('HTTP_IF_NONE_MATCH' in d)
- self.assertEqual(d['HTTP_IF_NONE_MATCH'], "fred")
-
-#MAP-commented this out because it consistently fails
-# def testGet304EndToEnd(self):
-# # Test that end to end headers get overwritten in the cache
-# uri = urlparse.urljoin(base, "304/end2end.cgi")
-# (response, content) = self.http.request(uri, "GET")
-# self.assertNotEqual(response['etag'], "")
-# old_date = response['date']
-# time.sleep(2)
-#
-# (response, content) = self.http.request(uri, "GET", headers = {'Cache-Control': 'max-age=0'})
-# # The response should be from the cache, but the Date: header should be updated.
-# new_date = response['date']
-# self.assertNotEqual(new_date, old_date)
-# self.assertEqual(response.status, 200)
-# self.assertEqual(response.fromcache, True)
+ self.assertTrue("HTTP_IF_NONE_MATCH" in d)
+ self.assertEqual(d["HTTP_IF_NONE_MATCH"], "fred")
+
+    # MAP: commented this out because it consistently fails
+ # def testGet304EndToEnd(self):
+ # # Test that end to end headers get overwritten in the cache
+ # uri = urlparse.urljoin(base, "304/end2end.cgi")
+ # (response, content) = self.http.request(uri, "GET")
+ # self.assertNotEqual(response['etag'], "")
+ # old_date = response['date']
+ # time.sleep(2)
+ #
+ # (response, content) = self.http.request(uri, "GET", headers = {'Cache-Control': 'max-age=0'})
+ # # The response should be from the cache, but the Date: header should be updated.
+ # new_date = response['date']
+ # self.assertNotEqual(new_date, old_date)
+ # self.assertEqual(response.status, 200)
+ # self.assertEqual(response.fromcache, True)
def testGet304LastModified(self):
# Test that we can still handle a 304
@@ -685,7 +817,7 @@ class HttpTest(unittest.TestCase):
uri = urlparse.urljoin(base, "304/last-modified-only/last-modified-only.txt")
(response, content) = self.http.request(uri, "GET")
- self.assertNotEqual(response['last-modified'], "")
+ self.assertNotEqual(response["last-modified"], "")
(response, content) = self.http.request(uri, "GET")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
@@ -715,29 +847,29 @@ class HttpTest(unittest.TestCase):
self.assertEqual(response.status, 410)
def testVaryHeaderSimple(self):
- """
- RFC 2616 13.6
- When the cache receives a subsequent request whose Request-URI
- specifies one or more cache entries including a Vary header field,
- the cache MUST NOT use such a cache entry to construct a response
- to the new request unless all of the selecting request-headers
- present in the new request match the corresponding stored
- request-headers in the original request.
+ """RFC 2616 13.6 When the cache receives a subsequent request whose Request-URI specifies one or more cache entries including a Vary header field, the cache MUST NOT use such a cache entry to construct a response to the new request unless all of the selecting request-headers present in the new request match the corresponding stored request-headers in the original request.
+
"""
# test that the vary header is sent
uri = urlparse.urljoin(base, "vary/accept.asis")
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
self.assertEqual(response.status, 200)
- self.assertTrue('vary' in response)
+ self.assertTrue("vary" in response)
# get the resource again, from the cache since accept header in this
# request is the same as the request
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True, msg="Should be from cache")
# get the resource again, not from cache since Accept headers does not match
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/html"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False, msg="Should not be from cache")
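In cache terms, Vary folds the named request headers into the cache key: a stored entry may only satisfy a later request whose corresponding headers match the stored ones. The three requests above reduce to (URI is a placeholder for a resource sending Vary: Accept):

    import httplib2

    http = httplib2.Http(".cache")
    uri = "http://example.com/vary-on-accept"
    http.request(uri, "GET", headers={"Accept": "text/plain"})          # primes cache
    r1, _ = http.request(uri, "GET", headers={"Accept": "text/plain"})  # same key: hit
    r2, _ = http.request(uri, "GET", headers={"Accept": "text/html"})   # new key: miss
    assert r1.fromcache and not r2.fromcache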
@@ -766,45 +898,62 @@ class HttpTest(unittest.TestCase):
def testVaryHeaderDouble(self):
uri = urlparse.urljoin(base, "vary/accept-double.asis")
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={
+ "Accept": "text/plain",
+ "Accept-Language": "da, en-gb;q=0.8, en;q=0.7",
+ },
+ )
self.assertEqual(response.status, 200)
- self.assertTrue('vary' in response)
+ self.assertTrue("vary" in response)
# we are from cache
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={
+ "Accept": "text/plain",
+ "Accept-Language": "da, en-gb;q=0.8, en;q=0.7",
+ },
+ )
self.assertEqual(response.fromcache, True, msg="Should be from cache")
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False)
# get the resource again, not from cache, varied headers don't match exact
- (response, content) = self.http.request(uri, "GET", headers={'Accept-Language': 'da'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept-Language": "da"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False, msg="Should not be from cache")
def testVaryUnusedHeader(self):
# A header's value is not considered to vary if it's not used at all.
uri = urlparse.urljoin(base, "vary/unused-header.asis")
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
self.assertEqual(response.status, 200)
- self.assertTrue('vary' in response)
+ self.assertTrue("vary" in response)
# we are from cache
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain',})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
self.assertEqual(response.fromcache, True, msg="Should be from cache")
-
def testHeadGZip(self):
# Test that we don't try to decompress a HEAD response
uri = urlparse.urljoin(base, "gzip/final-destination.txt")
(response, content) = self.http.request(uri, "HEAD")
self.assertEqual(response.status, 200)
- self.assertNotEqual(int(response['content-length']), 0)
+ self.assertNotEqual(int(response["content-length"]), 0)
self.assertEqual(content, "")
def testGetGZip(self):
@@ -812,17 +961,19 @@ class HttpTest(unittest.TestCase):
uri = urlparse.urljoin(base, "gzip/final-destination.txt")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
- self.assertFalse('content-encoding' in response)
- self.assertTrue('-content-encoding' in response)
- self.assertEqual(int(response['content-length']), len("This is the final destination.\n"))
+ self.assertFalse("content-encoding" in response)
+ self.assertTrue("-content-encoding" in response)
+ self.assertEqual(
+ int(response["content-length"]), len("This is the final destination.\n")
+ )
self.assertEqual(content, "This is the final destination.\n")
def testPostAndGZipResponse(self):
uri = urlparse.urljoin(base, "gzip/post.cgi")
(response, content) = self.http.request(uri, "POST", body=" ")
self.assertEqual(response.status, 200)
- self.assertFalse('content-encoding' in response)
- self.assertTrue('-content-encoding' in response)
+ self.assertFalse("content-encoding" in response)
+ self.assertTrue("-content-encoding" in response)
def testGetGZipFailure(self):
# Test that we raise a good exception when the gzip fails
@@ -848,6 +999,7 @@ class HttpTest(unittest.TestCase):
uri = urlparse.urljoin(base, "timeout/timeout.cgi")
try:
import socket
+
socket.setdefaulttimeout(1)
except:
# Don't run the test if we can't set the timeout
@@ -867,9 +1019,8 @@ class HttpTest(unittest.TestCase):
self.assertTrue(response.reason.startswith("Request Timeout"))
self.assertTrue(content.startswith("Request Timeout"))
-
def testHTTPSInitTimeout(self):
- c = httplib2.HTTPSConnectionWithTimeout('localhost', 80, timeout=47)
+ c = httplib2.HTTPSConnectionWithTimeout("localhost", 80, timeout=47)
self.assertEqual(47, c.timeout)
def testGetDeflate(self):
@@ -877,8 +1028,10 @@ class HttpTest(unittest.TestCase):
uri = urlparse.urljoin(base, "deflate/deflated.asis")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
- self.assertFalse('content-encoding' in response)
- self.assertEqual(int(response['content-length']), len("This is the final destination."))
+ self.assertFalse("content-encoding" in response)
+ self.assertEqual(
+ int(response["content-length"]), len("This is the final destination.")
+ )
self.assertEqual(content, "This is the final destination.")
def testGetDeflateFailure(self):
@@ -907,31 +1060,48 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
self.assertEqual(content, "This is content\n")
- self.assertEqual(response['link'].split(",")[0], '<http://bitworking.org>; rel="home"; title="BitWorking"')
+ self.assertEqual(
+ response["link"].split(",")[0],
+ '<http://bitworking.org>; rel="home"; title="BitWorking"',
+ )
def testGetCacheControlNoCache(self):
# Test Cache-Control: no-cache on requests
uri = urlparse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "GET", headers={'accept-encoding': 'identity', 'Cache-Control': 'no-cache'})
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "Cache-Control": "no-cache"},
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False)
def testGetCacheControlPragmaNoCache(self):
# Test Pragma: no-cache on requests
uri = urlparse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
- (response, content) = self.http.request(uri, "GET", headers= {'accept-encoding': 'identity'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "GET", headers={'accept-encoding': 'identity', 'Pragma': 'no-cache'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity", "Pragma": "no-cache"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False)
@@ -939,11 +1109,15 @@ class HttpTest(unittest.TestCase):
# A no-store request means that the response should not be stored.
uri = urlparse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False)
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False)
@@ -967,8 +1141,12 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store, no-cache"}
+ )
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store, no-cache"}
+ )
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, False)
@@ -1016,7 +1194,6 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "PATCH", body="foo")
self.assertEqual(response.status, 412)
-
def testUpdateUsesCachedETagAndOCMethod(self):
# Test that we natively support http://www.w3.org/1999/04/Editing/
uri = urlparse.urljoin(base, "conditional-updates/test.cgi")
@@ -1031,7 +1208,6 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "DELETE")
self.assertEqual(response.status, 200)
-
def testUpdateUsesCachedETagOverridden(self):
# Test that we natively support http://www.w3.org/1999/04/Editing/
uri = urlparse.urljoin(base, "conditional-updates/test.cgi")
@@ -1042,7 +1218,9 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "PUT", body="foo", headers={'if-match': 'fred'})
+ (response, content) = self.http.request(
+ uri, "PUT", body="foo", headers={"if-match": "fred"}
+ )
self.assertEqual(response.status, 412)
def testBasicAuth(self):
@@ -1055,7 +1233,7 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 401)
- self.http.add_credentials('joe', 'password')
+ self.http.add_credentials("joe", "password")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
@@ -1073,7 +1251,7 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 401)
- self.http.add_credentials('joe', 'password', "example.org")
+ self.http.add_credentials("joe", "password", "example.org")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 401)
@@ -1082,7 +1260,7 @@ class HttpTest(unittest.TestCase):
self.assertEqual(response.status, 401)
domain = urlparse.urlparse(base)[1]
- self.http.add_credentials('joe', 'password', domain)
+ self.http.add_credentials("joe", "password", domain)
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
@@ -1090,11 +1268,6 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
-
-
-
-
-
def testBasicAuthTwoDifferentCredentials(self):
# Test Basic Authentication with multiple sets of credentials
uri = urlparse.urljoin(base, "basic2/file.txt")
@@ -1105,7 +1278,7 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 401)
- self.http.add_credentials('fred', 'barney')
+ self.http.add_credentials("fred", "barney")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
@@ -1125,7 +1298,7 @@ class HttpTest(unittest.TestCase):
self.assertEqual(response.status, 401)
# Now add in credentials one at a time and test.
- self.http.add_credentials('joe', 'password')
+ self.http.add_credentials("joe", "password")
uri = urlparse.urljoin(base, "basic-nested/")
(response, content) = self.http.request(uri, "GET")
@@ -1135,7 +1308,7 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 401)
- self.http.add_credentials('fred', 'barney')
+ self.http.add_credentials("fred", "barney")
uri = urlparse.urljoin(base, "basic-nested/")
(response, content) = self.http.request(uri, "GET")
@@ -1151,7 +1324,7 @@ class HttpTest(unittest.TestCase):
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 401)
- self.http.add_credentials('joe', 'password')
+ self.http.add_credentials("joe", "password")
(response, content) = self.http.request(uri, "GET")
self.assertEqual(response.status, 200)
@@ -1162,49 +1335,59 @@ class HttpTest(unittest.TestCase):
# Test that if the server sets nextnonce that we reset
# the nonce count back to 1
uri = urlparse.urljoin(base, "digest/file.txt")
- self.http.add_credentials('joe', 'password')
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- info = httplib2._parse_www_authenticate(response, 'authentication-info')
+ self.http.add_credentials("joe", "password")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ info = httplib2._parse_www_authenticate(response, "authentication-info")
self.assertEqual(response.status, 200)
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- info2 = httplib2._parse_www_authenticate(response, 'authentication-info')
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ info2 = httplib2._parse_www_authenticate(response, "authentication-info")
self.assertEqual(response.status, 200)
- if 'nextnonce' in info:
- self.assertEqual(info2['nc'], 1)
+ if "nextnonce" in info:
+ self.assertEqual(info2["nc"], 1)
def testDigestAuthStale(self):
# Test that we can handle a nonce becoming stale
uri = urlparse.urljoin(base, "digest-expire/file.txt")
- self.http.add_credentials('joe', 'password')
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- info = httplib2._parse_www_authenticate(response, 'authentication-info')
+ self.http.add_credentials("joe", "password")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ info = httplib2._parse_www_authenticate(response, "authentication-info")
self.assertEqual(response.status, 200)
time.sleep(3)
# Sleep long enough that the nonce becomes stale
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
self.assertFalse(response.fromcache)
self.assertTrue(response._stale_digest)
- info3 = httplib2._parse_www_authenticate(response, 'authentication-info')
+ info3 = httplib2._parse_www_authenticate(response, "authentication-info")
self.assertEqual(response.status, 200)
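The digest flow exercised here: add_credentials() registers the user/password pair, the first request draws a 401 challenge, httplib2 retries with a Digest Authorization header, and a stale nonce (provoked by the sleep above) triggers one transparent re-authentication. Compressed (server URL is a placeholder; _parse_www_authenticate is the same private helper the tests use):

    import httplib2

    http = httplib2.Http(".cache")
    http.add_credentials("joe", "password")
    response, content = http.request("http://example.com/digest/file.txt", "GET")
    info = httplib2._parse_www_authenticate(response, "authentication-info")
    # if the server sent nextnonce, httplib2 resets its nonce count to 1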
def reflector(self, content):
- return dict( [tuple(x.split("=", 1)) for x in content.strip().split("\n")] )
+ return dict([tuple(x.split("=", 1)) for x in content.strip().split("\n")])
def testReflector(self):
uri = urlparse.urljoin(base, "reflector/reflector.cgi")
(response, content) = self.http.request(uri, "GET")
d = self.reflector(content)
- self.assertTrue('HTTP_USER_AGENT' in d)
+ self.assertTrue("HTTP_USER_AGENT" in d)
def testConnectionClose(self):
uri = "http://www.google.com/"
(response, content) = self.http.request(uri, "GET")
for c in self.http.connections.values():
self.assertNotEqual(None, c.sock)
- (response, content) = self.http.request(uri, "GET", headers={"connection": "close"})
+ (response, content) = self.http.request(
+ uri, "GET", headers={"connection": "close"}
+ )
for c in self.http.connections.values():
self.assertEqual(None, c.sock)
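A Connection: close request header makes httplib2 tear down the pooled socket after the exchange (conn.sock becomes None) while keeping the connection object around for reuse:

    import httplib2

    http = httplib2.Http()
    http.request("http://www.google.com/", "GET")
    assert all(c.sock is not None for c in http.connections.values())
    http.request("http://www.google.com/", "GET", headers={"connection": "close"})
    assert all(c.sock is None for c in http.connections.values())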
@@ -1212,43 +1395,48 @@ class HttpTest(unittest.TestCase):
pickled_http = pickle.dumps(self.http)
new_http = pickle.loads(pickled_http)
- self.assertEqual(sorted(new_http.__dict__.keys()),
- sorted(self.http.__dict__.keys()))
+ self.assertEqual(
+ sorted(new_http.__dict__.keys()), sorted(self.http.__dict__.keys())
+ )
for key in new_http.__dict__:
- if key in ('certificates', 'credentials'):
- self.assertEqual(new_http.__dict__[key].credentials,
- self.http.__dict__[key].credentials)
- elif key == 'cache':
- self.assertEqual(new_http.__dict__[key].cache,
- self.http.__dict__[key].cache)
+ if key in ("certificates", "credentials"):
+ self.assertEqual(
+ new_http.__dict__[key].credentials,
+ self.http.__dict__[key].credentials,
+ )
+ elif key == "cache":
+ self.assertEqual(
+ new_http.__dict__[key].cache, self.http.__dict__[key].cache
+ )
else:
- self.assertEqual(new_http.__dict__[key],
- self.http.__dict__[key])
+ self.assertEqual(new_http.__dict__[key], self.http.__dict__[key])
def testPickleHttpWithConnection(self):
- self.http.request('http://bitworking.org',
- connection_type=_MyHTTPConnection)
+ self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection)
pickled_http = pickle.dumps(self.http)
new_http = pickle.loads(pickled_http)
- self.assertEqual(self.http.connections.keys(), ['http:bitworking.org'])
+ self.assertEqual(self.http.connections.keys(), ["http:bitworking.org"])
self.assertEqual(new_http.connections, {})
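Http objects pickle cleanly because __getstate__ drops live connections (and any monkey-patched request attribute); unpickling restores an empty pool. Sketch:

    import pickle
    import httplib2

    http = httplib2.Http(".cache")
    restored = pickle.loads(pickle.dumps(http))
    assert restored.connections == {}  # sockets are never serialized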
def testPickleCustomRequestHttp(self):
def dummy_request(*args, **kwargs):
return new_request(*args, **kwargs)
- dummy_request.dummy_attr = 'dummy_value'
+
+ dummy_request.dummy_attr = "dummy_value"
self.http.request = dummy_request
pickled_http = pickle.dumps(self.http)
self.assertFalse("S'request'" in pickled_http)
+
try:
import memcache
+
class HttpTestMemCached(HttpTest):
def setUp(self):
- self.cache = memcache.Client(['127.0.0.1:11211'], debug=0)
- #self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
+ self.cache = memcache.Client(["127.0.0.1:11211"], debug=0)
+ # self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
self.http = httplib2.Http(self.cache)
self.cache.flush_all()
# Not exactly sure why the sleep is needed here, but
@@ -1258,171 +1446,189 @@ try:
# was previously cached. (Maybe the flush is handled async?)
time.sleep(1)
self.http.clear_credentials()
-except:
- pass
-
+except:
+ pass
# ------------------------------------------------------------------------
-class HttpPrivateTest(unittest.TestCase):
+class HttpPrivateTest(unittest.TestCase):
def testParseCacheControl(self):
# Test that we can parse the Cache-Control header
self.assertEqual({}, httplib2._parse_cache_control({}))
- self.assertEqual({'no-cache': 1}, httplib2._parse_cache_control({'cache-control': ' no-cache'}))
- cc = httplib2._parse_cache_control({'cache-control': ' no-cache, max-age = 7200'})
- self.assertEqual(cc['no-cache'], 1)
- self.assertEqual(cc['max-age'], '7200')
- cc = httplib2._parse_cache_control({'cache-control': ' , '})
- self.assertEqual(cc[''], 1)
+ self.assertEqual(
+ {"no-cache": 1},
+ httplib2._parse_cache_control({"cache-control": " no-cache"}),
+ )
+ cc = httplib2._parse_cache_control(
+ {"cache-control": " no-cache, max-age = 7200"}
+ )
+ self.assertEqual(cc["no-cache"], 1)
+ self.assertEqual(cc["max-age"], "7200")
+ cc = httplib2._parse_cache_control({"cache-control": " , "})
+ self.assertEqual(cc[""], 1)
try:
- cc = httplib2._parse_cache_control({'cache-control': 'Max-age=3600;post-check=1800,pre-check=3600'})
+ cc = httplib2._parse_cache_control(
+ {"cache-control": "Max-age=3600;post-check=1800,pre-check=3600"}
+ )
self.assertTrue("max-age" in cc)
except:
self.fail("Should not throw exception")
def testNormalizeHeaders(self):
# Test that we normalize headers to lowercase
- h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
- self.assertTrue('cache-control' in h)
- self.assertTrue('other' in h)
- self.assertEqual('Stuff', h['other'])
+ h = httplib2._normalize_headers({"Cache-Control": "no-cache", "Other": "Stuff"})
+ self.assertTrue("cache-control" in h)
+ self.assertTrue("other" in h)
+ self.assertEqual("Stuff", h["other"])
def testExpirationModelTransparent(self):
# Test that no-cache makes our request TRANSPARENT
- response_headers = {
- 'cache-control': 'max-age=7200'
- }
- request_headers = {
- 'cache-control': 'no-cache'
- }
- self.assertEqual("TRANSPARENT", httplib2._entry_disposition(response_headers, request_headers))
+ response_headers = {"cache-control": "max-age=7200"}
+ request_headers = {"cache-control": "no-cache"}
+ self.assertEqual(
+ "TRANSPARENT",
+ httplib2._entry_disposition(response_headers, request_headers),
+ )
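_entry_disposition collapses the request/response cache headers into one of three dispositions: TRANSPARENT (bypass the cache entirely), FRESH (serve from cache), or STALE (revalidate with the origin). For instance:

    import httplib2

    disp = httplib2._entry_disposition(
        {"cache-control": "max-age=7200"},  # response: fresh for two hours
        {"cache-control": "no-cache"},      # request: do not use the cache
    )
    assert disp == "TRANSPARENT"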
def testMaxAgeNonNumeric(self):
# Test that no-cache makes our request TRANSPARENT
- response_headers = {
- 'cache-control': 'max-age=fred, min-fresh=barney'
- }
- request_headers = {
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
+ response_headers = {"cache-control": "max-age=fred, min-fresh=barney"}
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelNoCacheResponse(self):
# The date and expires point to an entry that should be
# FRESH, but the no-cache over-rides that.
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
- 'cache-control': 'no-cache'
- }
- request_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 4)),
+ "cache-control": "no-cache",
}
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelStaleRequestMustReval(self):
# must-revalidate forces STALE
- self.assertEqual("STALE", httplib2._entry_disposition({}, {'cache-control': 'must-revalidate'}))
+ self.assertEqual(
+ "STALE",
+ httplib2._entry_disposition({}, {"cache-control": "must-revalidate"}),
+ )
def testExpirationModelStaleResponseMustReval(self):
# must-revalidate forces STALE
- self.assertEqual("STALE", httplib2._entry_disposition({'cache-control': 'must-revalidate'}, {}))
+ self.assertEqual(
+ "STALE",
+ httplib2._entry_disposition({"cache-control": "must-revalidate"}, {}),
+ )
def testExpirationModelFresh(self):
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
- 'cache-control': 'max-age=2'
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
+ "cache-control": "max-age=2",
}
- request_headers = {
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
time.sleep(3)
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationMaxAge0(self):
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
- 'cache-control': 'max-age=0'
- }
- request_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
+ "cache-control": "max-age=0",
}
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelDateAndExpires(self):
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 2)),
}
- request_headers = {
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
time.sleep(3)
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpiresZero(self):
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': "0",
- }
- request_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": "0",
}
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelDateOnly(self):
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+3)),
- }
- request_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 3))
}
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelOnlyIfCached(self):
- response_headers = {
- }
- request_headers = {
- 'cache-control': 'only-if-cached',
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+ response_headers = {}
+ request_headers = {"cache-control": "only-if-cached"}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelMaxAgeBoth(self):
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'cache-control': 'max-age=2'
- }
- request_headers = {
- 'cache-control': 'max-age=0'
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "cache-control": "max-age=2",
}
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {"cache-control": "max-age=0"}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelDateAndExpiresMinFresh1(self):
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 2)),
}
- request_headers = {
- 'cache-control': 'min-fresh=2'
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {"cache-control": "min-fresh=2"}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
def testExpirationModelDateAndExpiresMinFresh2(self):
now = time.time()
response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
- }
- request_headers = {
- 'cache-control': 'min-fresh=2'
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 4)),
}
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+ request_headers = {"cache-control": "min-fresh=2"}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
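
For orientation while reading these reflowed cases: _entry_disposition classifies a cached
entry from its request and response headers and returns one of three strings. A minimal
sketch of the contract the tests above pin down (illustrative only; the helper is private API):

import httplib2

# A no-cache request bypasses the cache entirely.
assert httplib2._entry_disposition({}, {"cache-control": "no-cache"}) == "TRANSPARENT"
# only-if-cached forces use of the cached copy.
assert httplib2._entry_disposition({}, {"cache-control": "only-if-cached"}) == "FRESH"
# Unparseable cache-control numbers degrade to STALE (cf. testMaxAgeNonNumeric above).
assert httplib2._entry_disposition({"cache-control": "max-age=fred"}, {}) == "STALE"
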
def testParseWWWAuthenticateEmpty(self):
res = httplib2._parse_www_authenticate({})
@@ -1430,199 +1636,275 @@ class HttpPrivateTest(unittest.TestCase):
def testParseWWWAuthenticate(self):
# different uses of spaces around commas
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'})
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'
+ }
+ )
self.assertEqual(len(res.keys()), 1)
- self.assertEqual(len(res['test'].keys()), 5)
+ self.assertEqual(len(res["test"].keys()), 5)
# tokens with non-alphanum
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'})
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'}
+ )
self.assertEqual(len(res.keys()), 1)
- self.assertEqual(len(res['t*!%#st'].keys()), 2)
+ self.assertEqual(len(res["t*!%#st"].keys()), 2)
# quoted string with quoted pairs
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Test realm="a \\"test\\" realm"'})
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Test realm="a \\"test\\" realm"'}
+ )
self.assertEqual(len(res.keys()), 1)
- self.assertEqual(res['test']['realm'], 'a "test" realm')
+ self.assertEqual(res["test"]["realm"], 'a "test" realm')
def testParseWWWAuthenticateStrict(self):
- httplib2.USE_WWW_AUTH_STRICT_PARSING = 1;
- self.testParseWWWAuthenticate();
- httplib2.USE_WWW_AUTH_STRICT_PARSING = 0;
+ httplib2.USE_WWW_AUTH_STRICT_PARSING = 1
+ self.testParseWWWAuthenticate()
+ httplib2.USE_WWW_AUTH_STRICT_PARSING = 0
def testParseWWWAuthenticateBasic(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me"'})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
-
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm="MD5"'})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- self.assertEqual('MD5', basic['algorithm'])
-
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm=MD5'})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- self.assertEqual('MD5', basic['algorithm'])
+ res = httplib2._parse_www_authenticate({"www-authenticate": 'Basic realm="me"'})
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic realm="me", algorithm="MD5"'}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ self.assertEqual("MD5", basic["algorithm"])
+
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic realm="me", algorithm=MD5'}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ self.assertEqual("MD5", basic["algorithm"])
def testParseWWWAuthenticateBasic2(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me",other="fred" '})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- self.assertEqual('fred', basic['other'])
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic realm="me",other="fred" '}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ self.assertEqual("fred", basic["other"])
def testParseWWWAuthenticateBasic3(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic REAlm="me" '})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
-
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic REAlm="me" '}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
def testParseWWWAuthenticateDigest(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
-
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
def testParseWWWAuthenticateMultiple(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
- self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
- self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+ self.assertEqual("dcd98b7102dd2f0e8b11d0f600bfb0c093", digest["nonce"])
+ self.assertEqual("5ccc069c403ebaf9f0171e9517f40e41", digest["opaque"])
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
def testParseWWWAuthenticateMultiple2(self):
# Handle an added comma between challenges, which might get thrown in if the challenges were
# originally sent in separate www-authenticate headers.
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
- self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
- self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+ self.assertEqual("dcd98b7102dd2f0e8b11d0f600bfb0c093", digest["nonce"])
+ self.assertEqual("5ccc069c403ebaf9f0171e9517f40e41", digest["opaque"])
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
def testParseWWWAuthenticateMultiple3(self):
# Handle an added comma between challenges, which might get thrown in if the challenges were
# originally sent in separate www-authenticate headers.
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
- self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
- self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- wsse = res['wsse']
- self.assertEqual('foo', wsse['realm'])
- self.assertEqual('UsernameToken', wsse['profile'])
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+ self.assertEqual("dcd98b7102dd2f0e8b11d0f600bfb0c093", digest["nonce"])
+ self.assertEqual("5ccc069c403ebaf9f0171e9517f40e41", digest["opaque"])
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ wsse = res["wsse"]
+ self.assertEqual("foo", wsse["realm"])
+ self.assertEqual("UsernameToken", wsse["profile"])
def testParseWWWAuthenticateMultiple4(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
- digest = res['digest']
- self.assertEqual('test-real.m@host.com', digest['realm'])
- self.assertEqual('\tauth,auth-int', digest['qop'])
- self.assertEqual('(*)&^&$%#', digest['nonce'])
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("test-real.m@host.com", digest["realm"])
+ self.assertEqual("\tauth,auth-int", digest["qop"])
+ self.assertEqual("(*)&^&$%#", digest["nonce"])
def testParseWWWAuthenticateMoreQuoteCombos(self):
- res = httplib2._parse_www_authenticate({'www-authenticate':'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'})
- digest = res['digest']
- self.assertEqual('myrealm', digest['realm'])
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("myrealm", digest["realm"])
def testParseWWWAuthenticateMalformed(self):
try:
- res = httplib2._parse_www_authenticate({'www-authenticate':'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'})
- self.fail("should raise an exception")
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'
+ }
+ )
+ self.fail("should raise an exception")
except httplib2.MalformedHeader:
- pass
+ pass
def testDigestObject(self):
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/projects/httplib2/test/digest/'
+ request_uri = "/projects/httplib2/test/digest/"
headers = {}
response = {
- 'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"'
+ "www-authenticate": 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", '
+ 'algorithm=MD5, qop="auth"'
}
content = ""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
- our_request = "authorization: %s" % headers['authorization']
- working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"'
+ our_request = "authorization: %s" % headers["authorization"]
+ working_request = (
+ 'authorization: Digest username="joe", realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' uri="/projects/httplib2/test/digest/", algorithm=MD5, '
+ 'response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, '
+ 'nc=00000001, cnonce="33033375ec278a46"'
+ )
self.assertEqual(our_request, working_request)
def testDigestObjectWithOpaque(self):
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/projects/httplib2/test/digest/'
+ request_uri = "/projects/httplib2/test/digest/"
headers = {}
response = {
- 'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", opaque="atestopaque"'
+ "www-authenticate": 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", '
+ 'algorithm=MD5, qop="auth", opaque="atestopaque"'
}
content = ""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
- our_request = "authorization: %s" % headers['authorization']
- working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46", opaque="atestopaque"'
+ our_request = "authorization: %s" % headers["authorization"]
+ working_request = (
+ 'authorization: Digest username="joe", realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' uri="/projects/httplib2/test/digest/", algorithm=MD5, '
+ 'response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, '
+ 'nc=00000001, cnonce="33033375ec278a46", '
+ 'opaque="atestopaque"'
+ )
self.assertEqual(our_request, working_request)
def testDigestObjectStale(self):
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/projects/httplib2/test/digest/'
+ request_uri = "/projects/httplib2/test/digest/"
headers = {}
- response = httplib2.Response({ })
- response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
+ response = httplib2.Response({})
+ response["www-authenticate"] = (
+ 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' algorithm=MD5, qop="auth", stale=true'
+ )
response.status = 401
content = ""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
# Returns true to force a retry
- self.assertTrue( d.response(response, content) )
+ self.assertTrue(d.response(response, content))
def testDigestObjectAuthInfo(self):
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/projects/httplib2/test/digest/'
+ request_uri = "/projects/httplib2/test/digest/"
headers = {}
- response = httplib2.Response({ })
- response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
- response['authentication-info'] = 'nextnonce="fred"'
+ response = httplib2.Response({})
+ response["www-authenticate"] = (
+ 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' algorithm=MD5, qop="auth", stale=true'
+ )
+ response["authentication-info"] = 'nextnonce="fred"'
content = ""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
        # Returns False: authentication-info supplies the next nonce, so no retry is needed
- self.assertFalse( d.response(response, content) )
- self.assertEqual('fred', d.challenge['nonce'])
- self.assertEqual(1, d.challenge['nc'])
+ self.assertFalse(d.response(response, content))
+ self.assertEqual("fred", d.challenge["nonce"])
+ self.assertEqual(1, d.challenge["nc"])
def testWsseAlgorithm(self):
- digest = httplib2._wsse_username_token("d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm")
+ digest = httplib2._wsse_username_token(
+ "d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm"
+ )
expected = "quR/EWLAV4xLf9Zqyw4pDmfV9OY="
self.assertEqual(expected, digest)
def testEnd2End(self):
# one end to end header
- response = {'content-type': 'application/atom+xml', 'te': 'deflate'}
+ response = {"content-type": "application/atom+xml", "te": "deflate"}
end2end = httplib2._get_end2end_headers(response)
- self.assertTrue('content-type' in end2end)
- self.assertTrue('te' not in end2end)
- self.assertTrue('connection' not in end2end)
+ self.assertTrue("content-type" in end2end)
+ self.assertTrue("te" not in end2end)
+ self.assertTrue("connection" not in end2end)
# one end to end header that gets eliminated
- response = {'connection': 'content-type', 'content-type': 'application/atom+xml', 'te': 'deflate'}
+ response = {
+ "connection": "content-type",
+ "content-type": "application/atom+xml",
+ "te": "deflate",
+ }
end2end = httplib2._get_end2end_headers(response)
- self.assertTrue('content-type' not in end2end)
- self.assertTrue('te' not in end2end)
- self.assertTrue('connection' not in end2end)
+ self.assertTrue("content-type" not in end2end)
+ self.assertTrue("te" not in end2end)
+ self.assertTrue("connection" not in end2end)
# Degenerate case of no headers
response = {}
@@ -1630,7 +1912,7 @@ class HttpPrivateTest(unittest.TestCase):
self.assertEquals(0, len(end2end))
        # Degenerate case of connection referring to a header not passed in
- response = {'connection': 'content-type'}
+ response = {"connection": "content-type"}
end2end = httplib2._get_end2end_headers(response)
self.assertEquals(0, len(end2end))
@@ -1644,34 +1926,33 @@ class TestProxyInfo(unittest.TestCase):
os.environ.update(self.orig_env)
def test_from_url(self):
- pi = httplib2.proxy_info_from_url('http://myproxy.example.com')
- self.assertEquals(pi.proxy_host, 'myproxy.example.com')
+ pi = httplib2.proxy_info_from_url("http://myproxy.example.com")
+ self.assertEquals(pi.proxy_host, "myproxy.example.com")
self.assertEquals(pi.proxy_port, 80)
self.assertEquals(pi.proxy_user, None)
def test_from_url_ident(self):
- pi = httplib2.proxy_info_from_url('http://zoidberg:fish@someproxy:99')
- self.assertEquals(pi.proxy_host, 'someproxy')
+ pi = httplib2.proxy_info_from_url("http://zoidberg:fish@someproxy:99")
+ self.assertEquals(pi.proxy_host, "someproxy")
self.assertEquals(pi.proxy_port, 99)
- self.assertEquals(pi.proxy_user, 'zoidberg')
- self.assertEquals(pi.proxy_pass, 'fish')
+ self.assertEquals(pi.proxy_user, "zoidberg")
+ self.assertEquals(pi.proxy_pass, "fish")
def test_from_env(self):
- os.environ['http_proxy'] = 'http://myproxy.example.com:8080'
+ os.environ["http_proxy"] = "http://myproxy.example.com:8080"
pi = httplib2.proxy_info_from_environment()
- self.assertEquals(pi.proxy_host, 'myproxy.example.com')
+ self.assertEquals(pi.proxy_host, "myproxy.example.com")
self.assertEquals(pi.proxy_port, 8080)
self.assertEquals(pi.bypass_hosts, [])
def test_from_env_no_proxy(self):
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['https_proxy'] = 'http://myproxy.example.com:81'
- os.environ['no_proxy'] = 'localhost,otherhost.domain.local'
- pi = httplib2.proxy_info_from_environment('https')
- self.assertEquals(pi.proxy_host, 'myproxy.example.com')
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["https_proxy"] = "http://myproxy.example.com:81"
+ os.environ["no_proxy"] = "localhost,otherhost.domain.local"
+ pi = httplib2.proxy_info_from_environment("https")
+ self.assertEquals(pi.proxy_host, "myproxy.example.com")
self.assertEquals(pi.proxy_port, 81)
- self.assertEquals(pi.bypass_hosts, ['localhost',
- 'otherhost.domain.local'])
+ self.assertEquals(pi.bypass_hosts, ["localhost", "otherhost.domain.local"])
def test_from_env_none(self):
os.environ.clear()
@@ -1679,25 +1960,28 @@ class TestProxyInfo(unittest.TestCase):
self.assertEquals(pi, None)
def test_applies_to(self):
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['https_proxy'] = 'http://myproxy.example.com:81'
- os.environ['no_proxy'] = 'localhost,otherhost.domain.local,example.com'
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["https_proxy"] = "http://myproxy.example.com:81"
+ os.environ["no_proxy"] = "localhost,otherhost.domain.local,example.com"
pi = httplib2.proxy_info_from_environment()
- self.assertFalse(pi.applies_to('localhost'))
- self.assertTrue(pi.applies_to('www.google.com'))
- self.assertFalse(pi.applies_to('www.example.com'))
+ self.assertFalse(pi.applies_to("localhost"))
+ self.assertTrue(pi.applies_to("www.google.com"))
+ self.assertFalse(pi.applies_to("www.example.com"))
def test_no_proxy_star(self):
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['NO_PROXY'] = '*'
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["NO_PROXY"] = "*"
pi = httplib2.proxy_info_from_environment()
- for host in ('localhost', '169.254.38.192', 'www.google.com'):
+ for host in ("localhost", "169.254.38.192", "www.google.com"):
self.assertFalse(pi.applies_to(host))
def test_proxy_headers(self):
- headers = {'key0': 'val0', 'key1': 'val1'}
- pi = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, 'localhost', 1234, proxy_headers = headers)
+ headers = {"key0": "val0", "key1": "val1"}
+ pi = httplib2.ProxyInfo(
+ httplib2.socks.PROXY_TYPE_HTTP, "localhost", 1234, proxy_headers=headers
+ )
self.assertEquals(pi.proxy_headers, headers)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
unittest.main()
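
The TestProxyInfo class above exercises environment-driven proxy discovery end to end.
A condensed sketch of the same public API (host names are the placeholders the tests use):

import os
import httplib2

os.environ["http_proxy"] = "http://myproxy.example.com:8080"
os.environ["no_proxy"] = "localhost,otherhost.domain.local"
pi = httplib2.proxy_info_from_environment()
print(pi.proxy_host, pi.proxy_port)     # myproxy.example.com 8080
print(pi.applies_to("localhost"))       # False: listed in no_proxy, so no proxy
print(pi.applies_to("www.google.com"))  # True: this host goes through the proxy
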
diff --git a/python2/httplib2test_appengine.py b/python2/httplib2test_appengine.py
index 9fad05a..d5c5786 100755
--- a/python2/httplib2test_appengine.py
+++ b/python2/httplib2test_appengine.py
@@ -5,11 +5,12 @@ import os
import sys
import unittest
-APP_ENGINE_PATH='/usr/local/google_appengine'
+APP_ENGINE_PATH = "/usr/local/google_appengine"
sys.path.insert(0, APP_ENGINE_PATH)
import dev_appserver
+
dev_appserver.fix_sys_path()
from google.appengine.ext import testbed
@@ -20,60 +21,65 @@ sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
class AberrationsTest(unittest.TestCase):
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_urlfetch_stub()
- def setUp(self):
- self.testbed = testbed.Testbed()
- self.testbed.activate()
- self.testbed.init_urlfetch_stub()
+ def tearDown(self):
+ self.testbed.deactivate()
- def tearDown(self):
- self.testbed.deactivate()
+ @mock.patch.dict("os.environ", {"SERVER_SOFTWARE": ""})
+ def testConnectionInit(self):
+ global httplib2
+ import httplib2
- @mock.patch.dict('os.environ', {'SERVER_SOFTWARE': ''})
- def testConnectionInit(self):
- global httplib2
- import httplib2
- self.assertNotEqual(
- httplib2.SCHEME_TO_CONNECTION['https'], httplib2.AppEngineHttpsConnection)
- self.assertNotEqual(
- httplib2.SCHEME_TO_CONNECTION['http'], httplib2.AppEngineHttpConnection)
- del globals()['httplib2']
+ self.assertNotEqual(
+ httplib2.SCHEME_TO_CONNECTION["https"], httplib2.AppEngineHttpsConnection
+ )
+ self.assertNotEqual(
+ httplib2.SCHEME_TO_CONNECTION["http"], httplib2.AppEngineHttpConnection
+ )
+ del globals()["httplib2"]
class AppEngineHttpTest(unittest.TestCase):
-
- def setUp(self):
- self.testbed = testbed.Testbed()
- self.testbed.activate()
- self.testbed.init_urlfetch_stub()
- global httplib2
- import httplib2
- reload(httplib2)
-
- def tearDown(self):
- self.testbed.deactivate()
- del globals()['httplib2']
-
- def testConnectionInit(self):
- self.assertEqual(
- httplib2.SCHEME_TO_CONNECTION['https'], httplib2.AppEngineHttpsConnection)
- self.assertEqual(
- httplib2.SCHEME_TO_CONNECTION['http'], httplib2.AppEngineHttpConnection)
-
- def testGet(self):
- http = httplib2.Http()
- response, content = http.request("http://www.google.com")
- self.assertEqual(httplib2.SCHEME_TO_CONNECTION['https'],
- httplib2.AppEngineHttpsConnection)
- self.assertEquals(1, len(http.connections))
- self.assertEquals(response.status, 200)
- self.assertEquals(response['status'], '200')
-
- def testProxyInfoIgnored(self):
- http = httplib2.Http(proxy_info=mock.MagicMock())
- response, content = http.request("http://www.google.com")
- self.assertEquals(response.status, 200)
-
-
-if __name__ == '__main__':
+ def setUp(self):
+ self.testbed = testbed.Testbed()
+ self.testbed.activate()
+ self.testbed.init_urlfetch_stub()
+ global httplib2
+ import httplib2
+
+ reload(httplib2)
+
+ def tearDown(self):
+ self.testbed.deactivate()
+ del globals()["httplib2"]
+
+ def testConnectionInit(self):
+ self.assertEqual(
+ httplib2.SCHEME_TO_CONNECTION["https"], httplib2.AppEngineHttpsConnection
+ )
+ self.assertEqual(
+ httplib2.SCHEME_TO_CONNECTION["http"], httplib2.AppEngineHttpConnection
+ )
+
+ def testGet(self):
+ http = httplib2.Http()
+ response, content = http.request("http://www.google.com")
+ self.assertEqual(
+ httplib2.SCHEME_TO_CONNECTION["https"], httplib2.AppEngineHttpsConnection
+ )
+ self.assertEquals(1, len(http.connections))
+ self.assertEquals(response.status, 200)
+ self.assertEquals(response["status"], "200")
+
+ def testProxyInfoIgnored(self):
+ http = httplib2.Http(proxy_info=mock.MagicMock())
+ response, content = http.request("http://www.google.com")
+ self.assertEquals(response.status, 200)
+
+
+if __name__ == "__main__":
unittest.main()
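
The restructuring above is mostly indentation, but the import dance it preserves is easy
to miss: httplib2 inspects SERVER_SOFTWARE at import time to decide whether to route
requests through App Engine's urlfetch. A fragile-by-nature sketch of that gate, mirroring
AberrationsTest.testConnectionInit (the tests reload httplib2 between cases because the
check happens only once per import):

import mock  # the standalone mock package, as used by this test file

with mock.patch.dict("os.environ", {"SERVER_SOFTWARE": ""}):
    import httplib2  # not on App Engine: plain socket-based connections are selected
    assert httplib2.SCHEME_TO_CONNECTION["http"] is not httplib2.AppEngineHttpConnection
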
diff --git a/python2/ssl_protocol_test.py b/python2/ssl_protocol_test.py
index bac84c0..fedf7cf 100755
--- a/python2/ssl_protocol_test.py
+++ b/python2/ssl_protocol_test.py
@@ -8,50 +8,56 @@ import unittest
class TestSslProtocol(unittest.TestCase):
-
- def testSslCertValidationWithInvalidCaCert(self):
- if sys.version_info >= (2, 6):
- http = httplib2.Http(ca_certs='/nosuchfile')
- if sys.version_info >= (2, 7):
- with self.assertRaises(IOError):
- http.request('https://www.google.com/', 'GET')
- else:
- self.assertRaises(
- ssl.SSLError, http.request, 'https://www.google.com/', 'GET')
-
- def testSslCertValidationWithSelfSignedCaCert(self):
- if sys.version_info >= (2, 7):
- other_ca_certs = os.path.join(
- os.path.dirname(os.path.abspath(httplib2.__file__ )), 'test',
- 'other_cacerts.txt')
- http = httplib2.Http(ca_certs=other_ca_certs)
- if sys.platform != 'darwin':
- with self.assertRaises(httplib2.SSLHandshakeError):
- http.request('https://www.google.com/', 'GET')
-
- def testSslProtocolTlsV1AndShouldPass(self):
- http = httplib2.Http(ssl_version=ssl.PROTOCOL_TLSv1)
- urls = ['https://www.amazon.com',
- 'https://www.apple.com',
- 'https://www.twitter.com']
- for url in urls:
- if sys.version_info >= (2, 7):
- self.assertIsNotNone(http.request(uri=url))
-
- def testSslProtocolV3AndShouldFailDueToPoodle(self):
- http = httplib2.Http(ssl_version=ssl.PROTOCOL_SSLv3)
- urls = ['https://www.amazon.com',
- 'https://www.apple.com',
- 'https://www.twitter.com']
- for url in urls:
- if sys.version_info >= (2, 7):
- with self.assertRaises(httplib2.SSLHandshakeError):
- http.request(url)
- try:
- http.request(url)
- except httplib2.SSLHandshakeError as e:
- self.assertTrue('sslv3 alert handshake failure' in str(e))
-
-
-if __name__ == '__main__':
- unittest.main()
+ def testSslCertValidationWithInvalidCaCert(self):
+ if sys.version_info >= (2, 6):
+ http = httplib2.Http(ca_certs="/nosuchfile")
+ if sys.version_info >= (2, 7):
+ with self.assertRaises(IOError):
+ http.request("https://www.google.com/", "GET")
+ else:
+ self.assertRaises(
+ ssl.SSLError, http.request, "https://www.google.com/", "GET"
+ )
+
+ def testSslCertValidationWithSelfSignedCaCert(self):
+ if sys.version_info >= (2, 7):
+ other_ca_certs = os.path.join(
+ os.path.dirname(os.path.abspath(httplib2.__file__)),
+ "test",
+ "other_cacerts.txt",
+ )
+ http = httplib2.Http(ca_certs=other_ca_certs)
+ if sys.platform != "darwin":
+ with self.assertRaises(httplib2.SSLHandshakeError):
+ http.request("https://www.google.com/", "GET")
+
+ def testSslProtocolTlsV1AndShouldPass(self):
+ http = httplib2.Http(ssl_version=ssl.PROTOCOL_TLSv1)
+ urls = [
+ "https://www.amazon.com",
+ "https://www.apple.com",
+ "https://www.twitter.com",
+ ]
+ for url in urls:
+ if sys.version_info >= (2, 7):
+ self.assertIsNotNone(http.request(uri=url))
+
+ def testSslProtocolV3AndShouldFailDueToPoodle(self):
+ http = httplib2.Http(ssl_version=ssl.PROTOCOL_SSLv3)
+ urls = [
+ "https://www.amazon.com",
+ "https://www.apple.com",
+ "https://www.twitter.com",
+ ]
+ for url in urls:
+ if sys.version_info >= (2, 7):
+ with self.assertRaises(httplib2.SSLHandshakeError):
+ http.request(url)
+ try:
+ http.request(url)
+ except httplib2.SSLHandshakeError as e:
+ self.assertTrue("sslv3 alert handshake failure" in str(e))
+
+
+if __name__ == "__main__":
+ unittest.main()
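
Both test classes above drive the same two Http() constructor knobs. A minimal sketch
(the CA bundle path is a placeholder):

import ssl
import httplib2

# Pin the handshake to one protocol version, as the TLSv1 cases do.
http = httplib2.Http(ssl_version=ssl.PROTOCOL_TLSv1)

# Validate server certificates against a custom CA bundle; a nonexistent
# path surfaces as IOError on the first request, per the first test above.
http = httplib2.Http(ca_certs="/path/to/other_cacerts.txt")
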
diff --git a/python3/httplib2/__init__.py b/python3/httplib2/__init__.py
index 5dae2a5..b1b86f4 100644
--- a/python3/httplib2/__init__.py
+++ b/python3/httplib2/__init__.py
@@ -1,54 +1,47 @@
-"""
-httplib2
-
-A caching http interface that supports ETags and gzip
-to conserve bandwidth.
-
-Requires Python 3.0 or later
-
-Changelog:
-2009-05-28, Pilgrim: ported to Python 3
-2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
-
-"""
+# -*- coding: utf-8 -*-
+"""Small, fast HTTP client library for Python."""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
+__contributors__ = [
+ "Thomas Broyer (t.broyer@ltgt.net)",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger",
- "Mark Pilgrim"]
+ "Mark Pilgrim",
+ "Alex Yu",
+]
__license__ = "MIT"
-__version__ = '0.11.3'
+__version__ = '0.12.0'
-import re
-import sys
+import base64
+import calendar
+import copy
import email
-import email.utils
-import email.message
import email.feedparser
-import io
+from email import header
+import email.message
+import email.utils
+import errno
+from gettext import gettext as _
import gzip
-import zlib
+from hashlib import md5 as _md5
+from hashlib import sha1 as _sha
+import hmac
import http.client
-import urllib.parse
-import base64
+import io
import os
-import copy
-import calendar
-import time
import random
-import errno
-from hashlib import sha1 as _sha, md5 as _md5
-import hmac
-from gettext import gettext as _
+import re
import socket
import ssl
-
+import sys
+import time
+import urllib.parse
+import zlib
try:
import socks
@@ -58,17 +51,26 @@ except ImportError:
from . import socks
from .iri2uri import iri2uri
-def has_timeout(timeout):
- if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
- return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
- return (timeout is not None)
-
-__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error',
- 'RedirectMissingLocation', 'RedirectLimit',
- 'FailedToDecompressContent', 'UnimplementedDigestAuthOptionError',
- 'UnimplementedHmacDigestAuthOptionError',
- 'debuglevel', 'RETRIES']
+def has_timeout(timeout):
+ if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"):
+ return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT
+ return timeout is not None
+
+
+__all__ = [
+ "debuglevel",
+ "FailedToDecompressContent",
+ "Http",
+ "HttpLib2Error",
+ "ProxyInfo",
+ "RedirectLimit",
+ "RedirectMissingLocation",
+ "Response",
+ "RETRIES",
+ "UnimplementedDigestAuthOptionError",
+ "UnimplementedHmacDigestAuthOptionError",
+]
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
@@ -76,8 +78,11 @@ debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
+
# All exceptions raised here derive from HttpLib2Error
-class HttpLib2Error(Exception): pass
+class HttpLib2Error(Exception):
+ pass
+
# Some exceptions can be caught and optionally
# be turned back into responses.
@@ -87,17 +92,41 @@ class HttpLib2ErrorWithResponse(HttpLib2Error):
self.content = content
HttpLib2Error.__init__(self, desc)
-class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
-class RedirectLimit(HttpLib2ErrorWithResponse): pass
-class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
-class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
-class MalformedHeader(HttpLib2Error): pass
-class RelativeURIError(HttpLib2Error): pass
-class ServerNotFoundError(HttpLib2Error): pass
+class RedirectMissingLocation(HttpLib2ErrorWithResponse):
+ pass
+
+
+class RedirectLimit(HttpLib2ErrorWithResponse):
+ pass
+
+
+class FailedToDecompressContent(HttpLib2ErrorWithResponse):
+ pass
+
+
+class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse):
+ pass
+
+
+class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse):
+ pass
+
+
+class MalformedHeader(HttpLib2Error):
+ pass
+
+
+class RelativeURIError(HttpLib2Error):
+ pass
+
+
+class ServerNotFoundError(HttpLib2Error):
+ pass
+
-class ProxiesUnavailableError(HttpLib2Error): pass
+class ProxiesUnavailableError(HttpLib2Error):
+ pass
# Open Items:
@@ -114,7 +143,6 @@ class ProxiesUnavailableError(HttpLib2Error): pass
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
-
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
@@ -122,31 +150,45 @@ class ProxiesUnavailableError(HttpLib2Error): pass
DEFAULT_MAX_REDIRECTS = 5
# Which headers are hop-by-hop headers by default
-HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
-
-# Default CA certificates file bundled with httplib2.
-CA_CERTS = os.path.join(
- os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
+HOP_BY_HOP = [
+ "connection",
+ "keep-alive",
+ "proxy-authenticate",
+ "proxy-authorization",
+ "te",
+ "trailers",
+ "transfer-encoding",
+ "upgrade",
+]
+
+from httplib2 import certs
+CA_CERTS = certs.where()
# PROTOCOL_TLS is python 3.5.3+. PROTOCOL_SSLv23 is deprecated.
# Both PROTOCOL_TLS and PROTOCOL_SSLv23 are equivalent and means:
# > Selects the highest protocol version that both the client and server support.
# > Despite the name, this option can select “TLS” protocols as well as “SSL”.
# source: https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLS
-DEFAULT_TLS_VERSION = getattr(ssl, 'PROTOCOL_TLS', None) or getattr(ssl, 'PROTOCOL_SSLv23')
+DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS", None) or getattr(
+ ssl, "PROTOCOL_SSLv23"
+)
-def _build_ssl_context(disable_ssl_certificate_validation, ca_certs, cert_file=None, key_file=None):
- if not hasattr(ssl, 'SSLContext'):
+def _build_ssl_context(
+ disable_ssl_certificate_validation, ca_certs, cert_file=None, key_file=None
+):
+ if not hasattr(ssl, "SSLContext"):
raise RuntimeError("httplib2 requires Python 3.2+ for ssl.SSLContext")
context = ssl.SSLContext(DEFAULT_TLS_VERSION)
- context.verify_mode = ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED
+ context.verify_mode = (
+ ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED
+ )
# check_hostname requires python 3.4+
# we will perform the equivalent in HTTPSConnectionWithTimeout.connect() by calling ssl.match_hostname
# if check_hostname is not supported.
- if hasattr(context, 'check_hostname'):
+ if hasattr(context, "check_hostname"):
context.check_hostname = not disable_ssl_certificate_validation
context.load_verify_locations(ca_certs)
@@ -156,13 +198,16 @@ def _build_ssl_context(disable_ssl_certificate_validation, ca_certs, cert_file=N
return context
+
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
- hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
+ hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")])
return [header for header in list(response.keys()) if header not in hopbyhop]
+
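
Behavior is unchanged by the reflow above. For reference, the Connection header can
nominate extra hop-by-hop names beyond the HOP_BY_HOP defaults ("x-debug" here is a
made-up header for illustration):

_get_end2end_headers(
    {"connection": "x-debug", "x-debug": "1", "content-type": "text/plain", "te": "deflate"}
)
# -> ["content-type"]  ("te" is hop-by-hop by default; "x-debug" was nominated)
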
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
@@ -171,6 +216,7 @@ def parse_uri(uri):
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
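
The groups the Appendix B regex yields, for a concrete (made-up) URI:

parse_uri("http://example.com/a/b?x=1#frag")
# -> ('http', 'example.com', '/a/b', 'x=1', 'frag')
#     scheme  authority      path    query  fragment
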
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
@@ -188,55 +234,71 @@ def urlnorm(uri):
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
-re_url_scheme = re.compile(br'^\w+://')
-re_url_scheme_s = re.compile(r'^\w+://')
-re_slash = re.compile(br'[?/:|]+')
+re_url_scheme = re.compile(r"^\w+://")
+re_unsafe = re.compile(r"[^\w\-_.()=!]+", re.ASCII)
+
def safename(filename):
"""Return a filename suitable for the cache.
-
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
+ if isinstance(filename, bytes):
+ filename_bytes = filename
+ filename = filename.decode("utf-8")
+ else:
+ filename_bytes = filename.encode("utf-8")
+ filemd5 = _md5(filename_bytes).hexdigest()
+ filename = re_url_scheme.sub("", filename)
+ filename = re_unsafe.sub("", filename)
+
+ # limit length of filename (vital for Windows)
+ # https://github.com/httplib2/httplib2/pull/74
+ # C:\Users\ <username> \AppData\Local\Temp\ <safe_filename> , <md5>
+ # 9 chars + max 104 chars + 20 chars + x + 1 + 32 = max 259 chars
+ # Thus max safe filename x = 93 chars. Let it be 90 to make a round sum:
+ filename = filename[:90]
+
+ return ",".join((filename, filemd5))
+
+
+NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+")
+
- try:
- if re_url_scheme_s.match(filename):
- if isinstance(filename,bytes):
- filename = filename.decode('utf-8')
- filename = filename.encode('idna')
- else:
- filename = filename.encode('idna')
- except UnicodeError:
- pass
- if isinstance(filename,str):
- filename=filename.encode('utf-8')
- filemd5 = _md5(filename).hexdigest().encode('utf-8')
- filename = re_url_scheme.sub(b"", filename)
- filename = re_slash.sub(b",", filename)
-
- # limit length of filename
- if len(filename)>200:
- filename=filename[:200]
- return b",".join((filename, filemd5)).decode('utf-8')
-
-NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
- return dict([ (_convert_byte_str(key).lower(), NORMALIZE_SPACE.sub(_convert_byte_str(value), ' ').strip()) for (key, value) in headers.items()])
+ return dict(
+ [
+ (
+ _convert_byte_str(key).lower(),
+ NORMALIZE_SPACE.sub(_convert_byte_str(value), " ").strip(),
+ )
+ for (key, value) in headers.items()
+ ]
+ )
+
def _convert_byte_str(s):
if not isinstance(s, str):
- return str(s, 'utf-8')
+ return str(s, "utf-8")
return s
+
def _parse_cache_control(headers):
retval = {}
- if 'cache-control' in headers:
- parts = headers['cache-control'].split(',')
- parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
- parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
+ if "cache-control" in headers:
+ parts = headers["cache-control"].split(",")
+ parts_with_args = [
+ tuple([x.strip().lower() for x in part.split("=", 1)])
+ for part in parts
+ if -1 != part.find("=")
+ ]
+ parts_wo_args = [
+ (name.strip().lower(), 1) for name in parts if -1 == name.find("=")
+ ]
retval = dict(parts_with_args + parts_wo_args)
return retval
+
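
As reformatted, the parser still maps valued directives to their string argument and
bare directives to 1:

_parse_cache_control({"cache-control": "max-age=7200, no-store"})
# -> {"max-age": "7200", "no-store": 1}
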
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
@@ -248,21 +310,29 @@ USE_WWW_AUTH_STRICT_PARSING = 0
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
-WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
-WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
-UNQUOTE_PAIRS = re.compile(r'\\(.)')
-def _parse_www_authenticate(headers, headername='www-authenticate'):
+WWW_AUTH_STRICT = re.compile(
+ r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$"
+)
+WWW_AUTH_RELAXED = re.compile(
+ r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$"
+)
+UNQUOTE_PAIRS = re.compile(r"\\(.)")
+
+
+def _parse_www_authenticate(headers, headername="www-authenticate"):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
if headername in headers:
try:
authenticate = headers[headername].strip()
- www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+ www_auth = (
+ USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
+ )
while authenticate:
# Break off the scheme at the beginning of the line
- if headername == 'authentication-info':
- (auth_scheme, the_rest) = ('digest', authenticate)
+ if headername == "authentication-info":
+ (auth_scheme, the_rest) = ("digest", authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
@@ -272,7 +342,9 @@ def _parse_www_authenticate(headers, headername='www-authenticate'):
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
- auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
+ auth_params[key.lower()] = UNQUOTE_PAIRS.sub(
+ r"\1", value
+ ) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
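
The reflowed loop produces one dict of auth-params per challenge, keyed by the
lowercased scheme (header value borrowed from the tests earlier in this commit):

_parse_www_authenticate({"www-authenticate": 'Basic realm="me",other="fred" '})
# -> {"basic": {"realm": "me", "other": "fred"}}
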
@@ -313,41 +385,44 @@ def _entry_disposition(response_headers, request_headers):
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
- if 'pragma' in request_headers and request_headers['pragma'].lower().find('no-cache') != -1:
+ if (
+ "pragma" in request_headers
+ and request_headers["pragma"].lower().find("no-cache") != -1
+ ):
retval = "TRANSPARENT"
- if 'cache-control' not in request_headers:
- request_headers['cache-control'] = 'no-cache'
- elif 'no-cache' in cc:
+ if "cache-control" not in request_headers:
+ request_headers["cache-control"] = "no-cache"
+ elif "no-cache" in cc:
retval = "TRANSPARENT"
- elif 'no-cache' in cc_response:
+ elif "no-cache" in cc_response:
retval = "STALE"
- elif 'only-if-cached' in cc:
+ elif "only-if-cached" in cc:
retval = "FRESH"
- elif 'date' in response_headers:
- date = calendar.timegm(email.utils.parsedate_tz(response_headers['date']))
+ elif "date" in response_headers:
+ date = calendar.timegm(email.utils.parsedate_tz(response_headers["date"]))
now = time.time()
current_age = max(0, now - date)
- if 'max-age' in cc_response:
+ if "max-age" in cc_response:
try:
- freshness_lifetime = int(cc_response['max-age'])
+ freshness_lifetime = int(cc_response["max-age"])
except ValueError:
freshness_lifetime = 0
- elif 'expires' in response_headers:
- expires = email.utils.parsedate_tz(response_headers['expires'])
+ elif "expires" in response_headers:
+ expires = email.utils.parsedate_tz(response_headers["expires"])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
- if 'max-age' in cc:
+ if "max-age" in cc:
try:
- freshness_lifetime = int(cc['max-age'])
+ freshness_lifetime = int(cc["max-age"])
except ValueError:
freshness_lifetime = 0
- if 'min-fresh' in cc:
+ if "min-fresh" in cc:
try:
- min_fresh = int(cc['min-fresh'])
+ min_fresh = int(cc["min-fresh"])
except ValueError:
min_fresh = 0
current_age += min_fresh
@@ -355,60 +430,69 @@ def _entry_disposition(response_headers, request_headers):
retval = "FRESH"
return retval
+
def _decompressContent(response, new_content):
content = new_content
try:
- encoding = response.get('content-encoding', None)
- if encoding in ['gzip', 'deflate']:
- if encoding == 'gzip':
+ encoding = response.get("content-encoding", None)
+ if encoding in ["gzip", "deflate"]:
+ if encoding == "gzip":
content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read()
- if encoding == 'deflate':
+ if encoding == "deflate":
content = zlib.decompress(content, -zlib.MAX_WBITS)
- response['content-length'] = str(len(content))
+ response["content-length"] = str(len(content))
        # Record the historical presence of the encoding in a way that won't interfere.
- response['-content-encoding'] = response['content-encoding']
- del response['content-encoding']
+ response["-content-encoding"] = response["content-encoding"]
+ del response["content-encoding"]
except (IOError, zlib.error):
content = ""
- raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
+ raise FailedToDecompressContent(
+ _("Content purported to be compressed with %s but failed to decompress.")
+ % response.get("content-encoding"),
+ response,
+ content,
+ )
return content
+
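
A quick sketch of the contract (a plain dict stands in for the httplib2.Response
mapping that callers normally pass):

import gzip

response = {"content-encoding": "gzip"}
body = _decompressContent(response, gzip.compress(b"hello"))
assert body == b"hello"
# The encoding is renamed rather than silently dropped:
assert response["-content-encoding"] == "gzip"
assert "content-encoding" not in response
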
def _bind_write_headers(msg):
- from email.header import Header
- def _write_headers(self):
- # Self refers to the Generator object
- for h, v in msg.items():
- print('%s:' % h, end=' ', file=self._fp)
- if isinstance(v, Header):
- print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp)
- else:
- # Header's got lots of smarts, so use it.
- header = Header(v, maxlinelen=self._maxheaderlen, charset='utf-8',
- header_name=h)
- print(header.encode(), file=self._fp)
- # A blank line always separates headers from body
- print(file=self._fp)
- return _write_headers
+ def _write_headers(self):
+ # Self refers to the Generator object.
+ for h, v in msg.items():
+ print("%s:" % h, end=" ", file=self._fp)
+ if isinstance(v, header.Header):
+ print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp)
+ else:
+                # email.header.Header has lots of smarts, so use it.
+ headers = header.Header(
+ v, maxlinelen=self._maxheaderlen, charset="utf-8", header_name=h
+ )
+ print(headers.encode(), file=self._fp)
+ # A blank line always separates headers from body.
+ print(file=self._fp)
+
+ return _write_headers
+
def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
- if 'no-store' in cc or 'no-store' in cc_response:
+ if "no-store" in cc or "no-store" in cc_response:
cache.delete(cachekey)
else:
info = email.message.Message()
for key, value in response_headers.items():
- if key not in ['status','content-encoding','transfer-encoding']:
+ if key not in ["status", "content-encoding", "transfer-encoding"]:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
- vary = response_headers.get('vary', None)
+ vary = response_headers.get("vary", None)
if vary:
- vary_headers = vary.lower().replace(' ', '').split(',')
+ vary_headers = vary.lower().replace(" ", "").split(",")
for header in vary_headers:
- key = '-varied-%s' % header
+ key = "-varied-%s" % header
try:
info[key] = request_headers[header]
except KeyError:
@@ -418,25 +502,36 @@ def _updateCache(request_headers, response_headers, content, cache, cachekey):
if status == 304:
status = 200
- status_header = 'status: %d\r\n' % status
+ status_header = "status: %d\r\n" % status
try:
header_str = info.as_string()
except UnicodeEncodeError:
- setattr(info, '_write_headers', _bind_write_headers(info))
+ setattr(info, "_write_headers", _bind_write_headers(info))
header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
- text = b"".join([status_header.encode('utf-8'), header_str.encode('utf-8'), content])
+ text = b"".join(
+ [status_header.encode("utf-8"), header_str.encode("utf-8"), content]
+ )
cache.set(cachekey, text)
+
def _cnonce():
- dig = _md5(("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).encode('utf-8')).hexdigest()
+ dig = _md5(
+ (
+ "%s:%s"
+ % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])
+ ).encode("utf-8")
+ ).hexdigest()
return dig[:16]
+
def _wsse_username_token(cnonce, iso_now, password):
- return base64.b64encode(_sha(("%s%s%s" % (cnonce, iso_now, password)).encode('utf-8')).digest()).strip()
+ return base64.b64encode(
+ _sha(("%s%s%s" % (cnonce, iso_now, password)).encode("utf-8")).digest()
+ ).strip()
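
For reference, the reformatted helper still reproduces the WSSE vector that
testWsseAlgorithm checks earlier in this commit (the Python 3 version returns
bytes, since base64.b64encode does):

_wsse_username_token(
    "d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm"
)
# -> b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="
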
# For credentials we need two things, first
@@ -447,8 +542,11 @@ def _wsse_username_token(cnonce, iso_now, password):
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
+
class Authentication(object):
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
@@ -457,7 +555,7 @@ class Authentication(object):
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
- return request_uri[len(self.path):].count("/")
+ return request_uri[len(self.path) :].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
@@ -502,105 +600,169 @@ class Authentication(object):
class BasicAuthentication(Authentication):
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'Basic ' + base64.b64encode(("%s:%s" % self.credentials).encode('utf-8')).strip().decode('utf-8')
+ headers["authorization"] = "Basic " + base64.b64encode(
+ ("%s:%s" % self.credentials).encode("utf-8")
+ ).strip().decode("utf-8")
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
- challenge = _parse_www_authenticate(response, 'www-authenticate')
- self.challenge = challenge['digest']
- qop = self.challenge.get('qop', 'auth')
- self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
- if self.challenge['qop'] is None:
- raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
- self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
- if self.challenge['algorithm'] != 'MD5':
- raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
- self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
- self.challenge['nc'] = 1
-
- def request(self, method, request_uri, headers, content, cnonce = None):
+
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
+ challenge = _parse_www_authenticate(response, "www-authenticate")
+ self.challenge = challenge["digest"]
+ qop = self.challenge.get("qop", "auth")
+ self.challenge["qop"] = (
+ ("auth" in [x.strip() for x in qop.split()]) and "auth" or None
+ )
+ if self.challenge["qop"] is None:
+ raise UnimplementedDigestAuthOptionError(
+ _("Unsupported value for qop: %s." % qop)
+ )
+ self.challenge["algorithm"] = self.challenge.get("algorithm", "MD5").upper()
+ if self.challenge["algorithm"] != "MD5":
+ raise UnimplementedDigestAuthOptionError(
+ _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+ )
+ self.A1 = "".join(
+ [
+ self.credentials[0],
+ ":",
+ self.challenge["realm"],
+ ":",
+ self.credentials[1],
+ ]
+ )
+ self.challenge["nc"] = 1
+
+ def request(self, method, request_uri, headers, content, cnonce=None):
"""Modify the request headers"""
- H = lambda x: _md5(x.encode('utf-8')).hexdigest()
+ H = lambda x: _md5(x.encode("utf-8")).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
- self.challenge['cnonce'] = cnonce or _cnonce()
- request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
- self.challenge['nonce'],
- '%08x' % self.challenge['nc'],
- self.challenge['cnonce'],
- self.challenge['qop'], H(A2)))
- headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
- self.credentials[0],
- self.challenge['realm'],
- self.challenge['nonce'],
- request_uri,
- self.challenge['algorithm'],
- request_digest,
- self.challenge['qop'],
- self.challenge['nc'],
- self.challenge['cnonce'])
- if self.challenge.get('opaque'):
- headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
- self.challenge['nc'] += 1
+ self.challenge["cnonce"] = cnonce or _cnonce()
+ request_digest = '"%s"' % KD(
+ H(self.A1),
+ "%s:%s:%s:%s:%s"
+ % (
+ self.challenge["nonce"],
+ "%08x" % self.challenge["nc"],
+ self.challenge["cnonce"],
+ self.challenge["qop"],
+ H(A2),
+ ),
+ )
+ headers["authorization"] = (
+ 'Digest username="%s", realm="%s", nonce="%s", '
+ 'uri="%s", algorithm=%s, response=%s, qop=%s, '
+ 'nc=%08x, cnonce="%s"'
+ ) % (
+ self.credentials[0],
+ self.challenge["realm"],
+ self.challenge["nonce"],
+ request_uri,
+ self.challenge["algorithm"],
+ request_digest,
+ self.challenge["qop"],
+ self.challenge["nc"],
+ self.challenge["cnonce"],
+ )
+ if self.challenge.get("opaque"):
+ headers["authorization"] += ', opaque="%s"' % self.challenge["opaque"]
+ self.challenge["nc"] += 1
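The request_digest assembled above is RFC 2617's qop="auth" construction, KD(H(A1), nonce:nc:cnonce:qop:H(A2)). The same arithmetic in isolation, with hypothetical nonce values:

    import hashlib

    H = lambda x: hashlib.md5(x.encode("utf-8")).hexdigest()
    KD = lambda secret, data: H("%s:%s" % (secret, data))

    A1 = "joe:example.com:s3cr3t"  # user:realm:password (all hypothetical)
    A2 = "GET:/protected"          # method:request_uri
    digest = KD(H(A1), "%s:%s:%s:%s:%s" % ("servernonce", "%08x" % 1, "clientnonce", "auth", H(A2)))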
def response(self, response, content):
- if 'authentication-info' not in response:
- challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
- if 'true' == challenge.get('stale'):
- self.challenge['nonce'] = challenge['nonce']
- self.challenge['nc'] = 1
+ if "authentication-info" not in response:
+ challenge = _parse_www_authenticate(response, "www-authenticate").get(
+ "digest", {}
+ )
+ if "true" == challenge.get("stale"):
+ self.challenge["nonce"] = challenge["nonce"]
+ self.challenge["nc"] = 1
return True
else:
- updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
+ updated_challenge = _parse_www_authenticate(
+ response, "authentication-info"
+ ).get("digest", {})
- if 'nextnonce' in updated_challenge:
- self.challenge['nonce'] = updated_challenge['nextnonce']
- self.challenge['nc'] = 1
+ if "nextnonce" in updated_challenge:
+ self.challenge["nonce"] = updated_challenge["nextnonce"]
+ self.challenge["nc"] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
+
__author__ = "Thomas Broyer (t.broyer@ltgt.net)"
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
- challenge = _parse_www_authenticate(response, 'www-authenticate')
- self.challenge = challenge['hmacdigest']
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
+ challenge = _parse_www_authenticate(response, "www-authenticate")
+ self.challenge = challenge["hmacdigest"]
# TODO: self.challenge['domain']
- self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
- if self.challenge['reason'] not in ['unauthorized', 'integrity']:
- self.challenge['reason'] = 'unauthorized'
- self.challenge['salt'] = self.challenge.get('salt', '')
- if not self.challenge.get('snonce'):
- raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
- self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
- if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
- raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
- self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
- if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
- raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
- if self.challenge['algorithm'] == 'HMAC-MD5':
+ self.challenge["reason"] = self.challenge.get("reason", "unauthorized")
+ if self.challenge["reason"] not in ["unauthorized", "integrity"]:
+ self.challenge["reason"] = "unauthorized"
+ self.challenge["salt"] = self.challenge.get("salt", "")
+ if not self.challenge.get("snonce"):
+ raise UnimplementedHmacDigestAuthOptionError(
+ _("The challenge doesn't contain a server nonce, or this one is empty.")
+ )
+ self.challenge["algorithm"] = self.challenge.get("algorithm", "HMAC-SHA-1")
+ if self.challenge["algorithm"] not in ["HMAC-SHA-1", "HMAC-MD5"]:
+ raise UnimplementedHmacDigestAuthOptionError(
+ _("Unsupported value for algorithm: %s." % self.challenge["algorithm"])
+ )
+ self.challenge["pw-algorithm"] = self.challenge.get("pw-algorithm", "SHA-1")
+ if self.challenge["pw-algorithm"] not in ["SHA-1", "MD5"]:
+ raise UnimplementedHmacDigestAuthOptionError(
+ _(
+ "Unsupported value for pw-algorithm: %s."
+ % self.challenge["pw-algorithm"]
+ )
+ )
+ if self.challenge["algorithm"] == "HMAC-MD5":
self.hashmod = _md5
else:
self.hashmod = _sha
- if self.challenge['pw-algorithm'] == 'MD5':
+ if self.challenge["pw-algorithm"] == "MD5":
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
- self.key = "".join([self.credentials[0], ":",
- self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
- ":", self.challenge['realm']])
+ self.key = "".join(
+ [
+ self.credentials[0],
+ ":",
+ self.pwhashmod.new(
+ "".join([self.credentials[1], self.challenge["salt"]])
+ )
+ .hexdigest()
+ .lower(),
+ ":",
+ self.challenge["realm"],
+ ]
+ )
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
@@ -608,23 +770,38 @@ class HmacDigestAuthentication(Authentication):
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
- created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
+ created = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
- request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
- request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
- headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
- self.credentials[0],
- self.challenge['realm'],
- self.challenge['snonce'],
- cnonce,
- request_uri,
- created,
- request_digest,
- keylist)
+ request_digest = "%s:%s:%s:%s:%s" % (
+ method,
+ request_uri,
+ cnonce,
+ self.challenge["snonce"],
+ headers_val,
+ )
+ request_digest = (
+ hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
+ )
+ headers["authorization"] = (
+ 'HMACDigest username="%s", realm="%s", snonce="%s",'
+ ' cnonce="%s", uri="%s", created="%s", '
+ 'response="%s", headers="%s"'
+ ) % (
+ self.credentials[0],
+ self.challenge["realm"],
+ self.challenge["snonce"],
+ cnonce,
+ request_uri,
+ created,
+ request_digest,
+ keylist,
+ )
def response(self, response, content):
- challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
- if challenge.get('reason') in ['integrity', 'stale']:
+ challenge = _parse_www_authenticate(response, "www-authenticate").get(
+ "hmacdigest", {}
+ )
+ if challenge.get("reason") in ["integrity", "stale"]:
return True
return False
@@ -637,49 +814,69 @@ class WsseAuthentication(Authentication):
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
+
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'WSSE profile="UsernameToken"'
+ headers["authorization"] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
- headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
- self.credentials[0],
- password_digest,
- cnonce,
- iso_now)
+ headers["X-WSSE"] = (
+ 'UsernameToken Username="%s", PasswordDigest="%s", '
+ 'Nonce="%s", Created="%s"'
+ ) % (self.credentials[0], password_digest, cnonce, iso_now)
+
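The PasswordDigest above comes from _wsse_username_token earlier in the file, i.e. base64(sha1(nonce + created + password)). A quick sketch with hypothetical values:

    import base64, hashlib, time

    cnonce, password = "0123abcd", "s3cr3t"  # hypothetical nonce and password
    iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
    digest = base64.b64encode(
        hashlib.sha1((cnonce + iso_now + password).encode("utf-8")).digest()
    ).strip().decode("utf-8")
    # -> the PasswordDigest value for the X-WSSE header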
class GoogleLoginAuthentication(Authentication):
- def __init__(self, credentials, host, request_uri, headers, response, content, http):
+ def __init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ ):
from urllib.parse import urlencode
- Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
- challenge = _parse_www_authenticate(response, 'www-authenticate')
- service = challenge['googlelogin'].get('service', 'xapi')
+
+ Authentication.__init__(
+ self, credentials, host, request_uri, headers, response, content, http
+ )
+ challenge = _parse_www_authenticate(response, "www-authenticate")
+ service = challenge["googlelogin"].get("service", "xapi")
        # Blogger actually returns the service in the challenge
# For the rest we guess based on the URI
- if service == 'xapi' and request_uri.find("calendar") > 0:
+ if service == "xapi" and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
- #elif request_uri.find("spreadsheets") > 0:
+ # elif request_uri.find("spreadsheets") > 0:
# service = "wise"
- auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
- resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
- lines = content.split('\n')
+ auth = dict(
+ Email=credentials[0],
+ Passwd=credentials[1],
+ service=service,
+ source=headers["user-agent"],
+ )
+ resp, content = self.http.request(
+ "https://www.google.com/accounts/ClientLogin",
+ method="POST",
+ body=urlencode(auth),
+ headers={"Content-Type": "application/x-www-form-urlencoded"},
+ )
+ lines = content.split("\n")
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
- self.Auth = d['Auth']
+ self.Auth = d["Auth"]
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
- headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
+ headers["authorization"] = "GoogleLogin Auth=" + self.Auth
AUTH_SCHEME_CLASSES = {
@@ -687,17 +884,21 @@ AUTH_SCHEME_CLASSES = {
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
- "googlelogin": GoogleLoginAuthentication
+ "googlelogin": GoogleLoginAuthentication,
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
+
class FileCache(object):
"""Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
- def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
+
+ def __init__(
+ self, cache, safe=safename
+ ): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
@@ -725,6 +926,7 @@ class FileCache(object):
if os.path.exists(cacheFullPath):
os.remove(cacheFullPath)
+
class Credentials(object):
def __init__(self):
self.credentials = []
@@ -740,9 +942,11 @@ class Credentials(object):
if cdomain == "" or domain == cdomain:
yield (name, password)
+
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
+
pass
@@ -751,98 +955,117 @@ class AllHosts(object):
class ProxyInfo(object):
- """Collect information required to use a proxy."""
- bypass_hosts = ()
+ """Collect information required to use a proxy."""
+
+ bypass_hosts = ()
+
+ def __init__(
+ self,
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns=True,
+ proxy_user=None,
+ proxy_pass=None,
+ proxy_headers=None,
+ ):
+ """Args:
- def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None):
- """
- Args:
proxy_type: The type of proxy server. This must be set to one of
- socks.PROXY_TYPE_XXX constants. For example:
-
- p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
- proxy_host='localhost', proxy_port=8000)
-
+ socks.PROXY_TYPE_XXX constants. For example: p =
+ ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost',
+ proxy_port=8000)
proxy_host: The hostname or IP address of the proxy server.
-
proxy_port: The port that the proxy server is running on.
-
proxy_rdns: If True (default), DNS queries will not be performed
locally, and instead, handed to the proxy to resolve. This is useful
- if the network does not allow resolution of non-local names. In
+ if the network does not allow resolution of non-local names. In
httplib2 0.9 and earlier, this defaulted to False.
-
proxy_user: The username used to authenticate with the proxy server.
-
proxy_pass: The password used to authenticate with the proxy server.
+ proxy_headers: Additional or modified headers for the proxy connect
+ request.
+ """
+ self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers = (
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+
+ def astuple(self):
+ return (
+ self.proxy_type,
+ self.proxy_host,
+ self.proxy_port,
+ self.proxy_rdns,
+ self.proxy_user,
+ self.proxy_pass,
+ self.proxy_headers,
+ )
+
+ def isgood(self):
+ return socks and (self.proxy_host != None) and (self.proxy_port != None)
+
+ def applies_to(self, hostname):
+ return not self.bypass_host(hostname)
+
+ def bypass_host(self, hostname):
+ """Has this host been excluded from the proxy config"""
+ if self.bypass_hosts is AllHosts:
+ return True
- proxy_headers: Additional or modified headers for the proxy connect request.
- """
- self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers
-
- def astuple(self):
- return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns,
- self.proxy_user, self.proxy_pass, self.proxy_headers)
-
- def isgood(self):
- return socks and (self.proxy_host != None) and (self.proxy_port != None)
-
- def applies_to(self, hostname):
- return not self.bypass_host(hostname)
-
- def bypass_host(self, hostname):
- """Has this host been excluded from the proxy config"""
- if self.bypass_hosts is AllHosts:
- return True
-
- hostname = '.' + hostname.lstrip('.')
- for skip_name in self.bypass_hosts:
- # *.suffix
- if skip_name.startswith('.') and hostname.endswith(skip_name):
- return True
- # exact match
- if hostname == '.' + skip_name:
- return True
- return False
+ hostname = "." + hostname.lstrip(".")
+ for skip_name in self.bypass_hosts:
+ # *.suffix
+ if skip_name.startswith(".") and hostname.endswith(skip_name):
+ return True
+ # exact match
+ if hostname == "." + skip_name:
+ return True
+ return False
- def __repr__(self):
- return (
- '<ProxyInfo type={p.proxy_type} host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}' +
- ' user={p.proxy_user} headers={p.proxy_headers}>').format(p=self)
+ def __repr__(self):
+ return (
+ "<ProxyInfo type={p.proxy_type} "
+ "host:port={p.proxy_host}:{p.proxy_port} rdns={p.proxy_rdns}"
+ + " user={p.proxy_user} headers={p.proxy_headers}>"
+ ).format(p=self)
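Putting the class together, a plausible construction (assuming the bundled socks module, where PROXY_TYPE_HTTP == 3):

    pi = ProxyInfo(proxy_type=3, proxy_host="localhost", proxy_port=8080)
    pi.bypass_hosts = (".corp.example.com",)   # hypothetical bypass suffix
    pi.applies_to("www.example.com")           # True: routed through the proxy
    pi.applies_to("git.corp.example.com")      # False: matches the *.suffix rule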
-def proxy_info_from_environment(method='http'):
- """
- Read proxy info from the environment variables.
+def proxy_info_from_environment(method="http"):
+ """Read proxy info from the environment variables.
"""
- if method not in ('http', 'https'):
+ if method not in ("http", "https"):
return
- env_var = method + '_proxy'
+ env_var = method + "_proxy"
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
return
return proxy_info_from_url(url, method, noproxy=None)
-def proxy_info_from_url(url, method='http', noproxy=None):
- """
- Construct a ProxyInfo from a URL (such as http_proxy env var)
+def proxy_info_from_url(url, method="http", noproxy=None):
+ """Construct a ProxyInfo from a URL (such as http_proxy env var)
"""
url = urllib.parse.urlparse(url)
username = None
password = None
port = None
- if '@' in url[1]:
- ident, host_port = url[1].split('@', 1)
- if ':' in ident:
- username, password = ident.split(':', 1)
+ if "@" in url[1]:
+ ident, host_port = url[1].split("@", 1)
+ if ":" in ident:
+ username, password = ident.split(":", 1)
else:
password = ident
else:
host_port = url[1]
- if ':' in host_port:
- host, port = host_port.split(':', 1)
+ if ":" in host_port:
+ host, port = host_port.split(":", 1)
else:
host = host_port
@@ -864,12 +1087,12 @@ def proxy_info_from_url(url, method='http', noproxy=None):
bypass_hosts = []
# If not given an explicit noproxy value, respect values in env vars.
if noproxy is None:
- noproxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
+ noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", ""))
# Special case: A single '*' character means all hosts should be bypassed.
- if noproxy == '*':
+ if noproxy == "*":
bypass_hosts = AllHosts
elif noproxy.strip():
- bypass_hosts = noproxy.split(',')
+ bypass_hosts = noproxy.split(",")
bypass_hosts = tuple(filter(bool, bypass_hosts)) # To exclude empty string.
pi.bypass_hosts = bypass_hosts
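A hypothetical proxy URL exercises both the credential split and the no_proxy handling:

    pi = proxy_info_from_url(
        "http://joe:s3cr3t@proxy.example.com:3128", noproxy="localhost,127.0.0.1"
    )
    # pi.proxy_host == "proxy.example.com", pi.proxy_port == 3128
    # pi.proxy_user == "joe", pi.proxy_pass == "s3cr3t"
    # pi.bypass_hosts == ("localhost", "127.0.0.1")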
@@ -888,21 +1111,23 @@ class HTTPConnectionWithTimeout(http.client.HTTPConnection):
"""
def __init__(self, host, port=None, timeout=None, proxy_info=None):
- http.client.HTTPConnection.__init__(self, host, port=port,
- timeout=timeout)
+ http.client.HTTPConnection.__init__(self, host, port=port, timeout=timeout)
self.proxy_info = proxy_info
if proxy_info and not isinstance(proxy_info, ProxyInfo):
- self.proxy_info = proxy_info('http')
+ self.proxy_info = proxy_info("http")
def connect(self):
"""Connect to the host and port specified in __init__."""
if self.proxy_info and socks is None:
raise ProxiesUnavailableError(
- 'Proxy support missing but proxy use was requested!')
+ "Proxy support missing but proxy use was requested!"
+ )
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
- proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = self.proxy_info.astuple()
+ proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+ self.proxy_info.astuple()
+ )
host = proxy_host
port = proxy_port
@@ -920,7 +1145,14 @@ class HTTPConnectionWithTimeout(http.client.HTTPConnection):
try:
if use_proxy:
self.sock = socks.socksocket(af, socktype, proto)
- self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
+ self.sock.setproxy(
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ )
else:
self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@@ -928,22 +1160,44 @@ class HTTPConnectionWithTimeout(http.client.HTTPConnection):
self.sock.settimeout(self.timeout)
if self.debuglevel > 0:
print(
- "connect: ({0}, {1}) ************".format(self.host, self.port))
+ "connect: ({0}, {1}) ************".format(self.host, self.port)
+ )
if use_proxy:
print(
- "proxy: {0} ************".format(str(
- (proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
+ "proxy: {0} ************".format(
+ str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
+ )
self.sock.connect((self.host, self.port) + sa[2:])
except socket.error as e:
socket_err = e
if self.debuglevel > 0:
- print(
- "connect fail: ({0}, {1})".format(self.host, self.port))
+ print("connect fail: ({0}, {1})".format(self.host, self.port))
if use_proxy:
print(
- "proxy: {0}".format(str(
- (proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
+ "proxy: {0}".format(
+ str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
+ )
if self.sock:
self.sock.close()
self.sock = None
@@ -954,8 +1208,7 @@ class HTTPConnectionWithTimeout(http.client.HTTPConnection):
class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
- """
- This class allows communication via SSL.
+ """This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
@@ -963,26 +1216,44 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- timeout=None, proxy_info=None,
- ca_certs=None, disable_ssl_certificate_validation=False):
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ timeout=None,
+ proxy_info=None,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ ):
self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
self.ca_certs = ca_certs if ca_certs else CA_CERTS
self.proxy_info = proxy_info
if proxy_info and not isinstance(proxy_info, ProxyInfo):
- self.proxy_info = proxy_info('https')
-
- context = _build_ssl_context(self.disable_ssl_certificate_validation, self.ca_certs, cert_file, key_file)
- super(HTTPSConnectionWithTimeout, self).__init__(host, port=port, key_file=key_file, cert_file=cert_file,
- timeout=timeout, context=context)
+ self.proxy_info = proxy_info("https")
+
+ context = _build_ssl_context(
+ self.disable_ssl_certificate_validation, self.ca_certs, cert_file, key_file
+ )
+ super(HTTPSConnectionWithTimeout, self).__init__(
+ host,
+ port=port,
+ key_file=key_file,
+ cert_file=cert_file,
+ timeout=timeout,
+ context=context,
+ )
def connect(self):
"""Connect to a host on a given (SSL) port."""
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
- proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = self.proxy_info.astuple()
+ proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = (
+ self.proxy_info.astuple()
+ )
host = proxy_host
port = proxy_port
@@ -1002,7 +1273,14 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
if use_proxy:
sock = socks.socksocket(family, socktype, proto)
- sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
+ sock.setproxy(
+ proxy_type,
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ )
else:
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
@@ -1013,7 +1291,10 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
self.sock = self._context.wrap_socket(sock, server_hostname=self.host)
# Python 3.3 compatibility: emulate the check_hostname behavior
- if not hasattr(self._context, 'check_hostname') and not self.disable_ssl_certificate_validation:
+ if (
+ not hasattr(self._context, "check_hostname")
+ and not self.disable_ssl_certificate_validation
+ ):
try:
ssl.match_hostname(self.sock.getpeercert(), self.host)
except Exception:
@@ -1024,8 +1305,20 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
if self.debuglevel > 0:
print("connect: ({0}, {1})".format(self.host, self.port))
if use_proxy:
- print("proxy: {0}".format(str(
- (proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
+ print(
+ "proxy: {0}".format(
+ str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
+ )
except (ssl.SSLError, ssl.CertificateError) as e:
if sock:
sock.close()
@@ -1040,7 +1333,20 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
if self.debuglevel > 0:
print("connect fail: ({0}, {1})".format((self.host, self.port)))
if use_proxy:
- print("proxy: {0}".format(str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers))))
+ print(
+ "proxy: {0}".format(
+ str(
+ (
+ proxy_host,
+ proxy_port,
+ proxy_rdns,
+ proxy_user,
+ proxy_pass,
+ proxy_headers,
+ )
+ )
+ )
+ )
if self.sock:
self.sock.close()
self.sock = None
@@ -1051,10 +1357,11 @@ class HTTPSConnectionWithTimeout(http.client.HTTPSConnection):
SCHEME_TO_CONNECTION = {
- 'http': HTTPConnectionWithTimeout,
- 'https': HTTPSConnectionWithTimeout,
+ "http": HTTPConnectionWithTimeout,
+ "https": HTTPSConnectionWithTimeout,
}
+
class Http(object):
"""An HTTP client that handles:
@@ -1069,9 +1376,15 @@ class Http(object):
and more.
"""
- def __init__(self, cache=None, timeout=None,
- proxy_info=proxy_info_from_environment,
- ca_certs=None, disable_ssl_certificate_validation=False):
+
+ def __init__(
+ self,
+ cache=None,
+ timeout=None,
+ proxy_info=proxy_info_from_environment,
+ ca_certs=None,
+ disable_ssl_certificate_validation=False,
+ ):
"""If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
@@ -1097,8 +1410,7 @@ class Http(object):
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
- self.disable_ssl_certificate_validation = \
- disable_ssl_certificate_validation
+ self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
# Map domain name to an httplib connection
self.connections = {}
# The location of the cache, for now a directory
@@ -1141,10 +1453,10 @@ class Http(object):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
- if 'request' in state_dict:
- del state_dict['request']
- if 'connections' in state_dict:
- del state_dict['connections']
+ if "request" in state_dict:
+ del state_dict["request"]
+ if "connections" in state_dict:
+ del state_dict["connections"]
return state_dict
def __setstate__(self, state):
@@ -1155,11 +1467,13 @@ class Http(object):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
- challenges = _parse_www_authenticate(response, 'www-authenticate')
+ challenges = _parse_www_authenticate(response, "www-authenticate")
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if scheme in challenges:
- yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
+ yield AUTH_SCHEME_CLASSES[scheme](
+ cred, host, request_uri, headers, response, content, self
+ )
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
@@ -1193,20 +1507,22 @@ class Http(object):
conn.close()
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
except socket.error as e:
- errno_ = (e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno)
+ errno_ = (
+ e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno
+ )
if errno_ in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
continue # retry on potentially transient errors
raise
except http.client.HTTPException:
if conn.sock is None:
- if i < RETRIES-1:
+ if i < RETRIES - 1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
- if i < RETRIES-1:
+ if i < RETRIES - 1:
conn.close()
conn.connect()
continue
@@ -1251,76 +1567,121 @@ class Http(object):
break
return (response, content)
-
- def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
+ def _request(
+ self,
+ conn,
+ host,
+ absolute_uri,
+ request_uri,
+ method,
+ body,
+ headers,
+ redirections,
+ cachekey,
+ ):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
- auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
+ auths = [
+ (auth.depth(request_uri), auth)
+ for auth in self.authorizations
+ if auth.inscope(host, request_uri)
+ ]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
- (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+ (response, content) = self._conn_request(
+ conn, request_uri, method, body, headers
+ )
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
- (response, content) = self._conn_request(conn, request_uri, method, body, headers )
+ (response, content) = self._conn_request(
+ conn, request_uri, method, body, headers
+ )
response._stale_digest = 1
if response.status == 401:
- for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
+ for authorization in self._auth_from_challenge(
+ host, request_uri, headers, response, content
+ ):
authorization.request(method, request_uri, headers, body)
- (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
+ (response, content) = self._conn_request(
+ conn, request_uri, method, body, headers
+ )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
- if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
+ if (
+ self.follow_all_redirects
+ or (method in ["GET", "HEAD"])
+ or response.status == 303
+ ):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
- if 'location' not in response and response.status != 300:
- raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
+ if "location" not in response and response.status != 300:
+ raise RedirectMissingLocation(
+ _(
+ "Redirected but the response is missing a Location: header."
+ ),
+ response,
+ content,
+ )
# Fix-up relative redirects (which violate an RFC 2616 MUST)
- if 'location' in response:
- location = response['location']
+ if "location" in response:
+ location = response["location"]
(scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None:
- response['location'] = urllib.parse.urljoin(absolute_uri, location)
+ response["location"] = urllib.parse.urljoin(
+ absolute_uri, location
+ )
if response.status == 301 and method in ["GET", "HEAD"]:
- response['-x-permanent-redirect-url'] = response['location']
- if 'content-location' not in response:
- response['content-location'] = absolute_uri
+ response["-x-permanent-redirect-url"] = response["location"]
+ if "content-location" not in response:
+ response["content-location"] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
- if 'if-none-match' in headers:
- del headers['if-none-match']
- if 'if-modified-since' in headers:
- del headers['if-modified-since']
- if 'authorization' in headers and not self.forward_authorization_headers:
- del headers['authorization']
- if 'location' in response:
- location = response['location']
+ if "if-none-match" in headers:
+ del headers["if-none-match"]
+ if "if-modified-since" in headers:
+ del headers["if-modified-since"]
+ if (
+ "authorization" in headers
+ and not self.forward_authorization_headers
+ ):
+ del headers["authorization"]
+ if "location" in response:
+ location = response["location"]
old_response = copy.deepcopy(response)
- if 'content-location' not in old_response:
- old_response['content-location'] = absolute_uri
+ if "content-location" not in old_response:
+ old_response["content-location"] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
- redirect_method = "GET"
- body = None
+ redirect_method = "GET"
+ body = None
(response, content) = self.request(
- location, method=redirect_method, body=body,
- headers=headers, redirections=redirections - 1)
+ location,
+ method=redirect_method,
+ body=body,
+ headers=headers,
+ redirections=redirections - 1,
+ )
response.previous = old_response
else:
- raise RedirectLimit("Redirected more times than redirection_limit allows.", response, content)
+ raise RedirectLimit(
+ "Redirected more times than redirection_limit allows.",
+ response,
+ content,
+ )
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
- if 'content-location' not in response:
- response['content-location'] = absolute_uri
+ if "content-location" not in response:
+ response["content-location"] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
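From the caller's side the redirect chain above surfaces through Response.previous; a hedged sketch against a hypothetical redirecting URL:

    h = Http()
    h.follow_redirects = True                # the default
    resp, content = h.request("http://example.com/old-path")
    # resp is the final response; each intermediate hop hangs off resp.previous,
    # and a cached 301 for GET/HEAD also sets "-x-permanent-redirect-url"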
@@ -1328,12 +1689,19 @@ class Http(object):
def _normalize_headers(self, headers):
return _normalize_headers(headers)
-# Need to catch and rebrand some exceptions
-# Then need to optionally turn all exceptions into status codes
-# including all socket.* and httplib.* exceptions.
-
-
- def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
+ # Need to catch and rebrand some exceptions
+ # Then need to optionally turn all exceptions into status codes
+ # including all socket.* and httplib.* exceptions.
+
+ def request(
+ self,
+ uri,
+ method="GET",
+ body=None,
+ headers=None,
+ redirections=DEFAULT_MAX_REDIRECTS,
+ connection_type=None,
+ ):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin
with either 'http' or 'https'. The value of 'uri' must be an absolute URI.
@@ -1360,48 +1728,46 @@ a string that contains the response entity body.
else:
headers = self._normalize_headers(headers)
- if 'user-agent' not in headers:
- headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
+ if "user-agent" not in headers:
+ headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
- domain_port = authority.split(":")[0:2]
- if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
- scheme = 'https'
- authority = domain_port[0]
-
- conn_key = scheme+":"+authority
- if conn_key in self.connections:
- conn = self.connections[conn_key]
- else:
+
+ conn_key = scheme + ":" + authority
+ conn = self.connections.get(conn_key)
+ if conn is None:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
if issubclass(connection_type, HTTPSConnectionWithTimeout):
if certs:
conn = self.connections[conn_key] = connection_type(
- authority, key_file=certs[0][0],
- cert_file=certs[0][1], timeout=self.timeout,
- proxy_info=self.proxy_info,
- ca_certs=self.ca_certs,
- disable_ssl_certificate_validation=
- self.disable_ssl_certificate_validation)
+ authority,
+ key_file=certs[0][0],
+ cert_file=certs[0][1],
+ timeout=self.timeout,
+ proxy_info=self.proxy_info,
+ ca_certs=self.ca_certs,
+ disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+ )
else:
conn = self.connections[conn_key] = connection_type(
- authority, timeout=self.timeout,
- proxy_info=self.proxy_info,
- ca_certs=self.ca_certs,
- disable_ssl_certificate_validation=
- self.disable_ssl_certificate_validation)
+ authority,
+ timeout=self.timeout,
+ proxy_info=self.proxy_info,
+ ca_certs=self.ca_certs,
+ disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,
+ )
else:
conn = self.connections[conn_key] = connection_type(
- authority, timeout=self.timeout,
- proxy_info=self.proxy_info)
+ authority, timeout=self.timeout, proxy_info=self.proxy_info
+ )
conn.set_debuglevel(debuglevel)
- if 'range' not in headers and 'accept-encoding' not in headers:
- headers['accept-encoding'] = 'gzip, deflate'
+ if "range" not in headers and "accept-encoding" not in headers:
+ headers["accept-encoding"] = "gzip, deflate"
info = email.message.Message()
cached_value = None
@@ -1410,12 +1776,13 @@ a string that contains the response entity body.
cached_value = self.cache.get(cachekey)
if cached_value:
try:
- info, content = cached_value.split(b'\r\n\r\n', 1)
+ info, content = cached_value.split(b"\r\n\r\n", 1)
info = email.message_from_bytes(info)
for k, v in info.items():
- if v.startswith('=?') and v.endswith('?='):
- info.replace_header(k,
- str(*email.header.decode_header(v)[0]))
+ if v.startswith("=?") and v.endswith("?="):
+ info.replace_header(
+ k, str(*email.header.decode_header(v)[0])
+ )
except (IndexError, ValueError):
self.cache.delete(cachekey)
cachekey = None
@@ -1423,9 +1790,15 @@ a string that contains the response entity body.
else:
cachekey = None
- if method in self.optimistic_concurrency_methods and self.cache and 'etag' in info and not self.ignore_etag and 'if-match' not in headers:
+ if (
+ method in self.optimistic_concurrency_methods
+ and self.cache
+ and "etag" in info
+ and not self.ignore_etag
+ and "if-match" not in headers
+ ):
# http://www.w3.org/1999/04/Editing/
- headers['if-match'] = info['etag']
+ headers["if-match"] = info["etag"]
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
@@ -1433,24 +1806,36 @@ a string that contains the response entity body.
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
- if method in ['GET', 'HEAD'] and 'vary' in info:
- vary = info['vary']
- vary_headers = vary.lower().replace(' ', '').split(',')
+ if method in ["GET", "HEAD"] and "vary" in info:
+ vary = info["vary"]
+ vary_headers = vary.lower().replace(" ", "").split(",")
for header in vary_headers:
- key = '-varied-%s' % header
+ key = "-varied-%s" % header
value = info[key]
if headers.get(header, None) != value:
- cached_value = None
- break
-
- if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
- if '-x-permanent-redirect-url' in info:
+ cached_value = None
+ break
+
+ if (
+ cached_value
+ and method in ["GET", "HEAD"]
+ and self.cache
+ and "range" not in headers
+ ):
+ if "-x-permanent-redirect-url" in info:
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
- raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
+ raise RedirectLimit(
+ "Redirected more times than redirection_limit allows.",
+ {},
+ "",
+ )
(response, new_content) = self.request(
- info['-x-permanent-redirect-url'], method='GET',
- headers=headers, redirections=redirections - 1)
+ info["-x-permanent-redirect-url"],
+ method="GET",
+ headers=headers,
+ redirections=redirections - 1,
+ )
response.previous = Response(info)
response.previous.fromcache = True
else:
@@ -1466,7 +1851,7 @@ a string that contains the response entity body.
if entry_disposition == "FRESH":
if not cached_value:
- info['status'] = '504'
+ info["status"] = "504"
content = b""
response = Response(info)
if cached_value:
@@ -1474,14 +1859,28 @@ a string that contains the response entity body.
return (response, content)
if entry_disposition == "STALE":
- if 'etag' in info and not self.ignore_etag and not 'if-none-match' in headers:
- headers['if-none-match'] = info['etag']
- if 'last-modified' in info and not 'last-modified' in headers:
- headers['if-modified-since'] = info['last-modified']
+ if (
+ "etag" in info
+ and not self.ignore_etag
+ and not "if-none-match" in headers
+ ):
+ headers["if-none-match"] = info["etag"]
+ if "last-modified" in info and not "last-modified" in headers:
+ headers["if-modified-since"] = info["last-modified"]
elif entry_disposition == "TRANSPARENT":
pass
- (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+ (response, new_content) = self._request(
+ conn,
+ authority,
+ uri,
+ request_uri,
+ method,
+ body,
+ headers,
+ redirections,
+ cachekey,
+ )
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
@@ -1494,7 +1893,9 @@ a string that contains the response entity body.
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
- _updateCache(headers, merged_response, content, self.cache, cachekey)
+ _updateCache(
+ headers, merged_response, content, self.cache, cachekey
+ )
response = merged_response
response.status = 200
response.fromcache = True
@@ -1506,12 +1907,22 @@ a string that contains the response entity body.
content = new_content
else:
cc = _parse_cache_control(headers)
- if 'only-if-cached'in cc:
- info['status'] = '504'
+ if "only-if-cached" in cc:
+ info["status"] = "504"
response = Response(info)
content = b""
else:
- (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
+ (response, content) = self._request(
+ conn,
+ authority,
+ uri,
+ request_uri,
+ method,
+ body,
+ headers,
+ redirections,
+ cachekey,
+ )
except Exception as e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
@@ -1521,40 +1932,43 @@ a string that contains the response entity body.
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = b"Request Timeout"
- response = Response({
- "content-type": "text/plain",
- "status": "408",
- "content-length": len(content)
- })
+ response = Response(
+ {
+ "content-type": "text/plain",
+ "status": "408",
+ "content-length": len(content),
+ }
+ )
response.reason = "Request Timeout"
else:
- content = str(e).encode('utf-8')
- response = Response({
- "content-type": "text/plain",
- "status": "400",
- "content-length": len(content)
- })
+ content = str(e).encode("utf-8")
+ response = Response(
+ {
+ "content-type": "text/plain",
+ "status": "400",
+ "content-length": len(content),
+ }
+ )
response.reason = "Bad Request"
else:
raise
-
return (response, content)
-
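End to end, request() supports the library's canonical usage; a minimal sketch with a directory-backed cache and a hypothetical URL:

    h = Http(".cache")                    # FileCache rooted in a local directory
    resp, content = h.request("http://example.org/", "GET")
    print(resp.status, resp.fromcache)    # e.g. 200 False on the first fetch
    resp2, _ = h.request("http://example.org/", "GET")
    print(resp2.fromcache)                # True if the response was cacheable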
class Response(dict):
"""An object more like email.message than httplib.HTTPResponse."""
"""Is this response from our local cache"""
fromcache = False
+ """HTTP protocol version used by server.
- """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
+ 10 for HTTP/1.0, 11 for HTTP/1.1.
+ """
version = 11
"Status code returned by server. "
status = 200
-
"""Reason phrase returned by server."""
reason = "Ok"
@@ -1568,24 +1982,23 @@ class Response(dict):
key = key.lower()
prev = self.get(key)
if prev is not None:
- value = ', '.join((prev, value))
+ value = ", ".join((prev, value))
self[key] = value
self.status = info.status
- self['status'] = str(self.status)
+ self["status"] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.message.Message):
for key, value in list(info.items()):
self[key.lower()] = value
- self.status = int(self['status'])
+ self.status = int(self["status"])
else:
for key, value in info.items():
self[key.lower()] = value
- self.status = int(self.get('status', self.status))
-
+ self.status = int(self.get("status", self.status))
def __getattr__(self, name):
- if name == 'dict':
+ if name == "dict":
return self
else:
raise AttributeError(name)
diff --git a/python3/httplib2/certs.py b/python3/httplib2/certs.py
new file mode 100644
index 0000000..59d1ffc
--- /dev/null
+++ b/python3/httplib2/certs.py
@@ -0,0 +1,42 @@
+"""Utilities for certificate management."""
+
+import os
+
+certifi_available = False
+certifi_where = None
+try:
+ from certifi import where as certifi_where
+ certifi_available = True
+except ImportError:
+ pass
+
+custom_ca_locater_available = False
+custom_ca_locater_where = None
+try:
+ from ca_certs_locater import get as custom_ca_locater_where
+ custom_ca_locater_available = True
+except ImportError:
+ pass
+
+
+BUILTIN_CA_CERTS = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "cacerts.txt"
+)
+
+
+def where():
+ env = os.environ.get("HTTPLIB2_CA_CERTS")
+ if env is not None:
+ if os.path.isfile(env):
+ return env
+ else:
+ raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS not a valid file")
+ if custom_ca_locater_available:
+ return custom_ca_locater_where()
+ if certifi_available:
+ return certifi_where()
+ return BUILTIN_CA_CERTS
+
+
+if __name__ == "__main__":
+ print(where())
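The lookup order in where() is HTTPLIB2_CA_CERTS, then ca_certs_locater, then certifi, then the bundled cacerts.txt. A quick sketch with a hypothetical bundle path:

    import os

    os.environ["HTTPLIB2_CA_CERTS"] = "/tmp/my-ca-bundle.pem"  # must exist on disk
    where()  # -> "/tmp/my-ca-bundle.pem", or RuntimeError if the file is missing
    del os.environ["HTTPLIB2_CA_CERTS"]
    where()  # falls through to ca_certs_locater, certifi, or cacerts.txt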
diff --git a/python3/httplib2/iri2uri.py b/python3/httplib2/iri2uri.py
index 98985f8..86e361e 100644
--- a/python3/httplib2/iri2uri.py
+++ b/python3/httplib2/iri2uri.py
@@ -1,110 +1,124 @@
-"""
-iri2uri
-
-Converts an IRI to a URI.
-
-"""
-__author__ = "Joe Gregorio (joe@bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = []
-__version__ = "1.0.0"
-__license__ = "MIT"
-__history__ = """
-"""
-
-import urllib.parse
-
-
-# Convert an IRI to a URI following the rules in RFC 3987
-#
-# The characters we need to enocde and escape are defined in the spec:
-#
-# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
-# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
-# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
-# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
-# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
-# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
-# / %xD0000-DFFFD / %xE1000-EFFFD
-
-escape_range = [
- (0xA0, 0xD7FF),
- (0xE000, 0xF8FF),
- (0xF900, 0xFDCF),
- (0xFDF0, 0xFFEF),
- (0x10000, 0x1FFFD),
- (0x20000, 0x2FFFD),
- (0x30000, 0x3FFFD),
- (0x40000, 0x4FFFD),
- (0x50000, 0x5FFFD),
- (0x60000, 0x6FFFD),
- (0x70000, 0x7FFFD),
- (0x80000, 0x8FFFD),
- (0x90000, 0x9FFFD),
- (0xA0000, 0xAFFFD),
- (0xB0000, 0xBFFFD),
- (0xC0000, 0xCFFFD),
- (0xD0000, 0xDFFFD),
- (0xE1000, 0xEFFFD),
- (0xF0000, 0xFFFFD),
- (0x100000, 0x10FFFD),
-]
-
-def encode(c):
- retval = c
- i = ord(c)
- for low, high in escape_range:
- if i < low:
- break
- if i >= low and i <= high:
- retval = "".join(["%%%2X" % o for o in c.encode('utf-8')])
- break
- return retval
-
-
-def iri2uri(uri):
- """Convert an IRI to a URI. Note that IRIs must be
- passed in a unicode strings. That is, do not utf-8 encode
- the IRI before passing it into the function."""
- if isinstance(uri ,str):
- (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
- authority = authority.encode('idna').decode('utf-8')
- # For each character in 'ucschar' or 'iprivate'
- # 1. encode as utf-8
- # 2. then %-encode each octet of that utf-8
- uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
- uri = "".join([encode(c) for c in uri])
- return uri
-
-if __name__ == "__main__":
- import unittest
-
- class Test(unittest.TestCase):
-
- def test_uris(self):
- """Test that URIs are invariant under the transformation."""
- invariant = [
- "ftp://ftp.is.co.za/rfc/rfc1808.txt",
- "http://www.ietf.org/rfc/rfc2396.txt",
- "ldap://[2001:db8::7]/c=GB?objectClass?one",
- "mailto:John.Doe@example.com",
- "news:comp.infosystems.www.servers.unix",
- "tel:+1-816-555-1212",
- "telnet://192.0.2.16:80/",
- "urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
- for uri in invariant:
- self.assertEqual(uri, iri2uri(uri))
-
- def test_iri(self):
- """ Test that the right type of escaping is done for each part of the URI."""
- self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri("http://\N{COMET}.com/\N{COMET}"))
- self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri("http://bitworking.org/?fred=\N{COMET}"))
- self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri("http://bitworking.org/#\N{COMET}"))
- self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
- self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
- self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
- self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
-
- unittest.main()
-
-
+# -*- coding: utf-8 -*-
+"""Converts an IRI to a URI."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__version__ = "1.0.0"
+__license__ = "MIT"
+
+import urllib.parse
+
+# Convert an IRI to a URI following the rules in RFC 3987
+#
+# The characters we need to encode and escape are defined in the spec:
+#
+# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
+# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
+# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
+# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
+# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
+# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
+# / %xD0000-DFFFD / %xE1000-EFFFD
+
+escape_range = [
+ (0xA0, 0xD7FF),
+ (0xE000, 0xF8FF),
+ (0xF900, 0xFDCF),
+ (0xFDF0, 0xFFEF),
+ (0x10000, 0x1FFFD),
+ (0x20000, 0x2FFFD),
+ (0x30000, 0x3FFFD),
+ (0x40000, 0x4FFFD),
+ (0x50000, 0x5FFFD),
+ (0x60000, 0x6FFFD),
+ (0x70000, 0x7FFFD),
+ (0x80000, 0x8FFFD),
+ (0x90000, 0x9FFFD),
+ (0xA0000, 0xAFFFD),
+ (0xB0000, 0xBFFFD),
+ (0xC0000, 0xCFFFD),
+ (0xD0000, 0xDFFFD),
+ (0xE1000, 0xEFFFD),
+ (0xF0000, 0xFFFFD),
+ (0x100000, 0x10FFFD),
+]
+
+
+def encode(c):
+ retval = c
+ i = ord(c)
+ for low, high in escape_range:
+ if i < low:
+ break
+ if i >= low and i <= high:
+ retval = "".join(["%%%2X" % o for o in c.encode("utf-8")])
+ break
+ return retval
+
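For a character outside the allowed ranges, encode() percent-escapes each UTF-8 octet; U+2604 (COMET) is three octets, while plain ASCII passes through:

    encode("\N{COMET}")  # -> "%E2%98%84"
    encode("a")          # -> "a" (below 0xA0, left untouched)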
+
+def iri2uri(uri):
+ """Convert an IRI to a URI. Note that IRIs must be
+    passed in as unicode strings. That is, do not utf-8 encode
+ the IRI before passing it into the function."""
+ if isinstance(uri, str):
+ (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri)
+ authority = authority.encode("idna").decode("utf-8")
+ # For each character in 'ucschar' or 'iprivate'
+ # 1. encode as utf-8
+ # 2. then %-encode each octet of that utf-8
+ uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment))
+ uri = "".join([encode(c) for c in uri])
+ return uri
+
+
+if __name__ == "__main__":
+ import unittest
+
+ class Test(unittest.TestCase):
+ def test_uris(self):
+ """Test that URIs are invariant under the transformation."""
+ invariant = [
+ "ftp://ftp.is.co.za/rfc/rfc1808.txt",
+ "http://www.ietf.org/rfc/rfc2396.txt",
+ "ldap://[2001:db8::7]/c=GB?objectClass?one",
+ "mailto:John.Doe@example.com",
+ "news:comp.infosystems.www.servers.unix",
+ "tel:+1-816-555-1212",
+ "telnet://192.0.2.16:80/",
+ "urn:oasis:names:specification:docbook:dtd:xml:4.1.2",
+ ]
+ for uri in invariant:
+ self.assertEqual(uri, iri2uri(uri))
+
+ def test_iri(self):
+ """Test that the right type of escaping is done for each part of the URI."""
+ self.assertEqual(
+ "http://xn--o3h.com/%E2%98%84",
+ iri2uri("http://\N{COMET}.com/\N{COMET}"),
+ )
+ self.assertEqual(
+ "http://bitworking.org/?fred=%E2%98%84",
+ iri2uri("http://bitworking.org/?fred=\N{COMET}"),
+ )
+ self.assertEqual(
+ "http://bitworking.org/#%E2%98%84",
+ iri2uri("http://bitworking.org/#\N{COMET}"),
+ )
+ self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}"))
+ self.assertEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"),
+ )
+ self.assertEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")),
+ )
+ self.assertNotEqual(
+ "/fred?bar=%E2%98%9A#%E2%98%84",
+ iri2uri(
+ "/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8")
+ ),
+ )
+
+ unittest.main()
diff --git a/python3/httplib2/socks.py b/python3/httplib2/socks.py
index 7fc0591..24235df 100644
--- a/python3/httplib2/socks.py
+++ b/python3/httplib2/socks.py
@@ -1,4 +1,5 @@
"""SocksiPy - Python SOCKS module.
+
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
@@ -24,20 +25,14 @@ OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
-
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
-"""
-
-"""
-
-Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
-for use in PyLoris (http://pyloris.sourceforge.net/)
+Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for
+use in PyLoris (http://pyloris.sourceforge.net/).
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
-mainly to merge bug fixes found in Sourceforge
-
+mainly to merge bug fixes found in Sourceforge.
"""
import base64
@@ -45,8 +40,8 @@ import socket
import struct
import sys
-if getattr(socket, 'socket', None) is None:
- raise ImportError('socket.socket missing, proxy support unusable')
+if getattr(socket, "socket", None) is None:
+ raise ImportError("socket.socket missing, proxy support unusable")
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
@@ -56,21 +51,42 @@ PROXY_TYPE_HTTP_NO_TUNNEL = 4
_defaultproxy = None
_orgsocket = socket.socket
-class ProxyError(Exception): pass
-class GeneralProxyError(ProxyError): pass
-class Socks5AuthError(ProxyError): pass
-class Socks5Error(ProxyError): pass
-class Socks4Error(ProxyError): pass
-class HTTPError(ProxyError): pass
-_generalerrors = ("success",
+class ProxyError(Exception):
+ pass
+
+
+class GeneralProxyError(ProxyError):
+ pass
+
+
+class Socks5AuthError(ProxyError):
+ pass
+
+
+class Socks5Error(ProxyError):
+ pass
+
+
+class Socks4Error(ProxyError):
+ pass
+
+
+class HTTPError(ProxyError):
+ pass
+
+
+_generalerrors = (
+ "success",
"invalid data",
"not connected",
"not available",
"bad proxy type",
- "bad input")
+ "bad input",
+)
-_socks5errors = ("succeeded",
+_socks5errors = (
+ "succeeded",
"general SOCKS server failure",
"connection not allowed by ruleset",
"Network unreachable",
@@ -79,21 +95,30 @@ _socks5errors = ("succeeded",
"TTL expired",
"Command not supported",
"Address type not supported",
- "Unknown error")
+ "Unknown error",
+)
-_socks5autherrors = ("succeeded",
+_socks5autherrors = (
+ "succeeded",
"authentication is required",
"all offered authentication methods were rejected",
"unknown username or invalid password",
- "unknown error")
+ "unknown error",
+)
-_socks4errors = ("request granted",
+_socks4errors = (
+ "request granted",
"request rejected or failed",
"request rejected because SOCKS server cannot connect to identd on the client",
- "request rejected because the client program and identd report different user-ids",
- "unknown error")
+ "request rejected because the client program and identd report different "
+ "user-ids",
+ "unknown error",
+)
-def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
+
+def setdefaultproxy(
+ proxytype=None, addr=None, port=None, rdns=True, username=None, password=None
+):
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
Sets a default proxy which all further socksocket objects will use,
unless explicitly changed.
@@ -101,11 +126,14 @@ def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=No
global _defaultproxy
_defaultproxy = (proxytype, addr, port, rdns, username, password)
+
def wrapmodule(module):
"""wrapmodule(module)
+
Attempts to replace a module's socket library with a SOCKS socket. Must set
a default proxy using setdefaultproxy(...) first.
- This will only work on modules that import socket directly into the namespace;
+ This will only work on modules that import socket directly into the
+ namespace;
most of the Python Standard Library falls into this category.
"""
if _defaultproxy != None:
@@ -113,6 +141,7 @@ def wrapmodule(module):
else:
raise GeneralProxyError((4, "no proxy specified"))
+
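setdefaultproxy() and wrapmodule() are typically used together: register a process-wide default, then monkey-patch a module that does "import socket" at top level. A minimal sketch, assuming a SOCKS5 proxy on 127.0.0.1:1080 and ftplib as the wrapped module (both placeholders):

    import ftplib

    import socks

    # Every socksocket created from here on defaults to this proxy.
    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
    # Swap ftplib's socket class for socksocket; wrapmodule() raises
    # GeneralProxyError((4, "no proxy specified")) if no default is set.
    socks.wrapmodule(ftplib)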
class socksocket(socket.socket):
"""socksocket([family[, type[, proto]]]) -> socket object
Open a SOCKS enabled socket. The parameters are the same as
@@ -120,7 +149,9 @@ class socksocket(socket.socket):
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
"""
- def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
+ def __init__(
+ self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None
+ ):
_orgsocket.__init__(self, family, type, proto, _sock)
if _defaultproxy != None:
self.__proxy = _defaultproxy
@@ -137,8 +168,9 @@ class socksocket(socket.socket):
"""
data = self.recv(count)
while len(data) < count:
- d = self.recv(count-len(data))
- if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
+ d = self.recv(count - len(data))
+ if not d:
+ raise GeneralProxyError((0, "connection closed unexpectedly"))
data = data + d
return data
@@ -167,18 +199,34 @@ class socksocket(socket.socket):
hdrs.remove(endpt)
host = host.split(" ")[1]
endpt = endpt.split(" ")
- if (self.__proxy[4] != None and self.__proxy[5] != None):
+ if self.__proxy[4] != None and self.__proxy[5] != None:
hdrs.insert(0, self.__getauthheader())
hdrs.insert(0, "Host: %s" % host)
hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
return "\r\n".join(hdrs)
def __getauthheader(self):
- auth = self.__proxy[4] + ":" + self.__proxy[5]
- return "Proxy-Authorization: Basic " + base64.b64encode(auth)
-
- def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None, headers=None):
+ username = self.__proxy[4]
+ password = self.__proxy[5]
+ if isinstance(username, str):
+ username = username.encode()
+ if isinstance(password, str):
+ password = password.encode()
+ auth = username + b":" + password
+ return "Proxy-Authorization: Basic " + base64.b64encode(auth).decode()
+
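The str-to-bytes normalization above is what keeps base64.b64encode() from raising TypeError when the credentials arrive as text. A worked example with made-up credentials:

    import base64

    username, password = "joe", "password"  # hypothetical credentials
    auth = username.encode() + b":" + password.encode()
    # b64encode() accepts only bytes; decode() turns the encoded value
    # back into the str that the header line is concatenated from.
    "Proxy-Authorization: Basic " + base64.b64encode(auth).decode()
    # -> 'Proxy-Authorization: Basic am9lOnBhc3N3b3Jk'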
+ def setproxy(
+ self,
+ proxytype=None,
+ addr=None,
+ port=None,
+ rdns=True,
+ username=None,
+ password=None,
+ headers=None,
+ ):
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
+
Sets the proxy to be used.
proxytype - The type of the proxy to be used. Three types
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
@@ -193,7 +241,8 @@ class socksocket(socket.socket):
The default is no authentication.
password - Password to authenticate with to the server.
Only relevant when username is also provided.
- headers - Additional or modified headers for the proxy connect request.
+ headers - Additional or modified headers for the proxy connect
+ request.
"""
self.__proxy = (proxytype, addr, port, rdns, username, password, headers)
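For a single connection rather than a process-wide default, setproxy() takes the same arguments per socket. A sketch with placeholder endpoints and credentials:

    import socks

    s = socks.socksocket()
    # Tunnel through an HTTP CONNECT proxy; host, port and credentials
    # here are illustrative only.
    s.setproxy(socks.PROXY_TYPE_HTTP, "proxy.example.com", 8080,
               username="joe", password="password")
    s.connect(("example.com", 443))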
@@ -202,15 +251,15 @@ class socksocket(socket.socket):
Negotiates a connection through a SOCKS5 server.
"""
# First we'll send the authentication packages we support.
- if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
+ if (self.__proxy[4] != None) and (self.__proxy[5] != None):
# The username/password details were supplied to the
# setproxy method so we support the USERNAME/PASSWORD
# authentication (in addition to the standard none).
- self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
+ self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02))
else:
# No username/password were entered, therefore we
# only support connections with no authentication.
- self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
+ self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00))
# We'll receive the server's response to determine which
# method was selected
chosenauth = self.__recvall(2)
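The two struct.pack() calls above emit the RFC 1928 greeting: version byte, method count, then the method identifiers (0x00 = no authentication, 0x02 = username/password). The exact bytes on the wire:

    import struct

    struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02)  # with credentials -> b'\x05\x02\x00\x02'
    struct.pack("BBB", 0x05, 0x01, 0x00)         # without         -> b'\x05\x01\x00'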
@@ -224,7 +273,13 @@ class socksocket(socket.socket):
elif chosenauth[1:2] == chr(0x02).encode():
# Okay, we need to perform a basic username/password
# authentication.
- self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
+ self.sendall(
+ chr(0x01).encode()
+ + chr(len(self.__proxy[4]))
+ + self.__proxy[4]
+ + chr(len(self.__proxy[5]))
+ + self.__proxy[5]
+ )
authstat = self.__recvall(2)
if authstat[0:1] != chr(0x01).encode():
# Bad response
@@ -243,7 +298,7 @@ class socksocket(socket.socket):
else:
raise GeneralProxyError((1, _generalerrors[1]))
# Now we can request the actual connection
- req = struct.pack('BBB', 0x05, 0x01, 0x00)
+ req = struct.pack("BBB", 0x05, 0x01, 0x00)
# If the given destination address is an IP address, we'll
# use the IPv4 address request even if remote resolving was specified.
try:
@@ -254,7 +309,12 @@ class socksocket(socket.socket):
if self.__proxy[3]:
# Resolve remotely
ipaddr = None
- req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr.encode()
+ req = (
+ req
+ + chr(0x03).encode()
+ + chr(len(destaddr)).encode()
+ + destaddr.encode()
+ )
else:
# Resolve locally
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
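Both branches extend the same RFC 1928 CONNECT request: ATYP 0x03 plus a length-prefixed hostname when the proxy resolves the name remotely, ATYP 0x01 plus the packed 4-byte address when it is resolved locally; the destination port follows as a big-endian short. A sketch with a placeholder host:

    import socket
    import struct

    req = struct.pack("BBB", 0x05, 0x01, 0x00)  # VER=5, CMD=CONNECT, RSV=0
    # Remote resolution: one length byte, then the raw hostname.
    req + chr(0x03).encode() + chr(len("example.com")).encode() + "example.com".encode()
    # Local resolution: the already-resolved IPv4 address, packed.
    req + chr(0x01).encode() + socket.inet_aton("93.184.216.34")
    # Either way the request ends with struct.pack(">H", destport).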
@@ -269,7 +329,7 @@ class socksocket(socket.socket):
elif resp[1:2] != chr(0x00).encode():
# Connection failed
self.close()
- if ord(resp[1:2])<=8:
+ if ord(resp[1:2]) <= 8:
raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
else:
raise Socks5Error((9, _socks5errors[9]))
@@ -281,7 +341,7 @@ class socksocket(socket.socket):
boundaddr = self.__recvall(ord(resp[4:5]))
else:
self.close()
- raise GeneralProxyError((1,_generalerrors[1]))
+ raise GeneralProxyError((1, _generalerrors[1]))
boundport = struct.unpack(">H", self.__recvall(2))[0]
self.__proxysockname = (boundaddr, boundport)
if ipaddr != None:
@@ -308,7 +368,7 @@ class socksocket(socket.socket):
"""
return self.__proxypeername
- def __negotiatesocks4(self,destaddr,destport):
+ def __negotiatesocks4(self, destaddr, destport):
"""__negotiatesocks4(self,destaddr,destport)
Negotiates a connection through a SOCKS4 server.
"""
@@ -340,7 +400,7 @@ class socksocket(socket.socket):
if resp[0:1] != chr(0x00).encode():
# Bad data
self.close()
- raise GeneralProxyError((1,_generalerrors[1]))
+ raise GeneralProxyError((1, _generalerrors[1]))
if resp[1:2] != chr(0x5A).encode():
# Server returned an error
self.close()
@@ -350,7 +410,10 @@ class socksocket(socket.socket):
else:
raise Socks4Error((94, _socks4errors[4]))
# Get the bound address/port
- self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
+ self.__proxysockname = (
+ socket.inet_ntoa(resp[4:]),
+ struct.unpack(">H", resp[2:4])[0],
+ )
if rmtrslv != None:
self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
else:
@@ -365,18 +428,18 @@ class socksocket(socket.socket):
addr = socket.gethostbyname(destaddr)
else:
addr = destaddr
- headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
+ headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
wrote_host_header = False
wrote_auth_header = False
if self.__proxy[6] != None:
for key, val in self.__proxy[6].iteritems():
headers += [key, ": ", val, "\r\n"]
- wrote_host_header = (key.lower() == "host")
- wrote_auth_header = (key.lower() == "proxy-authorization")
+ wrote_host_header = key.lower() == "host"
+ wrote_auth_header = key.lower() == "proxy-authorization"
if not wrote_host_header:
headers += ["Host: ", destaddr, "\r\n"]
if not wrote_auth_header:
- if (self.__proxy[4] != None and self.__proxy[5] != None):
+ if self.__proxy[4] != None and self.__proxy[5] != None:
headers += [self.__getauthheader(), "\r\n"]
headers.append("\r\n")
self.sendall("".join(headers).encode())
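Joined and encoded, the header list above becomes a plain-text CONNECT request; for a placeholder destination with basic proxy auth, sendall() transmits roughly:

    # "".join(headers).encode() with made-up addresses and credentials:
    (b"CONNECT 93.184.216.34:443 HTTP/1.1\r\n"
     b"Host: example.com\r\n"
     b"Proxy-Authorization: Basic am9lOnBhc3N3b3Jk\r\n"
     b"\r\n")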
@@ -409,7 +472,12 @@ class socksocket(socket.socket):
To select the proxy server use setproxy().
"""
# Do a minimal input check first
- if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], (str, bytes))) or (type(destpair[1]) != int):
+ if (
+ (not type(destpair) in (list, tuple))
+ or (len(destpair) < 2)
+ or (not isinstance(destpair[0], (str, bytes)))
+ or (type(destpair[1]) != int)
+ ):
raise GeneralProxyError((5, _generalerrors[5]))
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
if self.__proxy[2] != None:
@@ -423,23 +491,23 @@ class socksocket(socket.socket):
portnum = self.__proxy[2]
else:
portnum = 1080
- _orgsocket.connect(self,(self.__proxy[1], portnum))
+ _orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatesocks4(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
- _orgsocket.connect(self,(self.__proxy[1], portnum))
+ _orgsocket.connect(self, (self.__proxy[1], portnum))
self.__negotiatehttp(destpair[0], destpair[1])
elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
if self.__proxy[2] != None:
portnum = self.__proxy[2]
else:
portnum = 8080
- _orgsocket.connect(self,(self.__proxy[1],portnum))
+ _orgsocket.connect(self, (self.__proxy[1], portnum))
if destpair[1] == 443:
- self.__negotiatehttp(destpair[0],destpair[1])
+ self.__negotiatehttp(destpair[0], destpair[1])
else:
self.__httptunnel = False
elif self.__proxy[0] == None:
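connect() accepts the same (host, port) shape as socket.connect(); anything else fails the minimal input check before any traffic is sent. A quick illustration, with a placeholder proxy:

    import socks

    s = socks.socksocket()
    s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
    s.connect(("example.com", 80))        # ok: (str, int) tuple
    # s.connect(("example.com", "80"))    # GeneralProxyError((5, "bad input"))
    # s.connect("example.com:80")         # likewise: not a list/tuple pair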
diff --git a/python3/httplib2test.py b/python3/httplib2test.py
index a4afae9..c1fd484 100755
--- a/python3/httplib2test.py
+++ b/python3/httplib2test.py
@@ -1,1640 +1,1929 @@
-#!/usr/bin/env python3
-"""
-httplib2test
-
-A set of unit tests for httplib2.py.
-
-Requires Python 3.0 or later
-"""
-
-__author__ = "Joe Gregorio (joe@bitworking.org)"
-__copyright__ = "Copyright 2006, Joe Gregorio"
-__contributors__ = ["Mark Pilgrim"]
-__license__ = "MIT"
-__history__ = """ """
-__version__ = "0.2 ($Rev: 118 $)"
-
-import base64
-import http.client
-import httplib2
-import io
-import os
-import pickle
-import socket
-import ssl
-import sys
-import time
-import unittest
-import urllib.parse
-
-# The test resources base uri
-base = 'http://bitworking.org/projects/httplib2/test/'
-#base = 'http://localhost/projects/httplib2/test/'
-cacheDirName = ".cache"
-
-
-class CredentialsTest(unittest.TestCase):
- def test(self):
- c = httplib2.Credentials()
- c.add("joe", "password")
- self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
- self.assertEqual(("joe", "password"), list(c.iter(""))[0])
- c.add("fred", "password2", "wellformedweb.org")
- self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
- self.assertEqual(1, len(list(c.iter("bitworking.org"))))
- self.assertEqual(2, len(list(c.iter("wellformedweb.org"))))
- self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
- c.clear()
- self.assertEqual(0, len(list(c.iter("bitworking.org"))))
- c.add("fred", "password2", "wellformedweb.org")
- self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
- self.assertEqual(0, len(list(c.iter("bitworking.org"))))
- self.assertEqual(0, len(list(c.iter(""))))
-
-
-class ParserTest(unittest.TestCase):
- def testFromStd66(self):
- self.assertEqual( ('http', 'example.com', '', None, None ), httplib2.parse_uri("http://example.com"))
- self.assertEqual( ('https', 'example.com', '', None, None ), httplib2.parse_uri("https://example.com"))
- self.assertEqual( ('https', 'example.com:8080', '', None, None ), httplib2.parse_uri("https://example.com:8080"))
- self.assertEqual( ('http', 'example.com', '/', None, None ), httplib2.parse_uri("http://example.com/"))
- self.assertEqual( ('http', 'example.com', '/path', None, None ), httplib2.parse_uri("http://example.com/path"))
- self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', None ), httplib2.parse_uri("http://example.com/path?a=1&b=2"))
- self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', 'fred' ), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"))
- self.assertEqual( ('http', 'example.com', '/path', 'a=1&b=2', 'fred' ), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"))
-
-
-class UrlNormTest(unittest.TestCase):
- def test(self):
- self.assertEqual( "http://example.org/", httplib2.urlnorm("http://example.org")[-1])
- self.assertEqual( "http://example.org/", httplib2.urlnorm("http://EXAMple.org")[-1])
- self.assertEqual( "http://example.org/?=b", httplib2.urlnorm("http://EXAMple.org?=b")[-1])
- self.assertEqual( "http://example.org/mypath?a=b", httplib2.urlnorm("http://EXAMple.org/mypath?a=b")[-1])
- self.assertEqual( "http://localhost:80/", httplib2.urlnorm("http://localhost:80")[-1])
- self.assertEqual( httplib2.urlnorm("http://localhost:80/"), httplib2.urlnorm("HTTP://LOCALHOST:80"))
- try:
- httplib2.urlnorm("/")
- self.fail("Non-absolute URIs should raise an exception")
- except httplib2.RelativeURIError:
- pass
-
-class UrlSafenameTest(unittest.TestCase):
- def test(self):
- # Test that different URIs end up generating different safe names
- self.assertEqual( "example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f", httplib2.safename("http://example.org/fred/?a=b"))
- self.assertEqual( "example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b", httplib2.safename("http://example.org/fred?/a=b"))
- self.assertEqual( "www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968", httplib2.safename("http://www.example.org/fred?/a=b"))
- self.assertEqual( httplib2.safename(httplib2.urlnorm("http://www")[-1]), httplib2.safename(httplib2.urlnorm("http://WWW")[-1]))
- self.assertEqual( "www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d", httplib2.safename("https://www.example.org/fred?/a=b"))
- self.assertNotEqual( httplib2.safename("http://www"), httplib2.safename("https://www"))
- # Test the max length limits
- uri = "http://" + ("w" * 200) + ".org"
- uri2 = "http://" + ("w" * 201) + ".org"
- self.assertNotEqual( httplib2.safename(uri2), httplib2.safename(uri))
- # Max length should be 200 + 1 (",") + 32
- self.assertEqual(233, len(httplib2.safename(uri2)))
- self.assertEqual(233, len(httplib2.safename(uri)))
- # Unicode
- if sys.version_info >= (2,3):
- self.assertEqual( "xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193", httplib2.safename("http://\u2304.org/fred/?a=b"))
-
-class _MyResponse(io.BytesIO):
- def __init__(self, body, **kwargs):
- io.BytesIO.__init__(self, body)
- self.headers = kwargs
-
- def items(self):
- return self.headers.items()
-
- def iteritems(self):
- return iter(self.headers.items())
-
-
-class _MyHTTPConnection(object):
- "This class is just a mock of httplib.HTTPConnection used for testing"
-
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None):
- self.host = host
- self.port = port
- self.timeout = timeout
- self.log = ""
- self.sock = None
-
- def set_debuglevel(self, level):
- pass
-
- def connect(self):
- "Connect to a host on a given port."
- pass
-
- def close(self):
- pass
-
- def request(self, method, request_uri, body, headers):
- pass
-
- def getresponse(self):
- return _MyResponse(b"the body", status="200")
-
-
-class _MyHTTPBadStatusConnection(object):
- "Mock of httplib.HTTPConnection that raises BadStatusLine."
-
- num_calls = 0
-
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None):
- self.host = host
- self.port = port
- self.timeout = timeout
- self.log = ""
- self.sock = None
- _MyHTTPBadStatusConnection.num_calls = 0
-
- def set_debuglevel(self, level):
- pass
-
- def connect(self):
- pass
-
- def close(self):
- pass
-
- def request(self, method, request_uri, body, headers):
- pass
-
- def getresponse(self):
- _MyHTTPBadStatusConnection.num_calls += 1
- raise http.client.BadStatusLine("")
-
-
-class HttpTest(unittest.TestCase):
- def setUp(self):
- if os.path.exists(cacheDirName):
- [os.remove(os.path.join(cacheDirName, file)) for file in os.listdir(cacheDirName)]
- self.http = httplib2.Http(cacheDirName)
- self.http.clear_credentials()
-
- def testIPv6NoSSL(self):
- try:
- self.http.request("http://[::1]/")
- except socket.gaierror:
- self.fail("should get the address family right for IPv6")
- except socket.error:
- # Even if IPv6 isn't installed on a machine it should just raise socket.error
- pass
-
- def testIPv6SSL(self):
- try:
- self.http.request("https://[::1]/")
- except socket.gaierror:
- self.fail("should get the address family right for IPv6")
- except socket.error:
- # Even if IPv6 isn't installed on a machine it should just raise socket.error
- pass
-
- def testConnectionType(self):
- self.http.force_exception_to_status_code = False
- response, content = self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection)
- self.assertEqual(response['content-location'], "http://bitworking.org")
- self.assertEqual(content, b"the body")
-
-
- def testBadStatusLineRetry(self):
- old_retries = httplib2.RETRIES
- httplib2.RETRIES = 1
- self.http.force_exception_to_status_code = False
- try:
- response, content = self.http.request("http://bitworking.org",
- connection_type=_MyHTTPBadStatusConnection)
- except http.client.BadStatusLine:
- self.assertEqual(2, _MyHTTPBadStatusConnection.num_calls)
- httplib2.RETRIES = old_retries
-
-
- def testGetUnknownServer(self):
- self.http.force_exception_to_status_code = False
- try:
- self.http.request("http://fred.bitworking.org/")
- self.fail("An httplib2.ServerNotFoundError Exception must be thrown on an unresolvable server.")
- except httplib2.ServerNotFoundError:
- pass
-
- # Now test with exceptions turned off
- self.http.force_exception_to_status_code = True
-
- (response, content) = self.http.request("http://fred.bitworking.org/")
- self.assertEqual(response['content-type'], 'text/plain')
- self.assertTrue(content.startswith(b"Unable to find"))
- self.assertEqual(response.status, 400)
-
- def testGetConnectionRefused(self):
- self.http.force_exception_to_status_code = False
- try:
- self.http.request("http://localhost:7777/")
-            self.fail("A socket.error exception must be thrown on connection refused.")
- except socket.error:
- pass
-
- # Now test with exceptions turned off
- self.http.force_exception_to_status_code = True
-
- (response, content) = self.http.request("http://localhost:7777/")
- self.assertEqual(response['content-type'], 'text/plain')
- self.assertTrue(b"Connection refused" in content)
- self.assertEqual(response.status, 400)
-
- def testGetIRI(self):
- if sys.version_info >= (2,3):
- uri = urllib.parse.urljoin(base, "reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}")
- (response, content) = self.http.request(uri, "GET")
- d = self.reflector(content)
- self.assertTrue('QUERY_STRING' in d)
- self.assertTrue(d['QUERY_STRING'].find('%D0%82') > 0)
-
- def testGetIsDefaultMethod(self):
- # Test that GET is the default method
- uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
- (response, content) = self.http.request(uri)
- self.assertEqual(response['x-method'], "GET")
-
- def testDifferentMethods(self):
- # Test that all methods can be used
- uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
- for method in ["GET", "PUT", "DELETE", "POST"]:
- (response, content) = self.http.request(uri, method, body=b" ")
- self.assertEqual(response['x-method'], method)
-
- def testHeadRead(self):
- # Test that we don't try to read the response of a HEAD request
- # since httplib blocks response.read() for HEAD requests.
- # Oddly enough this doesn't appear as a problem when doing HEAD requests
- # against Apache servers.
- uri = "http://www.google.com/"
- (response, content) = self.http.request(uri, "HEAD")
- self.assertEqual(response.status, 200)
- self.assertEqual(content, b"")
-
- def testGetNoCache(self):
- # Test that can do a GET w/o the cache turned on.
- http = httplib2.Http()
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.previous, None)
-
- def testGetOnlyIfCachedCacheHit(self):
- # Test that can do a GET with cache and 'only-if-cached'
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET")
- (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
- self.assertEqual(response.fromcache, True)
- self.assertEqual(response.status, 200)
-
- def testGetOnlyIfCachedCacheMiss(self):
- # Test that can do a GET with no cache with 'only-if-cached'
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
- self.assertEqual(response.fromcache, False)
- self.assertEqual(response.status, 504)
-
- def testGetOnlyIfCachedNoCacheAtAll(self):
- # Test that can do a GET with no cache with 'only-if-cached'
- # Of course, there might be an intermediary beyond us
- # that responds to the 'only-if-cached', so this
- # test can't really be guaranteed to pass.
- http = httplib2.Http()
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
- self.assertEqual(response.fromcache, False)
- self.assertEqual(response.status, 504)
-
- def testUserAgent(self):
- # Test that we provide a default user-agent
- uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertTrue(content.startswith(b"Python-httplib2/"))
-
- def testUserAgentNonDefault(self):
- # Test that the default user-agent can be over-ridden
-
- uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
- (response, content) = self.http.request(uri, "GET", headers={'User-Agent': 'fred/1.0'})
- self.assertEqual(response.status, 200)
- self.assertTrue(content.startswith(b"fred/1.0"))
-
- def testGet300WithLocation(self):
-        # Test that we automatically follow 300 redirects if a Location: header is provided
- uri = urllib.parse.urljoin(base, "300/with-location-header.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 300)
- self.assertEqual(response.previous.fromcache, False)
-
- # Confirm that the intermediate 300 is not cached
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 300)
- self.assertEqual(response.previous.fromcache, False)
-
- def testGet300WithLocationNoRedirect(self):
-        # Test that we do not follow 300 redirects when follow_redirects is off
- self.http.follow_redirects = False
- uri = urllib.parse.urljoin(base, "300/with-location-header.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 300)
-
- def testGet300WithoutLocation(self):
- # Not giving a Location: header in a 300 response is acceptable
- # In which case we just return the 300 response
- uri = urllib.parse.urljoin(base, "300/without-location-header.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 300)
- self.assertTrue(response['content-type'].startswith("text/html"))
- self.assertEqual(response.previous, None)
-
- def testGet301(self):
- # Test that we automatically follow 301 redirects
- # and that we cache the 301 response
- uri = urllib.parse.urljoin(base, "301/onestep.asis")
- destination = urllib.parse.urljoin(base, "302/final-destination.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertTrue('content-location' in response)
- self.assertEqual(response['content-location'], destination)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 301)
- self.assertEqual(response.previous.fromcache, False)
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response['content-location'], destination)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 301)
- self.assertEqual(response.previous.fromcache, True)
-
- def testHead301(self):
- # Test that we automatically follow 301 redirects
- uri = urllib.parse.urljoin(base, "301/onestep.asis")
- (response, content) = self.http.request(uri, "HEAD")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.previous.status, 301)
- self.assertEqual(response.previous.fromcache, False)
-
- def testGet301NoRedirect(self):
-        # Test that we do not follow 301 redirects when follow_redirects is off
- self.http.follow_redirects = False
- uri = urllib.parse.urljoin(base, "301/onestep.asis")
- destination = urllib.parse.urljoin(base, "302/final-destination.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 301)
-
-
- def testGet302(self):
- # Test that we automatically follow 302 redirects
- # and that we DO NOT cache the 302 response
- uri = urllib.parse.urljoin(base, "302/onestep.asis")
- destination = urllib.parse.urljoin(base, "302/final-destination.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response['content-location'], destination)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 302)
- self.assertEqual(response.previous.fromcache, False)
-
- uri = urllib.parse.urljoin(base, "302/onestep.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- self.assertEqual(response['content-location'], destination)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 302)
- self.assertEqual(response.previous.fromcache, False)
- self.assertEqual(response.previous['content-location'], uri)
-
- uri = urllib.parse.urljoin(base, "302/twostep.asis")
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 302)
- self.assertEqual(response.previous.fromcache, False)
-
- def testGet302RedirectionLimit(self):
- # Test that we can set a lower redirection limit
- # and that we raise an exception when we exceed
- # that limit.
- self.http.force_exception_to_status_code = False
-
- uri = urllib.parse.urljoin(base, "302/twostep.asis")
- try:
- (response, content) = self.http.request(uri, "GET", redirections = 1)
- self.fail("This should not happen")
- except httplib2.RedirectLimit:
- pass
- except Exception as e:
-            self.fail("Threw wrong kind of exception")
-
-        # Re-run the test without the exceptions
- self.http.force_exception_to_status_code = True
-
- (response, content) = self.http.request(uri, "GET", redirections = 1)
- self.assertEqual(response.status, 500)
- self.assertTrue(response.reason.startswith("Redirected more"))
- self.assertEqual("302", response['status'])
- self.assertTrue(content.startswith(b"<html>"))
- self.assertTrue(response.previous != None)
-
- def testGet302NoLocation(self):
- # Test that we throw an exception when we get
- # a 302 with no Location: header.
- self.http.force_exception_to_status_code = False
- uri = urllib.parse.urljoin(base, "302/no-location.asis")
- try:
- (response, content) = self.http.request(uri, "GET")
- self.fail("Should never reach here")
- except httplib2.RedirectMissingLocation:
- pass
- except Exception as e:
-            self.fail("Threw wrong kind of exception")
-
-        # Re-run the test without the exceptions
- self.http.force_exception_to_status_code = True
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 500)
- self.assertTrue(response.reason.startswith("Redirected but"))
- self.assertEqual("302", response['status'])
- self.assertTrue(content.startswith(b"This is content"))
-
- def testGet301ViaHttps(self):
- # Google always redirects to http://google.com
- (response, content) = self.http.request("https://code.google.com/apis/", "GET")
- self.assertEqual(200, response.status)
- self.assertEqual(301, response.previous.status)
-
- def testGetViaHttps(self):
- # Test that we can handle HTTPS
- (response, content) = self.http.request("https://google.com/adsense/", "GET")
- self.assertEqual(200, response.status)
-
- def testGetViaHttpsSpecViolationOnLocation(self):
- # Test that we follow redirects through HTTPS
- # even if they violate the spec by including
- # a relative Location: header instead of an
- # absolute one.
- (response, content) = self.http.request("https://google.com/adsense", "GET")
- self.assertEqual(200, response.status)
- self.assertNotEqual(None, response.previous)
-
-
- def testGetViaHttpsKeyCert(self):
- # At this point I can only test
- # that the key and cert files are passed in
- # correctly to httplib. It would be nice to have
- # a real https endpoint to test against.
- http = httplib2.Http(timeout=2)
-
- http.add_certificate("akeyfile", "acertfile", "bitworking.org")
- try:
- (response, content) = http.request("https://bitworking.org", "GET")
- except AttributeError:
- self.assertEqual(http.connections["https:bitworking.org"].key_file, "akeyfile")
- self.assertEqual(http.connections["https:bitworking.org"].cert_file, "acertfile")
- except IOError:
- # Skip on 3.2
- pass
-
- try:
- (response, content) = http.request("https://notthere.bitworking.org", "GET")
- except httplib2.ServerNotFoundError:
- self.assertEqual(http.connections["https:notthere.bitworking.org"].key_file, None)
- self.assertEqual(http.connections["https:notthere.bitworking.org"].cert_file, None)
- except IOError:
- # Skip on 3.2
- pass
-
- def testSslCertValidation(self):
- # Test that we get an ssl.SSLError when specifying a non-existent CA
- # certs file.
- http = httplib2.Http(ca_certs='/nosuchfile')
- self.assertRaises(IOError,
- http.request, "https://www.google.com/", "GET")
-
- # Test that we get a SSLHandshakeError if we try to access
- # https://www.google.com, using a CA cert file that doesn't contain
- # the CA Google uses (i.e., simulating a cert that's not signed by a
- # trusted CA).
- other_ca_certs = os.path.join(
- os.path.dirname(os.path.abspath(httplib2.__file__ )),
- "test", "other_cacerts.txt")
- http = httplib2.Http(ca_certs=other_ca_certs)
- self.assertRaises(ssl.SSLError,
- http.request,"https://www.google.com/", "GET")
-
- def testSniHostnameValidation(self):
- self.http.request("https://google.com/", method="GET")
-
- def testGet303(self):
- # Do a follow-up GET on a Location: header
- # returned from a POST that gave a 303.
- uri = urllib.parse.urljoin(base, "303/303.cgi")
- (response, content) = self.http.request(uri, "POST", " ")
- self.assertEqual(response.status, 200)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 303)
-
- def testGet303NoRedirect(self):
- # Do a follow-up GET on a Location: header
- # returned from a POST that gave a 303.
- self.http.follow_redirects = False
- uri = urllib.parse.urljoin(base, "303/303.cgi")
- (response, content) = self.http.request(uri, "POST", " ")
- self.assertEqual(response.status, 303)
-
- def test303ForDifferentMethods(self):
- # Test that all methods can be used
- uri = urllib.parse.urljoin(base, "303/redirect-to-reflector.cgi")
- for (method, method_on_303) in [("PUT", "GET"), ("DELETE", "GET"), ("POST", "GET"), ("GET", "GET"), ("HEAD", "GET")]:
- (response, content) = self.http.request(uri, method, body=b" ")
- self.assertEqual(response['x-method'], method_on_303)
-
- def testGet304(self):
- # Test that we use ETags properly to validate our cache
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'must-revalidate'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
-
- cache_file_name = os.path.join(cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1]))
- f = open(cache_file_name, "r")
- status_line = f.readline()
- f.close()
-
- self.assertTrue(status_line.startswith("status:"))
-
- (response, content) = self.http.request(uri, "HEAD", headers = {'accept-encoding': 'identity'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'range': 'bytes=0-0'})
- self.assertEqual(response.status, 206)
- self.assertEqual(response.fromcache, False)
-
- def testGetIgnoreEtag(self):
- # Test that we can forcibly ignore ETags
- uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0'})
- d = self.reflector(content)
- self.assertTrue('HTTP_IF_NONE_MATCH' in d)
-
- self.http.ignore_etag = True
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0'})
- d = self.reflector(content)
- self.assertEqual(response.fromcache, False)
- self.assertFalse('HTTP_IF_NONE_MATCH' in d)
-
- def testOverrideEtag(self):
- # Test that we can forcibly ignore ETags
- uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0'})
- d = self.reflector(content)
- self.assertTrue('HTTP_IF_NONE_MATCH' in d)
- self.assertNotEqual(d['HTTP_IF_NONE_MATCH'], "fred")
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'cache-control': 'max-age=0', 'if-none-match': 'fred'})
- d = self.reflector(content)
- self.assertTrue('HTTP_IF_NONE_MATCH' in d)
- self.assertEqual(d['HTTP_IF_NONE_MATCH'], "fred")
-
-# MAP: commented this out because it consistently fails
-# def testGet304EndToEnd(self):
-# # Test that end to end headers get overwritten in the cache
-# uri = urllib.parse.urljoin(base, "304/end2end.cgi")
-# (response, content) = self.http.request(uri, "GET")
-# self.assertNotEqual(response['etag'], "")
-# old_date = response['date']
-# time.sleep(2)
-#
-# (response, content) = self.http.request(uri, "GET", headers = {'Cache-Control': 'max-age=0'})
-# # The response should be from the cache, but the Date: header should be updated.
-# new_date = response['date']
-# self.assertNotEqual(new_date, old_date)
-# self.assertEqual(response.status, 200)
-# self.assertEqual(response.fromcache, True)
-
- def testGet304LastModified(self):
- # Test that we can still handle a 304
- # by only using the last-modified cache validator.
- uri = urllib.parse.urljoin(base, "304/last-modified-only/last-modified-only.txt")
- (response, content) = self.http.request(uri, "GET")
-
- self.assertNotEqual(response['last-modified'], "")
- (response, content) = self.http.request(uri, "GET")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
-
- def testGet307(self):
- # Test that we do follow 307 redirects but
- # do not cache the 307
- uri = urllib.parse.urljoin(base, "307/onestep.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 307)
- self.assertEqual(response.previous.fromcache, False)
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- self.assertEqual(content, b"This is the final destination.\n")
- self.assertEqual(response.previous.status, 307)
- self.assertEqual(response.previous.fromcache, False)
-
- def testGet410(self):
- # Test that we pass 410's through
- uri = urllib.parse.urljoin(base, "410/410.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 410)
-
- def testVaryHeaderSimple(self):
- """
- RFC 2616 13.6
- When the cache receives a subsequent request whose Request-URI
- specifies one or more cache entries including a Vary header field,
- the cache MUST NOT use such a cache entry to construct a response
- to the new request unless all of the selecting request-headers
- present in the new request match the corresponding stored
- request-headers in the original request.
- """
- # test that the vary header is sent
- uri = urllib.parse.urljoin(base, "vary/accept.asis")
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
- self.assertEqual(response.status, 200)
- self.assertTrue('vary' in response)
-
-        # get the resource again, from the cache, since the Accept header in
-        # this request is the same as in the original request
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True, msg="Should be from cache")
-
-        # get the resource again, not from cache since the Accept header does not match
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False, msg="Should not be from cache")
-
- # get the resource again, without any Accept header, so again no match
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False, msg="Should not be from cache")
-
- def testNoVary(self):
- pass
-        # when there is no Vary header, a different Accept header (for example)
-        # should not affect whether the cache is used
- # test that the vary header is not sent
- # uri = urllib.parse.urljoin(base, "vary/no-vary.asis")
- # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
- # self.assertEqual(response.status, 200)
- # self.assertFalse('vary' in response)
- #
- # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
- # self.assertEqual(response.status, 200)
- # self.assertEqual(response.fromcache, True, msg="Should be from cache")
- #
- # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
- # self.assertEqual(response.status, 200)
- # self.assertEqual(response.fromcache, True, msg="Should be from cache")
-
- def testVaryHeaderDouble(self):
- uri = urllib.parse.urljoin(base, "vary/accept-double.asis")
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
- self.assertEqual(response.status, 200)
- self.assertTrue('vary' in response)
-
- # we are from cache
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
- self.assertEqual(response.fromcache, True, msg="Should be from cache")
-
- (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
-        # get the resource again, not from cache; the varied headers don't match exactly
- (response, content) = self.http.request(uri, "GET", headers={'Accept-Language': 'da'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False, msg="Should not be from cache")
-
- def testVaryUnusedHeader(self):
- # A header's value is not considered to vary if it's not used at all.
- uri = urllib.parse.urljoin(base, "vary/unused-header.asis")
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain'})
- self.assertEqual(response.status, 200)
- self.assertTrue('vary' in response)
-
- # we are from cache
- (response, content) = self.http.request(uri, "GET", headers={
- 'Accept': 'text/plain',})
- self.assertEqual(response.fromcache, True, msg="Should be from cache")
-
- def testHeadGZip(self):
- # Test that we don't try to decompress a HEAD response
- uri = urllib.parse.urljoin(base, "gzip/final-destination.txt")
- (response, content) = self.http.request(uri, "HEAD")
- self.assertEqual(response.status, 200)
- self.assertNotEqual(int(response['content-length']), 0)
- self.assertEqual(content, b"")
-
- def testGetGZip(self):
- # Test that we support gzip compression
- uri = urllib.parse.urljoin(base, "gzip/final-destination.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertFalse('content-encoding' in response)
- self.assertTrue('-content-encoding' in response)
- self.assertEqual(int(response['content-length']), len(b"This is the final destination.\n"))
- self.assertEqual(content, b"This is the final destination.\n")
-
- def testPostAndGZipResponse(self):
- uri = urllib.parse.urljoin(base, "gzip/post.cgi")
- (response, content) = self.http.request(uri, "POST", body=" ")
- self.assertEqual(response.status, 200)
- self.assertFalse('content-encoding' in response)
- self.assertTrue('-content-encoding' in response)
-
- def testGetGZipFailure(self):
- # Test that we raise a good exception when the gzip fails
- self.http.force_exception_to_status_code = False
- uri = urllib.parse.urljoin(base, "gzip/failed-compression.asis")
- try:
- (response, content) = self.http.request(uri, "GET")
- self.fail("Should never reach here")
- except httplib2.FailedToDecompressContent:
- pass
- except Exception:
- self.fail("Threw wrong kind of exception")
-
-        # Re-run the test without the exceptions
- self.http.force_exception_to_status_code = True
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 500)
- self.assertTrue(response.reason.startswith("Content purported"))
-
- def testIndividualTimeout(self):
- uri = urllib.parse.urljoin(base, "timeout/timeout.cgi")
- http = httplib2.Http(timeout=1)
- http.force_exception_to_status_code = True
-
- (response, content) = http.request(uri)
- self.assertEqual(response.status, 408)
- self.assertTrue(response.reason.startswith("Request Timeout"))
- self.assertTrue(content.startswith(b"Request Timeout"))
-
-
- def testGetDeflate(self):
- # Test that we support deflate compression
- uri = urllib.parse.urljoin(base, "deflate/deflated.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertFalse('content-encoding' in response)
- self.assertEqual(int(response['content-length']), len("This is the final destination."))
- self.assertEqual(content, b"This is the final destination.")
-
- def testGetDeflateFailure(self):
- # Test that we raise a good exception when the deflate fails
- self.http.force_exception_to_status_code = False
-
- uri = urllib.parse.urljoin(base, "deflate/failed-compression.asis")
- try:
- (response, content) = self.http.request(uri, "GET")
- self.fail("Should never reach here")
- except httplib2.FailedToDecompressContent:
- pass
- except Exception:
- self.fail("Threw wrong kind of exception")
-
-        # Re-run the test without the exceptions
- self.http.force_exception_to_status_code = True
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 500)
- self.assertTrue(response.reason.startswith("Content purported"))
-
- def testGetDuplicateHeaders(self):
- # Test that duplicate headers get concatenated via ','
- uri = urllib.parse.urljoin(base, "duplicate-headers/multilink.asis")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(content, b"This is content\n")
- self.assertEqual(response['link'].split(",")[0], '<http://bitworking.org>; rel="home"; title="BitWorking"')
-
- def testGetCacheControlNoCache(self):
- # Test Cache-Control: no-cache on requests
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'Cache-Control': 'no-cache'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- def testGetCacheControlPragmaNoCache(self):
- # Test Pragma: no-cache on requests
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertNotEqual(response['etag'], "")
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
-
- (response, content) = self.http.request(uri, "GET", headers = {'accept-encoding': 'identity', 'Pragma': 'no-cache'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- def testGetCacheControlNoStoreRequest(self):
- # A no-store request means that the response should not be stored.
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
-
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- def testGetCacheControlNoStoreResponse(self):
- # A no-store response means that the response should not be stored.
- uri = urllib.parse.urljoin(base, "no-store/no-store.asis")
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- def testGetCacheControlNoCacheNoStoreRequest(self):
-        # Test that a no-store, no-cache request clears the entry from the
-        # cache even if it was cached previously.
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
-
- (response, content) = self.http.request(uri, "GET")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
- (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
-
- def testUpdateInvalidatesCache(self):
-        # Test that calling PUT or DELETE on a
-        # URI that is cached invalidates the cache.
- uri = urllib.parse.urljoin(base, "304/test_etag.txt")
-
- (response, content) = self.http.request(uri, "GET")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "DELETE")
- self.assertEqual(response.status, 405)
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.fromcache, False)
-
- def testUpdateUsesCachedETag(self):
- # Test that we natively support http://www.w3.org/1999/04/Editing/
- uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "PUT", body="foo")
- self.assertEqual(response.status, 200)
- (response, content) = self.http.request(uri, "PUT", body="foo")
- self.assertEqual(response.status, 412)
-
-
- def testUpdatePatchUsesCachedETag(self):
- # Test that we natively support http://www.w3.org/1999/04/Editing/
- uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "PATCH", body="foo")
- self.assertEqual(response.status, 200)
- (response, content) = self.http.request(uri, "PATCH", body="foo")
- self.assertEqual(response.status, 412)
-
- def testUpdateUsesCachedETagAndOCMethod(self):
- # Test that we natively support http://www.w3.org/1999/04/Editing/
- uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- self.http.optimistic_concurrency_methods.append("DELETE")
- (response, content) = self.http.request(uri, "DELETE")
- self.assertEqual(response.status, 200)
-
-
- def testUpdateUsesCachedETagOverridden(self):
- # Test that we natively support http://www.w3.org/1999/04/Editing/
- uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
-
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, False)
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
- self.assertEqual(response.fromcache, True)
- (response, content) = self.http.request(uri, "PUT", body="foo", headers={'if-match': 'fred'})
- self.assertEqual(response.status, 412)
-
- def testBasicAuth(self):
- # Test Basic Authentication
- uri = urllib.parse.urljoin(base, "basic/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- uri = urllib.parse.urljoin(base, "basic/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- self.http.add_credentials('joe', 'password')
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- uri = urllib.parse.urljoin(base, "basic/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- def testBasicAuthWithDomain(self):
- # Test Basic Authentication
- uri = urllib.parse.urljoin(base, "basic/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- uri = urllib.parse.urljoin(base, "basic/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- self.http.add_credentials('joe', 'password', "example.org")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- uri = urllib.parse.urljoin(base, "basic/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- domain = urllib.parse.urlparse(base)[1]
- self.http.add_credentials('joe', 'password', domain)
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- uri = urllib.parse.urljoin(base, "basic/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
-
-
-
-
-
- def testBasicAuthTwoDifferentCredentials(self):
- # Test Basic Authentication with multiple sets of credentials
- uri = urllib.parse.urljoin(base, "basic2/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- uri = urllib.parse.urljoin(base, "basic2/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- self.http.add_credentials('fred', 'barney')
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- uri = urllib.parse.urljoin(base, "basic2/file.txt")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- def testBasicAuthNested(self):
- # Test Basic Authentication with resources
- # that are nested
- uri = urllib.parse.urljoin(base, "basic-nested/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- uri = urllib.parse.urljoin(base, "basic-nested/subdir")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- # Now add in credentials one at a time and test.
- self.http.add_credentials('joe', 'password')
-
- uri = urllib.parse.urljoin(base, "basic-nested/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- uri = urllib.parse.urljoin(base, "basic-nested/subdir")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- self.http.add_credentials('fred', 'barney')
-
- uri = urllib.parse.urljoin(base, "basic-nested/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- uri = urllib.parse.urljoin(base, "basic-nested/subdir")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- def testDigestAuth(self):
- # Test that we support Digest Authentication
- uri = urllib.parse.urljoin(base, "digest/")
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 401)
-
- self.http.add_credentials('joe', 'password')
- (response, content) = self.http.request(uri, "GET")
- self.assertEqual(response.status, 200)
-
- uri = urllib.parse.urljoin(base, "digest/file.txt")
- (response, content) = self.http.request(uri, "GET")
-
- def testDigestAuthNextNonceAndNC(self):
-        # Test that if the server sets nextnonce we reset
-        # the nonce count back to 1
- uri = urllib.parse.urljoin(base, "digest/file.txt")
- self.http.add_credentials('joe', 'password')
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- info = httplib2._parse_www_authenticate(response, 'authentication-info')
- self.assertEqual(response.status, 200)
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- info2 = httplib2._parse_www_authenticate(response, 'authentication-info')
- self.assertEqual(response.status, 200)
-
- if 'nextnonce' in info:
- self.assertEqual(info2['nc'], 1)
-
- def testDigestAuthStale(self):
- # Test that we can handle a nonce becoming stale
- uri = urllib.parse.urljoin(base, "digest-expire/file.txt")
- self.http.add_credentials('joe', 'password')
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- info = httplib2._parse_www_authenticate(response, 'authentication-info')
- self.assertEqual(response.status, 200)
-
- time.sleep(3)
- # Sleep long enough that the nonce becomes stale
-
- (response, content) = self.http.request(uri, "GET", headers = {"cache-control":"no-cache"})
- self.assertFalse(response.fromcache)
- self.assertTrue(response._stale_digest)
- info3 = httplib2._parse_www_authenticate(response, 'authentication-info')
- self.assertEqual(response.status, 200)
-
- def reflector(self, content):
- return dict( [tuple(x.split("=", 1)) for x in content.decode('utf-8').strip().split("\n")] )
-
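reflector() turns the CGI's KEY=value dump into a dict, splitting each line on the first '='; for example:

    content = b"HTTP_USER_AGENT=Python-httplib2/x.y\nQUERY_STRING=a=1"
    # -> {'HTTP_USER_AGENT': 'Python-httplib2/x.y', 'QUERY_STRING': 'a=1'}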
- def testReflector(self):
- uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
- (response, content) = self.http.request(uri, "GET")
- d = self.reflector(content)
- self.assertTrue('HTTP_USER_AGENT' in d)
-
-
- def testConnectionClose(self):
- uri = "http://www.google.com/"
- (response, content) = self.http.request(uri, "GET")
- for c in self.http.connections.values():
- self.assertNotEqual(None, c.sock)
- (response, content) = self.http.request(uri, "GET", headers={"connection": "close"})
- for c in self.http.connections.values():
- self.assertEqual(None, c.sock)
-
- def testPickleHttp(self):
- pickled_http = pickle.dumps(self.http)
- new_http = pickle.loads(pickled_http)
-
- self.assertEqual(sorted(new_http.__dict__.keys()),
- sorted(self.http.__dict__.keys()))
- for key in new_http.__dict__:
- if key in ('certificates', 'credentials'):
- self.assertEqual(new_http.__dict__[key].credentials,
- self.http.__dict__[key].credentials)
- elif key == 'cache':
- self.assertEqual(new_http.__dict__[key].cache,
- self.http.__dict__[key].cache)
- else:
- self.assertEqual(new_http.__dict__[key],
- self.http.__dict__[key])
-
- def testPickleHttpWithConnection(self):
- self.http.request('http://bitworking.org',
- connection_type=_MyHTTPConnection)
- pickled_http = pickle.dumps(self.http)
- new_http = pickle.loads(pickled_http)
-
- self.assertEqual(list(self.http.connections.keys()),
- ['http:bitworking.org'])
- self.assertEqual(new_http.connections, {})
-
- def testPickleCustomRequestHttp(self):
- def dummy_request(*args, **kwargs):
- return new_request(*args, **kwargs)
- dummy_request.dummy_attr = 'dummy_value'
-
- self.http.request = dummy_request
- pickled_http = pickle.dumps(self.http)
- self.assertFalse(b"S'request'" in pickled_http)
-
-try:
- import memcache
- class HttpTestMemCached(HttpTest):
- def setUp(self):
- self.cache = memcache.Client(['127.0.0.1:11211'], debug=0)
- #self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
- self.http = httplib2.Http(self.cache)
- self.cache.flush_all()
- # Not exactly sure why the sleep is needed here, but
- # if not present then some unit tests that rely on caching
- # fail. Memcached seems to lose some sets immediately
- # after a flush_all if the set is to a value that
- # was previously cached. (Maybe the flush is handled async?)
- time.sleep(1)
- self.http.clear_credentials()
-except:
- pass
-
-
-
-# ------------------------------------------------------------------------
-
-class HttpPrivateTest(unittest.TestCase):
-
- def testParseCacheControl(self):
- # Test that we can parse the Cache-Control header
- self.assertEqual({}, httplib2._parse_cache_control({}))
- self.assertEqual({'no-cache': 1}, httplib2._parse_cache_control({'cache-control': ' no-cache'}))
- cc = httplib2._parse_cache_control({'cache-control': ' no-cache, max-age = 7200'})
- self.assertEqual(cc['no-cache'], 1)
- self.assertEqual(cc['max-age'], '7200')
- cc = httplib2._parse_cache_control({'cache-control': ' , '})
- self.assertEqual(cc[''], 1)
-
- try:
- cc = httplib2._parse_cache_control({'cache-control': 'Max-age=3600;post-check=1800,pre-check=3600'})
- self.assertTrue("max-age" in cc)
- except:
- self.fail("Should not throw exception")
-
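The shape these asserts expect follows one rule: split the header value on commas, then split each directive on its first '='; bare directives map to 1 and valued ones keep their string value. A re-implementation of just that rule, for illustration (not httplib2's parser):

    def parse_cache_control(value):
        directives = {}
        for part in value.split(","):
            if "=" in part:
                key, _, val = part.partition("=")
                directives[key.strip().lower()] = val.strip()
            else:
                directives[part.strip().lower()] = 1
        return directives

    assert parse_cache_control(" no-cache, max-age = 7200") == {"no-cache": 1, "max-age": "7200"}
    assert parse_cache_control(" , ") == {"": 1}
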
-
-
-
- def testNormalizeHeaders(self):
- # Test that we normalize headers to lowercase
- h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
- self.assertTrue('cache-control' in h)
- self.assertTrue('other' in h)
- self.assertEqual('Stuff', h['other'])
-
- def testConvertByteStr(self):
- with self.assertRaises(TypeError):
- httplib2._convert_byte_str(4)
- self.assertEqual('Hello World', httplib2._convert_byte_str(b'Hello World'))
- self.assertEqual('Bye World', httplib2._convert_byte_str('Bye World'))
-
- def testExpirationModelTransparent(self):
- # Test that no-cache makes our request TRANSPARENT
- response_headers = {
- 'cache-control': 'max-age=7200'
- }
- request_headers = {
- 'cache-control': 'no-cache'
- }
- self.assertEqual("TRANSPARENT", httplib2._entry_disposition(response_headers, request_headers))
-
- def testMaxAgeNonNumeric(self):
- # Test that a non-numeric max-age value makes the entry STALE
- response_headers = {
- 'cache-control': 'max-age=fred, min-fresh=barney'
- }
- request_headers = {
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
-
- def testExpirationModelNoCacheResponse(self):
- # The date and expires point to an entry that should be
- # FRESH, but the no-cache overrides that.
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
- 'cache-control': 'no-cache'
- }
- request_headers = {
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelStaleRequestMustReval(self):
- # must-revalidate forces STALE
- self.assertEqual("STALE", httplib2._entry_disposition({}, {'cache-control': 'must-revalidate'}))
-
- def testExpirationModelStaleResponseMustReval(self):
- # must-revalidate forces STALE
- self.assertEqual("STALE", httplib2._entry_disposition({'cache-control': 'must-revalidate'}, {}))
-
- def testExpirationModelFresh(self):
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
- 'cache-control': 'max-age=2'
- }
- request_headers = {
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
- time.sleep(3)
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationMaxAge0(self):
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
- 'cache-control': 'max-age=0'
- }
- request_headers = {
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelDateAndExpires(self):
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
- }
- request_headers = {
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
- time.sleep(3)
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpiresZero(self):
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': "0",
- }
- request_headers = {
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelDateOnly(self):
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+3)),
- }
- request_headers = {
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelOnlyIfCached(self):
- response_headers = {
- }
- request_headers = {
- 'cache-control': 'only-if-cached',
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelMaxAgeBoth(self):
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'cache-control': 'max-age=2'
- }
- request_headers = {
- 'cache-control': 'max-age=0'
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelDateAndExpiresMinFresh1(self):
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
- }
- request_headers = {
- 'cache-control': 'min-fresh=2'
- }
- self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
-
- def testExpirationModelDateAndExpiresMinFresh2(self):
- now = time.time()
- response_headers = {
- 'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
- 'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
- }
- request_headers = {
- 'cache-control': 'min-fresh=2'
- }
- self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
-
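Aside from the short-circuits (request no-cache gives TRANSPARENT, only-if-cached gives FRESH, must-revalidate on either side gives STALE), every FRESH/STALE expectation above reduces to one comparison: the entry is FRESH while current_age + min-fresh < freshness_lifetime, with the lifetime taken from max-age or Expires minus Date and an unparsable value counting as 0. A sketch of that comparison with the same numbers the tests use:

    def is_fresh(now, date_value, freshness_lifetime, min_fresh=0):
        # current_age approximated as seconds since the Date header was generated
        current_age = max(0.0, now - date_value)
        return current_age + min_fresh < freshness_lifetime

    t = 1_000_000.0
    assert is_fresh(t, t, freshness_lifetime=2)                   # max-age=2, just fetched -> FRESH
    assert not is_fresh(t + 3, t, freshness_lifetime=2)           # three seconds later -> STALE
    assert not is_fresh(t, t, freshness_lifetime=0)               # max-age=0 or Expires <= Date -> STALE
    assert not is_fresh(t, t, freshness_lifetime=2, min_fresh=2)  # min-fresh=2 vs 2s left -> STALE
    assert is_fresh(t, t, freshness_lifetime=4, min_fresh=2)      # min-fresh=2 vs 4s left -> FRESH
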
- def testParseWWWAuthenticateEmpty(self):
- res = httplib2._parse_www_authenticate({})
- self.assertEqual(len(list(res.keys())), 0)
-
- def testParseWWWAuthenticate(self):
- # different uses of spaces around commas
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'})
- self.assertEqual(len(list(res.keys())), 1)
- self.assertEqual(len(list(res['test'].keys())), 5)
-
- # tokens with non-alphanum
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'})
- self.assertEqual(len(list(res.keys())), 1)
- self.assertEqual(len(list(res['t*!%#st'].keys())), 2)
-
- # quoted string with quoted pairs
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Test realm="a \\"test\\" realm"'})
- self.assertEqual(len(list(res.keys())), 1)
- self.assertEqual(res['test']['realm'], 'a "test" realm')
-
- def testParseWWWAuthenticateStrict(self):
- httplib2.USE_WWW_AUTH_STRICT_PARSING = 1
- self.testParseWWWAuthenticate()
- httplib2.USE_WWW_AUTH_STRICT_PARSING = 0
-
- def testParseWWWAuthenticateBasic(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me"'})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
-
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm="MD5"'})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- self.assertEqual('MD5', basic['algorithm'])
-
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm=MD5'})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- self.assertEqual('MD5', basic['algorithm'])
-
- def testParseWWWAuthenticateBasic2(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me",other="fred" '})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- self.assertEqual('fred', basic['other'])
-
- def testParseWWWAuthenticateBasic3(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic REAlm="me" '})
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
-
-
- def testParseWWWAuthenticateDigest(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
-
-
- def testParseWWWAuthenticateMultiple(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
- self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
- self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
-
- def testParseWWWAuthenticateMultiple2(self):
- # Handle an added comma between challenges, which might get thrown in if the challenges were
- # originally sent in separate www-authenticate headers.
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
- self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
- self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
-
- def testParseWWWAuthenticateMultiple3(self):
- # Handle an added comma between challenges, which might get thrown in if the challenges were
- # originally sent in separate www-authenticate headers.
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
- digest = res['digest']
- self.assertEqual('testrealm@host.com', digest['realm'])
- self.assertEqual('auth,auth-int', digest['qop'])
- self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
- self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
- basic = res['basic']
- self.assertEqual('me', basic['realm'])
- wsse = res['wsse']
- self.assertEqual('foo', wsse['realm'])
- self.assertEqual('UsernameToken', wsse['profile'])
-
- def testParseWWWAuthenticateMultiple4(self):
- res = httplib2._parse_www_authenticate({ 'www-authenticate':
- 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
- digest = res['digest']
- self.assertEqual('test-real.m@host.com', digest['realm'])
- self.assertEqual('\tauth,auth-int', digest['qop'])
- self.assertEqual('(*)&^&$%#', digest['nonce'])
-
- def testParseWWWAuthenticateMoreQuoteCombos(self):
- res = httplib2._parse_www_authenticate({'www-authenticate':'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'})
- digest = res['digest']
- self.assertEqual('myrealm', digest['realm'])
-
- def testParseWWWAuthenticateMalformed(self):
- try:
- res = httplib2._parse_www_authenticate({'www-authenticate':'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'})
- self.fail("should raise an exception")
- except httplib2.MalformedHeader:
- pass
-
- def testDigestObject(self):
- credentials = ('joe', 'password')
- host = None
- request_uri = '/projects/httplib2/test/digest/'
- headers = {}
- response = {
- 'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"'
- }
- content = b""
-
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
- d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
- our_request = "authorization: %s" % headers['authorization']
- working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"'
- self.assertEqual(our_request, working_request)
-
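The response="97ed..." value asserted above is the standard RFC 2617 qop=auth computation: response = MD5(HA1:nonce:nc:cnonce:qop:HA2), with HA1 = MD5(user:realm:password) and HA2 = MD5(method:uri). A standalone version of that arithmetic (the standard algorithm, not httplib2 internals), which should reproduce the digest from the test's inputs:

    import hashlib

    def md5hex(s):
        return hashlib.md5(s.encode("utf-8")).hexdigest()

    def digest_response(user, password, realm, method, uri, nonce, nc, cnonce, qop="auth"):
        ha1 = md5hex("%s:%s:%s" % (user, realm, password))
        ha2 = md5hex("%s:%s" % (method, uri))
        return md5hex("%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2))

    print(digest_response("joe", "password", "myrealm", "GET",
                          "/projects/httplib2/test/digest/",
                          "Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",
                          "00000001", "33033375ec278a46"))
    # expected (from the test above): 97ed129401f7cdc60e5db58a80f3ea8b
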
- def testDigestObjectWithOpaque(self):
- credentials = ('joe', 'password')
- host = None
- request_uri = '/projects/httplib2/test/digest/'
- headers = {}
- response = {
- 'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", opaque="atestopaque"'
- }
- content = ""
-
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
- d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
- our_request = "authorization: %s" % headers['authorization']
- working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46", opaque="atestopaque"'
- self.assertEqual(our_request, working_request)
-
- def testDigestObjectStale(self):
- credentials = ('joe', 'password')
- host = None
- request_uri = '/projects/httplib2/test/digest/'
- headers = {}
- response = httplib2.Response({ })
- response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
- response.status = 401
- content = b""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
- # Returns true to force a retry
- self.assertTrue( d.response(response, content) )
-
- def testDigestObjectAuthInfo(self):
- credentials = ('joe', 'password')
- host = None
- request_uri = '/projects/httplib2/test/digest/'
- headers = {}
- response = httplib2.Response({ })
- response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
- response['authentication-info'] = 'nextnonce="fred"'
- content = b""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
- # Returns False because the authentication-info was processed, so no retry is needed
- self.assertFalse( d.response(response, content) )
- self.assertEqual('fred', d.challenge['nonce'])
- self.assertEqual(1, d.challenge['nc'])
-
- def testWsseAlgorithm(self):
- digest = httplib2._wsse_username_token("d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm")
- expected = b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="
- self.assertEqual(expected, digest)
-
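The WSSE UsernameToken digest checked here is defined as Base64(SHA-1(nonce + created + password)); a two-line equivalent using the test's own vector:

    import base64
    import hashlib

    def wsse_digest(nonce, created, password):
        return base64.b64encode(hashlib.sha1((nonce + created + password).encode("utf-8")).digest())

    assert wsse_digest("d36e316282959a9ed4c89851497a717f",
                       "2003-12-15T14:43:07Z", "taadtaadpstcsm") == b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="
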
- def testEnd2End(self):
- # one end to end header
- response = {'content-type': 'application/atom+xml', 'te': 'deflate'}
- end2end = httplib2._get_end2end_headers(response)
- self.assertTrue('content-type' in end2end)
- self.assertTrue('te' not in end2end)
- self.assertTrue('connection' not in end2end)
-
- # one end to end header that gets eliminated
- response = {'connection': 'content-type', 'content-type': 'application/atom+xml', 'te': 'deflate'}
- end2end = httplib2._get_end2end_headers(response)
- self.assertTrue('content-type' not in end2end)
- self.assertTrue('te' not in end2end)
- self.assertTrue('connection' not in end2end)
-
- # Degenerate case of no headers
- response = {}
- end2end = httplib2._get_end2end_headers(response)
- self.assertEqual(0, len(end2end))
-
- # Degenerate case of connection referring to a header not passed in
- response = {'connection': 'content-type'}
- end2end = httplib2._get_end2end_headers(response)
- self.assertEqual(0, len(end2end))
-
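The rule these four cases pin down: a fixed set of hop-by-hop headers (Connection, TE, Transfer-Encoding, and so on) never counts as end-to-end, and neither does any header the Connection header names. A minimal filter with that behavior (illustrative; the hop-by-hop set here is the RFC 2616 list, not necessarily httplib2's exact constant):

    HOP_BY_HOP = {"connection", "keep-alive", "proxy-authenticate", "proxy-authorization",
                  "te", "trailers", "transfer-encoding", "upgrade"}

    def end2end_headers(headers):
        # Headers named in Connection are hop-by-hop for this message only.
        extra = {h.strip().lower() for h in headers.get("connection", "").split(",") if h.strip()}
        return [k for k in headers if k.lower() not in HOP_BY_HOP | extra]

    assert end2end_headers({"content-type": "application/atom+xml", "te": "deflate"}) == ["content-type"]
    assert end2end_headers({"connection": "content-type", "content-type": "x", "te": "deflate"}) == []
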
-
-class TestProxyInfo(unittest.TestCase):
- def setUp(self):
- self.orig_env = dict(os.environ)
-
- def tearDown(self):
- os.environ.clear()
- os.environ.update(self.orig_env)
-
- def test_from_url(self):
- pi = httplib2.proxy_info_from_url('http://myproxy.example.com')
- self.assertEqual(pi.proxy_host, 'myproxy.example.com')
- self.assertEqual(pi.proxy_port, 80)
- self.assertEqual(pi.proxy_user, None)
-
- def test_from_url_ident(self):
- pi = httplib2.proxy_info_from_url('http://zoidberg:fish@someproxy:99')
- self.assertEqual(pi.proxy_host, 'someproxy')
- self.assertEqual(pi.proxy_port, 99)
- self.assertEqual(pi.proxy_user, 'zoidberg')
- self.assertEqual(pi.proxy_pass, 'fish')
-
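The host/port/user/pass expectations in the two tests above are ordinary URL-credential parsing, which the standard library can demonstrate directly (a sketch of the rule, not httplib2's implementation):

    import urllib.parse

    parts = urllib.parse.urlsplit("http://zoidberg:fish@someproxy:99")
    assert (parts.hostname, parts.port, parts.username, parts.password) == \
        ("someproxy", 99, "zoidberg", "fish")
    # Without an explicit port the scheme default applies (80 for http),
    # which is why the first test expects proxy_port == 80.
    assert urllib.parse.urlsplit("http://myproxy.example.com").port is None
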
- def test_from_env(self):
- os.environ['http_proxy'] = 'http://myproxy.example.com:8080'
- pi = httplib2.proxy_info_from_environment()
- self.assertEqual(pi.proxy_host, 'myproxy.example.com')
- self.assertEqual(pi.proxy_port, 8080)
-
- def test_from_env_no_proxy(self):
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['https_proxy'] = 'http://myproxy.example.com:81'
- pi = httplib2.proxy_info_from_environment('https')
- self.assertEqual(pi.proxy_host, 'myproxy.example.com')
- self.assertEqual(pi.proxy_port, 81)
-
- def test_from_env_none(self):
- os.environ.clear()
- pi = httplib2.proxy_info_from_environment()
- self.assertEqual(pi, None)
-
- def test_proxy_headers(self):
- headers = {'key0': 'val0', 'key1': 'val1'}
- pi = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, 'localhost', 1234, proxy_headers = headers)
- self.assertEqual(pi.proxy_headers, headers)
-
- # regression: ensure that httplib2.HTTPConnectionWithTimeout initializes when proxy_info is not supplied
- def test_proxy_init(self):
- connection = httplib2.HTTPConnectionWithTimeout('www.google.com', 80)
- connection.request('GET', '/')
- connection.close()
-
-if __name__ == '__main__':
- unittest.main()
+#!/usr/bin/env python3
+"""A set of unit tests for httplib2.py."""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = ["Mark Pilgrim"]
+__license__ = "MIT"
+__version__ = "0.2 ($Rev: 118 $)"
+
+import base64
+import http.client
+import httplib2
+import io
+import os
+import pickle
+import socket
+import ssl
+import sys
+import time
+import unittest
+import urllib.parse
+
+base = "http://bitworking.org/projects/httplib2/test/"
+cacheDirName = ".cache"
+
+
+class CredentialsTest(unittest.TestCase):
+ def test(self):
+ c = httplib2.Credentials()
+ c.add("joe", "password")
+ self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
+ self.assertEqual(("joe", "password"), list(c.iter(""))[0])
+ c.add("fred", "password2", "wellformedweb.org")
+ self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
+ self.assertEqual(1, len(list(c.iter("bitworking.org"))))
+ self.assertEqual(2, len(list(c.iter("wellformedweb.org"))))
+ self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
+ c.clear()
+ self.assertEqual(0, len(list(c.iter("bitworking.org"))))
+ c.add("fred", "password2", "wellformedweb.org")
+ self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
+ self.assertEqual(0, len(list(c.iter("bitworking.org"))))
+ self.assertEqual(0, len(list(c.iter(""))))
+
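The semantics this test pins down: credentials added without a domain apply to every host, domain-scoped ones only to their own host, and clear() empties both kinds. A minimal stand-in with the same iter() behavior (a sketch, not the real class):

    class CredentialsSketch:
        def __init__(self):
            self.credentials = []

        def add(self, name, password, domain=""):
            # An empty domain means "applies to all hosts".
            self.credentials.append((domain.lower(), name, password))

        def iter(self, domain):
            for (cdomain, name, password) in self.credentials:
                if cdomain in ("", domain):
                    yield (name, password)

        def clear(self):
            self.credentials = []

    c = CredentialsSketch()
    c.add("joe", "password")
    c.add("fred", "password2", "wellformedweb.org")
    assert list(c.iter("bitworking.org")) == [("joe", "password")]
    assert ("fred", "password2") in list(c.iter("wellformedweb.org"))
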
+
+class ParserTest(unittest.TestCase):
+ def testFromStd66(self):
+ self.assertEqual(
+ ("http", "example.com", "", None, None),
+ httplib2.parse_uri("http://example.com"),
+ )
+ self.assertEqual(
+ ("https", "example.com", "", None, None),
+ httplib2.parse_uri("https://example.com"),
+ )
+ self.assertEqual(
+ ("https", "example.com:8080", "", None, None),
+ httplib2.parse_uri("https://example.com:8080"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/", None, None),
+ httplib2.parse_uri("http://example.com/"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", None, None),
+ httplib2.parse_uri("http://example.com/path"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", "a=1&b=2", None),
+ httplib2.parse_uri("http://example.com/path?a=1&b=2"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", "a=1&b=2", "fred"),
+ httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"),
+ )
+ self.assertEqual(
+ ("http", "example.com", "/path", "a=1&b=2", "fred"),
+ httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"),
+ )
+
+
+class UrlNormTest(unittest.TestCase):
+ def test(self):
+ self.assertEqual(
+ "http://example.org/", httplib2.urlnorm("http://example.org")[-1]
+ )
+ self.assertEqual(
+ "http://example.org/", httplib2.urlnorm("http://EXAMple.org")[-1]
+ )
+ self.assertEqual(
+ "http://example.org/?=b", httplib2.urlnorm("http://EXAMple.org?=b")[-1]
+ )
+ self.assertEqual(
+ "http://example.org/mypath?a=b",
+ httplib2.urlnorm("http://EXAMple.org/mypath?a=b")[-1],
+ )
+ self.assertEqual(
+ "http://localhost:80/", httplib2.urlnorm("http://localhost:80")[-1]
+ )
+ self.assertEqual(
+ httplib2.urlnorm("http://localhost:80/"),
+ httplib2.urlnorm("HTTP://LOCALHOST:80"),
+ )
+ try:
+ httplib2.urlnorm("/")
+ self.fail("Non-absolute URIs should raise an exception")
+ except httplib2.RelativeURIError:
+ pass
+
+
+class UrlSafenameTest(unittest.TestCase):
+ def test(self):
+ # Test that different URIs end up generating different safe names
+ self.assertEqual(
+ "example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f",
+ httplib2.safename("http://example.org/fred/?a=b"),
+ )
+ self.assertEqual(
+ "example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b",
+ httplib2.safename("http://example.org/fred?/a=b"),
+ )
+ self.assertEqual(
+ "www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968",
+ httplib2.safename("http://www.example.org/fred?/a=b"),
+ )
+ self.assertEqual(
+ httplib2.safename(httplib2.urlnorm("http://www")[-1]),
+ httplib2.safename(httplib2.urlnorm("http://WWW")[-1]),
+ )
+ self.assertEqual(
+ "www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d",
+ httplib2.safename("https://www.example.org/fred?/a=b"),
+ )
+ self.assertNotEqual(
+ httplib2.safename("http://www"), httplib2.safename("https://www")
+ )
+ # Test the max length limits
+ uri = "http://" + ("w" * 200) + ".org"
+ uri2 = "http://" + ("w" * 201) + ".org"
+ self.assertNotEqual(httplib2.safename(uri2), httplib2.safename(uri))
+ # Max length should be 200 + 1 (",") + 32
+ self.assertEqual(233, len(httplib2.safename(uri2)))
+ self.assertEqual(233, len(httplib2.safename(uri)))
+ # Unicode
+ if sys.version_info >= (2, 3):
+ self.assertEqual(
+ "xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193",
+ httplib2.safename("http://\u2304.org/fred/?a=b"),
+ )
+
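The literal filenames asserted above share one structure: a sanitized, length-capped readable rendering of the URI, a comma, and the 32-character MD5 hex digest of the URI, so the digest keeps keys unique even when the readable prefix is truncated at 200 characters (hence the fixed 233-character results). A sketch of that scheme (the sanitization here is illustrative, not safename's exact transform):

    import hashlib

    def cache_key(uri, readable_limit=200):
        # Replace filesystem-unfriendly characters, cap the readable part,
        # and append a full-URI hash to preserve uniqueness.
        readable = "".join(c if c.isalnum() or c in ".,=" else "," for c in uri)
        return readable[:readable_limit] + "," + hashlib.md5(uri.encode("utf-8")).hexdigest()

    key = cache_key("http://" + "w" * 201 + ".org")
    assert len(key) == 200 + 1 + 32  # readable part capped; the hash still differs per URI
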
+
+class _MyResponse(io.BytesIO):
+ def __init__(self, body, **kwargs):
+ io.BytesIO.__init__(self, body)
+ self.headers = kwargs
+
+ def items(self):
+ return self.headers.items()
+
+ def iteritems(self):
+ return iter(self.headers.items())
+
+
+class _MyHTTPConnection(object):
+ "This class is just a mock of httplib.HTTPConnection used for testing"
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ):
+ self.host = host
+ self.port = port
+ self.timeout = timeout
+ self.log = ""
+ self.sock = None
+
+ def set_debuglevel(self, level):
+ pass
+
+ def connect(self):
+ "Connect to a host on a given port."
+ pass
+
+ def close(self):
+ pass
+
+ def request(self, method, request_uri, body, headers):
+ pass
+
+ def getresponse(self):
+ return _MyResponse(b"the body", status="200")
+
+
+class _MyHTTPBadStatusConnection(object):
+ "Mock of httplib.HTTPConnection that raises BadStatusLine."
+
+ num_calls = 0
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ):
+ self.host = host
+ self.port = port
+ self.timeout = timeout
+ self.log = ""
+ self.sock = None
+ _MyHTTPBadStatusConnection.num_calls = 0
+
+ def set_debuglevel(self, level):
+ pass
+
+ def connect(self):
+ pass
+
+ def close(self):
+ pass
+
+ def request(self, method, request_uri, body, headers):
+ pass
+
+ def getresponse(self):
+ _MyHTTPBadStatusConnection.num_calls += 1
+ raise http.client.BadStatusLine("")
+
+
+class HttpTest(unittest.TestCase):
+ def setUp(self):
+ if os.path.exists(cacheDirName):
+ [
+ os.remove(os.path.join(cacheDirName, file))
+ for file in os.listdir(cacheDirName)
+ ]
+ self.http = httplib2.Http(cacheDirName)
+ self.http.clear_credentials()
+
+ def testIPv6NoSSL(self):
+ try:
+ self.http.request("http://[::1]/")
+ except socket.gaierror:
+ self.fail("should get the address family right for IPv6")
+ except socket.error:
+ # Even if IPv6 isn't installed on a machine it should just raise socket.error
+ pass
+
+ def testIPv6SSL(self):
+ try:
+ self.http.request("https://[::1]/")
+ except socket.gaierror:
+ self.fail("should get the address family right for IPv6")
+ except socket.error:
+ # Even if IPv6 isn't installed on a machine it should just raise socket.error
+ pass
+
+ def testConnectionType(self):
+ self.http.force_exception_to_status_code = False
+ response, content = self.http.request(
+ "http://bitworking.org", connection_type=_MyHTTPConnection
+ )
+ self.assertEqual(response["content-location"], "http://bitworking.org")
+ self.assertEqual(content, b"the body")
+
+ def testBadStatusLineRetry(self):
+ old_retries = httplib2.RETRIES
+ httplib2.RETRIES = 1
+ self.http.force_exception_to_status_code = False
+ try:
+ response, content = self.http.request(
+ "http://bitworking.org", connection_type=_MyHTTPBadStatusConnection
+ )
+ except http.client.BadStatusLine:
+ self.assertEqual(2, _MyHTTPBadStatusConnection.num_calls)
+ httplib2.RETRIES = old_retries
+
+ def testGetUnknownServer(self):
+ self.http.force_exception_to_status_code = False
+ try:
+ self.http.request("http://fred.bitworking.org/")
+ self.fail(
+ "An httplib2.ServerNotFoundError Exception must be thrown on an unresolvable server."
+ )
+ except httplib2.ServerNotFoundError:
+ pass
+
+ # Now test with exceptions turned off
+ self.http.force_exception_to_status_code = True
+
+ (response, content) = self.http.request("http://fred.bitworking.org/")
+ self.assertEqual(response["content-type"], "text/plain")
+ self.assertTrue(content.startswith(b"Unable to find"))
+ self.assertEqual(response.status, 400)
+
+ def testGetConnectionRefused(self):
+ self.http.force_exception_to_status_code = False
+ try:
+ self.http.request("http://localhost:7777/")
+ self.fail("An socket.error exception must be thrown on Connection Refused.")
+ except socket.error:
+ pass
+
+ # Now test with exceptions turned off
+ self.http.force_exception_to_status_code = True
+
+ (response, content) = self.http.request("http://localhost:7777/")
+ self.assertEqual(response["content-type"], "text/plain")
+ self.assertTrue(b"Connection refused" in content)
+ self.assertEqual(response.status, 400)
+
+ def testGetIRI(self):
+ if sys.version_info >= (2, 3):
+ uri = urllib.parse.urljoin(
+ base, "reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}"
+ )
+ (response, content) = self.http.request(uri, "GET")
+ d = self.reflector(content)
+ self.assertTrue("QUERY_STRING" in d)
+ self.assertTrue(d["QUERY_STRING"].find("%D0%82") > 0)
+
+ def testGetIsDefaultMethod(self):
+ # Test that GET is the default method
+ uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
+ (response, content) = self.http.request(uri)
+ self.assertEqual(response["x-method"], "GET")
+
+ def testDifferentMethods(self):
+ # Test that all methods can be used
+ uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
+ for method in ["GET", "PUT", "DELETE", "POST"]:
+ (response, content) = self.http.request(uri, method, body=b" ")
+ self.assertEqual(response["x-method"], method)
+
+ def testHeadRead(self):
+ # Test that we don't try to read the response of a HEAD request
+ # since httplib blocks response.read() for HEAD requests.
+ # Oddly enough this doesn't appear as a problem when doing HEAD requests
+ # against Apache servers.
+ uri = "http://www.google.com/"
+ (response, content) = self.http.request(uri, "HEAD")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(content, b"")
+
+ def testGetNoCache(self):
+ # Test that we can do a GET without the cache turned on.
+ http = httplib2.Http()
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.previous, None)
+
+ def testGetOnlyIfCachedCacheHit(self):
+ # Test that we can do a GET with the cache and 'only-if-cached'
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = self.http.request(uri, "GET")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
+ self.assertEqual(response.fromcache, True)
+ self.assertEqual(response.status, 200)
+
+ def testGetOnlyIfCachedCacheMiss(self):
+ # Test a GET with 'only-if-cached' when the entry is not yet cached
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
+ self.assertEqual(response.fromcache, False)
+ self.assertEqual(response.status, 504)
+
+ def testGetOnlyIfCachedNoCacheAtAll(self):
+ # Test a GET with 'only-if-cached' when there is no cache at all
+ # Of course, there might be an intermediary beyond us
+ # that responds to the 'only-if-cached', so this
+ # test can't really be guaranteed to pass.
+ http = httplib2.Http()
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
+ self.assertEqual(response.fromcache, False)
+ self.assertEqual(response.status, 504)
+
+ def testUserAgent(self):
+ # Test that we provide a default user-agent
+ uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertTrue(content.startswith(b"Python-httplib2/"))
+
+ def testUserAgentNonDefault(self):
+ # Test that the default user-agent can be overridden
+
+ uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"User-Agent": "fred/1.0"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertTrue(content.startswith(b"fred/1.0"))
+
+ def testGet300WithLocation(self):
+ # Test that we automatically follow 300 redirects if a Location: header is provided
+ uri = urllib.parse.urljoin(base, "300/with-location-header.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 300)
+ self.assertEqual(response.previous.fromcache, False)
+
+ # Confirm that the intermediate 300 is not cached
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 300)
+ self.assertEqual(response.previous.fromcache, False)
+
+ def testGet300WithLocationNoRedirect(self):
+ # Test that we do not follow a 300 redirect when follow_redirects is off
+ self.http.follow_redirects = False
+ uri = urllib.parse.urljoin(base, "300/with-location-header.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 300)
+
+ def testGet300WithoutLocation(self):
+ # Not giving a Location: header in a 300 response is acceptable
+ # In which case we just return the 300 response
+ uri = urllib.parse.urljoin(base, "300/without-location-header.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 300)
+ self.assertTrue(response["content-type"].startswith("text/html"))
+ self.assertEqual(response.previous, None)
+
+ def testGet301(self):
+ # Test that we automatically follow 301 redirects
+ # and that we cache the 301 response
+ uri = urllib.parse.urljoin(base, "301/onestep.asis")
+ destination = urllib.parse.urljoin(base, "302/final-destination.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertTrue("content-location" in response)
+ self.assertEqual(response["content-location"], destination)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 301)
+ self.assertEqual(response.previous.fromcache, False)
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response["content-location"], destination)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 301)
+ self.assertEqual(response.previous.fromcache, True)
+
+ def testHead301(self):
+ # Test that we automatically follow 301 redirects
+ uri = urllib.parse.urljoin(base, "301/onestep.asis")
+ (response, content) = self.http.request(uri, "HEAD")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.previous.status, 301)
+ self.assertEqual(response.previous.fromcache, False)
+
+ def testGet301NoRedirect(self):
+ # Test that we do not follow 301 redirects
+ # when follow_redirects is off
+ self.http.follow_redirects = False
+ uri = urllib.parse.urljoin(base, "301/onestep.asis")
+ destination = urllib.parse.urljoin(base, "302/final-destination.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 301)
+
+ def testGet302(self):
+ # Test that we automatically follow 302 redirects
+ # and that we DO NOT cache the 302 response
+ uri = urllib.parse.urljoin(base, "302/onestep.asis")
+ destination = urllib.parse.urljoin(base, "302/final-destination.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response["content-location"], destination)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 302)
+ self.assertEqual(response.previous.fromcache, False)
+
+ uri = urllib.parse.urljoin(base, "302/onestep.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ self.assertEqual(response["content-location"], destination)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 302)
+ self.assertEqual(response.previous.fromcache, False)
+ self.assertEqual(response.previous["content-location"], uri)
+
+ uri = urllib.parse.urljoin(base, "302/twostep.asis")
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 302)
+ self.assertEqual(response.previous.fromcache, False)
+
+ def testGet302RedirectionLimit(self):
+ # Test that we can set a lower redirection limit
+ # and that we raise an exception when we exceed
+ # that limit.
+ self.http.force_exception_to_status_code = False
+
+ uri = urllib.parse.urljoin(base, "302/twostep.asis")
+ try:
+ (response, content) = self.http.request(uri, "GET", redirections=1)
+ self.fail("This should not happen")
+ except httplib2.RedirectLimit:
+ pass
+ except Exception:
+ self.fail("Threw wrong kind of exception")
+
+ # Re-run the test without the exceptions
+ self.http.force_exception_to_status_code = True
+
+ (response, content) = self.http.request(uri, "GET", redirections=1)
+ self.assertEqual(response.status, 500)
+ self.assertTrue(response.reason.startswith("Redirected more"))
+ self.assertEqual("302", response["status"])
+ self.assertTrue(content.startswith(b"<html>"))
+ self.assertTrue(response.previous != None)
+
+ def testGet302NoLocation(self):
+ # Test that we throw an exception when we get
+ # a 302 with no Location: header.
+ self.http.force_exception_to_status_code = False
+ uri = urllib.parse.urljoin(base, "302/no-location.asis")
+ try:
+ (response, content) = self.http.request(uri, "GET")
+ self.fail("Should never reach here")
+ except httplib2.RedirectMissingLocation:
+ pass
+ except Exception:
+ self.fail("Threw wrong kind of exception")
+
+ # Re-run the test without the exceptions
+ self.http.force_exception_to_status_code = True
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 500)
+ self.assertTrue(response.reason.startswith("Redirected but"))
+ self.assertEqual("302", response["status"])
+ self.assertTrue(content.startswith(b"This is content"))
+
+ def testGet301ViaHttps(self):
+ # Google always redirects to http://google.com
+ (response, content) = self.http.request("https://code.google.com/apis/", "GET")
+ self.assertEqual(200, response.status)
+ self.assertEqual(301, response.previous.status)
+
+ def testGetViaHttps(self):
+ # Test that we can handle HTTPS
+ (response, content) = self.http.request("https://google.com/adsense/", "GET")
+ self.assertEqual(200, response.status)
+
+ def testGetViaHttpsSpecViolationOnLocation(self):
+ # Test that we follow redirects through HTTPS
+ # even if they violate the spec by including
+ # a relative Location: header instead of an
+ # absolute one.
+ (response, content) = self.http.request("https://google.com/adsense", "GET")
+ self.assertEqual(200, response.status)
+ self.assertNotEqual(None, response.previous)
+
+ def testGetViaHttpsKeyCert(self):
+ # At this point I can only test
+ # that the key and cert files are passed in
+ # correctly to httplib. It would be nice to have
+ # a real https endpoint to test against.
+ http = httplib2.Http(timeout=2)
+
+ http.add_certificate("akeyfile", "acertfile", "bitworking.org")
+ try:
+ (response, content) = http.request("https://bitworking.org", "GET")
+ except AttributeError:
+ self.assertEqual(
+ http.connections["https:bitworking.org"].key_file, "akeyfile"
+ )
+ self.assertEqual(
+ http.connections["https:bitworking.org"].cert_file, "acertfile"
+ )
+ except IOError:
+ # Skip on 3.2
+ pass
+
+ try:
+ (response, content) = http.request("https://notthere.bitworking.org", "GET")
+ except httplib2.ServerNotFoundError:
+ self.assertEqual(
+ http.connections["https:notthere.bitworking.org"].key_file, None
+ )
+ self.assertEqual(
+ http.connections["https:notthere.bitworking.org"].cert_file, None
+ )
+ except IOError:
+ # Skip on 3.2
+ pass
+
+ def testSslCertValidation(self):
+ # Test that we get an IOError when specifying a non-existent CA
+ # certs file.
+ http = httplib2.Http(ca_certs="/nosuchfile")
+ self.assertRaises(IOError, http.request, "https://www.google.com/", "GET")
+
+ # Test that we get an ssl.SSLError if we try to access
+ # https://www.google.com, using a CA cert file that doesn't contain
+ # the CA Google uses (i.e., simulating a cert that's not signed by a
+ # trusted CA).
+ other_ca_certs = os.path.join(
+ os.path.dirname(os.path.abspath(httplib2.__file__)),
+ "test",
+ "other_cacerts.txt",
+ )
+ http = httplib2.Http(ca_certs=other_ca_certs)
+ self.assertRaises(ssl.SSLError, http.request, "https://www.google.com/", "GET")
+
+ def testSniHostnameValidation(self):
+ self.http.request("https://google.com/", method="GET")
+
+ def testGet303(self):
+ # Do a follow-up GET on a Location: header
+ # returned from a POST that gave a 303.
+ uri = urllib.parse.urljoin(base, "303/303.cgi")
+ (response, content) = self.http.request(uri, "POST", " ")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 303)
+
+ def testGet303NoRedirect(self):
+ # Test that we do not do the follow-up GET on a 303
+ # when follow_redirects is off
+ self.http.follow_redirects = False
+ uri = urllib.parse.urljoin(base, "303/303.cgi")
+ (response, content) = self.http.request(uri, "POST", " ")
+ self.assertEqual(response.status, 303)
+
+ def test303ForDifferentMethods(self):
+ # Test that a 303 turns every method into a GET for the follow-up request
+ uri = urllib.parse.urljoin(base, "303/redirect-to-reflector.cgi")
+ for (method, method_on_303) in [
+ ("PUT", "GET"),
+ ("DELETE", "GET"),
+ ("POST", "GET"),
+ ("GET", "GET"),
+ ("HEAD", "GET"),
+ ]:
+ (response, content) = self.http.request(uri, method, body=b" ")
+ self.assertEqual(response["x-method"], method_on_303)
+
+ def testGet304(self):
+ # Test that we use ETags properly to validate our cache
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "must-revalidate"},
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+
+ cache_file_name = os.path.join(
+ cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1])
+ )
+ f = open(cache_file_name, "r")
+ status_line = f.readline()
+ f.close()
+
+ self.assertTrue(status_line.startswith("status:"))
+
+ (response, content) = self.http.request(
+ uri, "HEAD", headers={"accept-encoding": "identity"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity", "range": "bytes=0-0"}
+ )
+ self.assertEqual(response.status, 206)
+ self.assertEqual(response.fromcache, False)
+
+ def testGetIgnoreEtag(self):
+ # Test that we can forcibly ignore ETags
+ uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
+ )
+ d = self.reflector(content)
+ self.assertTrue("HTTP_IF_NONE_MATCH" in d)
+
+ self.http.ignore_etag = True
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
+ )
+ d = self.reflector(content)
+ self.assertEqual(response.fromcache, False)
+ self.assertFalse("HTTP_IF_NONE_MATCH" in d)
+
+ def testOverrideEtag(self):
+ # Test that we can override the ETag sent in If-None-Match
+ uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
+ )
+ d = self.reflector(content)
+ self.assertTrue("HTTP_IF_NONE_MATCH" in d)
+ self.assertNotEqual(d["HTTP_IF_NONE_MATCH"], "fred")
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={
+ "accept-encoding": "identity",
+ "cache-control": "max-age=0",
+ "if-none-match": "fred",
+ },
+ )
+ d = self.reflector(content)
+ self.assertTrue("HTTP_IF_NONE_MATCH" in d)
+ self.assertEqual(d["HTTP_IF_NONE_MATCH"], "fred")
+
+ # MAP-commented this out because it consistently fails
+ # def testGet304EndToEnd(self):
+ # # Test that end to end headers get overwritten in the cache
+ # uri = urllib.parse.urljoin(base, "304/end2end.cgi")
+ # (response, content) = self.http.request(uri, "GET")
+ # self.assertNotEqual(response['etag'], "")
+ # old_date = response['date']
+ # time.sleep(2)
+ #
+ # (response, content) = self.http.request(uri, "GET", headers = {'Cache-Control': 'max-age=0'})
+ # # The response should be from the cache, but the Date: header should be updated.
+ # new_date = response['date']
+ # self.assertNotEqual(new_date, old_date)
+ # self.assertEqual(response.status, 200)
+ # self.assertEqual(response.fromcache, True)
+
+ def testGet304LastModified(self):
+ # Test that we can still handle a 304
+ # by only using the last-modified cache validator.
+ uri = urllib.parse.urljoin(
+ base, "304/last-modified-only/last-modified-only.txt"
+ )
+ (response, content) = self.http.request(uri, "GET")
+
+ self.assertNotEqual(response["last-modified"], "")
+ (response, content) = self.http.request(uri, "GET")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+
+ def testGet307(self):
+ # Test that we do follow 307 redirects but
+ # do not cache the 307
+ uri = urllib.parse.urljoin(base, "307/onestep.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 307)
+ self.assertEqual(response.previous.fromcache, False)
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ self.assertEqual(content, b"This is the final destination.\n")
+ self.assertEqual(response.previous.status, 307)
+ self.assertEqual(response.previous.fromcache, False)
+
+ def testGet410(self):
+ # Test that we pass 410's through
+ uri = urllib.parse.urljoin(base, "410/410.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 410)
+
+ def testVaryHeaderSimple(self):
+ """RFC 2616 13.6 When the cache receives a subsequent request whose Request-URI specifies one or more cache entries including a Vary header field, the cache MUST NOT use such a cache entry to construct a response to the new request unless all of the selecting request-headers present in the new request match the corresponding stored request-headers in the original request.
+
+ """
+ # test that the vary header is sent
+ uri = urllib.parse.urljoin(base, "vary/accept.asis")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertTrue("vary" in response)
+
+ # get the resource again, from the cache since accept header in this
+ # request is the same as the request
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True, msg="Should be from cache")
+
+ # get the resource again, not from cache since Accept headers does not match
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/html"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False, msg="Should not be from cache")
+
+ # get the resource again, without any Accept header, so again no match
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False, msg="Should not be from cache")
+
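A compact sketch of the selection rule quoted in the docstring above (illustrative; httplib2 actually stores the selecting request headers alongside the cached response):

    def vary_match(vary_value, stored_request_headers, new_request_headers):
        # A cached entry whose response carried this Vary value may answer
        # the new request only if every selecting header matches exactly.
        for name in (h.strip().lower() for h in vary_value.split(",")):
            if stored_request_headers.get(name) != new_request_headers.get(name):
                return False
        return True

    stored = {"accept": "text/plain"}
    assert vary_match("Accept", stored, {"accept": "text/plain"})     # same Accept -> cache hit
    assert not vary_match("Accept", stored, {"accept": "text/html"})  # different -> miss
    assert not vary_match("Accept", stored, {})                       # absent -> miss
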
+ def testNoVary(self):
+ pass
+ # when there is no vary, a different Accept header (e.g.) should not
+ # impact if the cache is used
+ # test that the vary header is not sent
+ # uri = urllib.parse.urljoin(base, "vary/no-vary.asis")
+ # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
+ # self.assertEqual(response.status, 200)
+ # self.assertFalse('vary' in response)
+ #
+ # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
+ # self.assertEqual(response.status, 200)
+ # self.assertEqual(response.fromcache, True, msg="Should be from cache")
+ #
+ # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
+ # self.assertEqual(response.status, 200)
+ # self.assertEqual(response.fromcache, True, msg="Should be from cache")
+
+ def testVaryHeaderDouble(self):
+ uri = urllib.parse.urljoin(base, "vary/accept-double.asis")
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={
+ "Accept": "text/plain",
+ "Accept-Language": "da, en-gb;q=0.8, en;q=0.7",
+ },
+ )
+ self.assertEqual(response.status, 200)
+ self.assertTrue("vary" in response)
+
+ # we are from cache
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={
+ "Accept": "text/plain",
+ "Accept-Language": "da, en-gb;q=0.8, en;q=0.7",
+ },
+ )
+ self.assertEqual(response.fromcache, True, msg="Should be from cache")
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ # get the resource again, not from cache, varied headers don't match exact
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept-Language": "da"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False, msg="Should not be from cache")
+
+ def testVaryUnusedHeader(self):
+ # A header named in Vary that no request actually sends does not prevent a cache match.
+ uri = urllib.parse.urljoin(base, "vary/unused-header.asis")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertTrue("vary" in response)
+
+ # we are from cache
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Accept": "text/plain"}
+ )
+ self.assertEqual(response.fromcache, True, msg="Should be from cache")
+
+ def testHeadGZip(self):
+ # Test that we don't try to decompress a HEAD response
+ uri = urllib.parse.urljoin(base, "gzip/final-destination.txt")
+ (response, content) = self.http.request(uri, "HEAD")
+ self.assertEqual(response.status, 200)
+ self.assertNotEqual(int(response["content-length"]), 0)
+ self.assertEqual(content, b"")
+
+ def testGetGZip(self):
+ # Test that we support gzip compression
+ uri = urllib.parse.urljoin(base, "gzip/final-destination.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertFalse("content-encoding" in response)
+ self.assertTrue("-content-encoding" in response)
+ self.assertEqual(
+ int(response["content-length"]), len(b"This is the final destination.\n")
+ )
+ self.assertEqual(content, b"This is the final destination.\n")
+
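The header expectations in these gzip tests describe the post-processing contract: after transparent decompression, content-encoding disappears, its original value is recorded under -content-encoding, and content-length reflects the decompressed body. A sketch of that step, assuming a gzip-encoded body (the field names are taken from the asserts above):

    import gzip

    def decompress_response(headers, body):
        if headers.get("content-encoding") == "gzip":
            body = gzip.decompress(body)
            # Record the original encoding under a marker key and fix up the length.
            headers["-content-encoding"] = headers.pop("content-encoding")
            headers["content-length"] = str(len(body))
        return headers, body

    raw = gzip.compress(b"This is the final destination.\n")
    headers, body = decompress_response(
        {"content-encoding": "gzip", "content-length": str(len(raw))}, raw)
    assert body == b"This is the final destination.\n"
    assert "content-encoding" not in headers
    assert headers["-content-encoding"] == "gzip"
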
+ def testPostAndGZipResponse(self):
+ uri = urllib.parse.urljoin(base, "gzip/post.cgi")
+ (response, content) = self.http.request(uri, "POST", body=" ")
+ self.assertEqual(response.status, 200)
+ self.assertFalse("content-encoding" in response)
+ self.assertTrue("-content-encoding" in response)
+
+ def testGetGZipFailure(self):
+ # Test that we raise FailedToDecompressContent when the gzip content is corrupt
+ self.http.force_exception_to_status_code = False
+ uri = urllib.parse.urljoin(base, "gzip/failed-compression.asis")
+ try:
+ (response, content) = self.http.request(uri, "GET")
+ self.fail("Should never reach here")
+ except httplib2.FailedToDecompressContent:
+ pass
+ except Exception:
+ self.fail("Threw wrong kind of exception")
+
+ # Re-run the test without the exceptions
+ self.http.force_exception_to_status_code = True
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 500)
+ self.assertTrue(response.reason.startswith("Content purported"))
+
+ def testIndividualTimeout(self):
+ uri = urllib.parse.urljoin(base, "timeout/timeout.cgi")
+ http = httplib2.Http(timeout=1)
+ http.force_exception_to_status_code = True
+
+ (response, content) = http.request(uri)
+ self.assertEqual(response.status, 408)
+ self.assertTrue(response.reason.startswith("Request Timeout"))
+ self.assertTrue(content.startswith(b"Request Timeout"))
+
+ def testGetDeflate(self):
+ # Test that we support deflate compression
+ uri = urllib.parse.urljoin(base, "deflate/deflated.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertFalse("content-encoding" in response)
+ self.assertEqual(
+ int(response["content-length"]), len("This is the final destination.")
+ )
+ self.assertEqual(content, b"This is the final destination.")
+
+ def testGetDeflateFailure(self):
+ # Test that we raise FailedToDecompressContent when the deflate content is corrupt
+ self.http.force_exception_to_status_code = False
+
+ uri = urllib.parse.urljoin(base, "deflate/failed-compression.asis")
+ try:
+ (response, content) = self.http.request(uri, "GET")
+ self.fail("Should never reach here")
+ except httplib2.FailedToDecompressContent:
+ pass
+ except Exception:
+ self.fail("Threw wrong kind of exception")
+
+ # Re-run the test without the exceptions
+ self.http.force_exception_to_status_code = True
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 500)
+ self.assertTrue(response.reason.startswith("Content purported"))
+
+ def testGetDuplicateHeaders(self):
+ # Test that duplicate headers get concatenated via ','
+ uri = urllib.parse.urljoin(base, "duplicate-headers/multilink.asis")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(content, b"This is content\n")
+ self.assertEqual(
+ response["link"].split(",")[0],
+ '<http://bitworking.org>; rel="home"; title="BitWorking"',
+ )
+
+ def testGetCacheControlNoCache(self):
+ # Test Cache-Control: no-cache on requests
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+
+ (response, content) = self.http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "Cache-Control": "no-cache"},
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ def testGetCacheControlPragmaNoCache(self):
+ # Test Pragma: no-cache on requests
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertNotEqual(response["etag"], "")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"accept-encoding": "identity", "Pragma": "no-cache"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ def testGetCacheControlNoStoreRequest(self):
+ # A no-store request means that the response should not be stored.
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ def testGetCacheControlNoStoreResponse(self):
+ # A no-store response means that the response should not be stored.
+ uri = urllib.parse.urljoin(base, "no-store/no-store.asis")
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ def testGetCacheControlNoCacheNoStoreRequest(self):
+        # Test that a no-store, no-cache request clears the entry from the cache
+ # even if it was cached previously.
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+
+ (response, content) = self.http.request(uri, "GET")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.fromcache, True)
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store, no-cache"}
+ )
+ (response, content) = self.http.request(
+ uri, "GET", headers={"Cache-Control": "no-store, no-cache"}
+ )
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+
+ def testUpdateInvalidatesCache(self):
+        # Test that calling PUT or DELETE on a
+        # cached URI invalidates the cache entry.
+ uri = urllib.parse.urljoin(base, "304/test_etag.txt")
+
+ (response, content) = self.http.request(uri, "GET")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.fromcache, True)
+ (response, content) = self.http.request(uri, "DELETE")
+ self.assertEqual(response.status, 405)
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.fromcache, False)
+
+ def testUpdateUsesCachedETag(self):
+ # Test that we natively support http://www.w3.org/1999/04/Editing/
+ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ (response, content) = self.http.request(uri, "PUT", body="foo")
+ self.assertEqual(response.status, 200)
+ (response, content) = self.http.request(uri, "PUT", body="foo")
+ self.assertEqual(response.status, 412)
+
+ def testUpdatePatchUsesCachedETag(self):
+ # Test that we natively support http://www.w3.org/1999/04/Editing/
+ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ (response, content) = self.http.request(uri, "PATCH", body="foo")
+ self.assertEqual(response.status, 200)
+ (response, content) = self.http.request(uri, "PATCH", body="foo")
+ self.assertEqual(response.status, 412)
+
+ def testUpdateUsesCachedETagAndOCMethod(self):
+ # Test that we natively support http://www.w3.org/1999/04/Editing/
+ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ self.http.optimistic_concurrency_methods.append("DELETE")
+ (response, content) = self.http.request(uri, "DELETE")
+ self.assertEqual(response.status, 200)
+
+ def testUpdateUsesCachedETagOverridden(self):
+ # Test that we natively support http://www.w3.org/1999/04/Editing/
+ uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")
+
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, False)
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+ self.assertEqual(response.fromcache, True)
+ (response, content) = self.http.request(
+ uri, "PUT", body="foo", headers={"if-match": "fred"}
+ )
+ self.assertEqual(response.status, 412)
+
+ def testBasicAuth(self):
+ # Test Basic Authentication
+ uri = urllib.parse.urljoin(base, "basic/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ uri = urllib.parse.urljoin(base, "basic/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ self.http.add_credentials("joe", "password")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ uri = urllib.parse.urljoin(base, "basic/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ def testBasicAuthWithDomain(self):
+ # Test Basic Authentication
+ uri = urllib.parse.urljoin(base, "basic/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ uri = urllib.parse.urljoin(base, "basic/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ self.http.add_credentials("joe", "password", "example.org")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ uri = urllib.parse.urljoin(base, "basic/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ domain = urllib.parse.urlparse(base)[1]
+ self.http.add_credentials("joe", "password", domain)
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ uri = urllib.parse.urljoin(base, "basic/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ def testBasicAuthTwoDifferentCredentials(self):
+ # Test Basic Authentication with multiple sets of credentials
+ uri = urllib.parse.urljoin(base, "basic2/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ uri = urllib.parse.urljoin(base, "basic2/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ self.http.add_credentials("fred", "barney")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ uri = urllib.parse.urljoin(base, "basic2/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ def testBasicAuthNested(self):
+ # Test Basic Authentication with resources
+ # that are nested
+ uri = urllib.parse.urljoin(base, "basic-nested/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ uri = urllib.parse.urljoin(base, "basic-nested/subdir")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ # Now add in credentials one at a time and test.
+ self.http.add_credentials("joe", "password")
+
+ uri = urllib.parse.urljoin(base, "basic-nested/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ uri = urllib.parse.urljoin(base, "basic-nested/subdir")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ self.http.add_credentials("fred", "barney")
+
+ uri = urllib.parse.urljoin(base, "basic-nested/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ uri = urllib.parse.urljoin(base, "basic-nested/subdir")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ def testDigestAuth(self):
+ # Test that we support Digest Authentication
+ uri = urllib.parse.urljoin(base, "digest/")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 401)
+
+ self.http.add_credentials("joe", "password")
+ (response, content) = self.http.request(uri, "GET")
+ self.assertEqual(response.status, 200)
+
+ uri = urllib.parse.urljoin(base, "digest/file.txt")
+ (response, content) = self.http.request(uri, "GET")
+
+ def testDigestAuthNextNonceAndNC(self):
+        # Test that if the server sets nextnonce, we reset
+        # the nonce count back to 1
+ uri = urllib.parse.urljoin(base, "digest/file.txt")
+ self.http.add_credentials("joe", "password")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ info = httplib2._parse_www_authenticate(response, "authentication-info")
+ self.assertEqual(response.status, 200)
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ info2 = httplib2._parse_www_authenticate(response, "authentication-info")
+ self.assertEqual(response.status, 200)
+
+ if "nextnonce" in info:
+ self.assertEqual(info2["nc"], 1)
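+        # Illustrative note, not part of the original test: per RFC 2617 the
+        # server may answer with, e.g.
+        #   Authentication-Info: nextnonce="f00f", qop=auth, nc=00000001
+        # and the client must restart its nonce count at 1 for the new nonce,
+        # which is what the assertion above verifies.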
+
+ def testDigestAuthStale(self):
+ # Test that we can handle a nonce becoming stale
+ uri = urllib.parse.urljoin(base, "digest-expire/file.txt")
+ self.http.add_credentials("joe", "password")
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ info = httplib2._parse_www_authenticate(response, "authentication-info")
+ self.assertEqual(response.status, 200)
+
+        # Sleep long enough for the nonce to become stale
+        time.sleep(3)
+
+ (response, content) = self.http.request(
+ uri, "GET", headers={"cache-control": "no-cache"}
+ )
+ self.assertFalse(response.fromcache)
+ self.assertTrue(response._stale_digest)
+ info3 = httplib2._parse_www_authenticate(response, "authentication-info")
+ self.assertEqual(response.status, 200)
+
+ def reflector(self, content):
+ return dict(
+ [
+ tuple(x.split("=", 1))
+ for x in content.decode("utf-8").strip().split("\n")
+ ]
+ )
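+    # Illustrative example, not part of the original suite: the reflector CGI
+    # emits environment lines such as b"HTTP_USER_AGENT=httplib2\n", which
+    # reflector() parses into {"HTTP_USER_AGENT": "httplib2"}.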
+
+ def testReflector(self):
+ uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
+ (response, content) = self.http.request(uri, "GET")
+ d = self.reflector(content)
+ self.assertTrue("HTTP_USER_AGENT" in d)
+
+ def testConnectionClose(self):
+ uri = "http://www.google.com/"
+ (response, content) = self.http.request(uri, "GET")
+ for c in self.http.connections.values():
+ self.assertNotEqual(None, c.sock)
+ (response, content) = self.http.request(
+ uri, "GET", headers={"connection": "close"}
+ )
+ for c in self.http.connections.values():
+ self.assertEqual(None, c.sock)
+
+ def testPickleHttp(self):
+ pickled_http = pickle.dumps(self.http)
+ new_http = pickle.loads(pickled_http)
+
+ self.assertEqual(
+ sorted(new_http.__dict__.keys()), sorted(self.http.__dict__.keys())
+ )
+ for key in new_http.__dict__:
+ if key in ("certificates", "credentials"):
+ self.assertEqual(
+ new_http.__dict__[key].credentials,
+ self.http.__dict__[key].credentials,
+ )
+ elif key == "cache":
+ self.assertEqual(
+ new_http.__dict__[key].cache, self.http.__dict__[key].cache
+ )
+ else:
+ self.assertEqual(new_http.__dict__[key], self.http.__dict__[key])
+
+ def testPickleHttpWithConnection(self):
+ self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection)
+ pickled_http = pickle.dumps(self.http)
+ new_http = pickle.loads(pickled_http)
+
+ self.assertEqual(list(self.http.connections.keys()), ["http:bitworking.org"])
+ self.assertEqual(new_http.connections, {})
+
+ def testPickleCustomRequestHttp(self):
+ def dummy_request(*args, **kwargs):
+ return new_request(*args, **kwargs)
+
+ dummy_request.dummy_attr = "dummy_value"
+
+ self.http.request = dummy_request
+ pickled_http = pickle.dumps(self.http)
+ self.assertFalse(b"S'request'" in pickled_http)
+
+
+try:
+ import memcache
+
+ class HttpTestMemCached(HttpTest):
+ def setUp(self):
+ self.cache = memcache.Client(["127.0.0.1:11211"], debug=0)
+ # self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
+ self.http = httplib2.Http(self.cache)
+ self.cache.flush_all()
+ # Not exactly sure why the sleep is needed here, but
+ # if not present then some unit tests that rely on caching
+ # fail. Memcached seems to lose some sets immediately
+ # after a flush_all if the set is to a value that
+ # was previously cached. (Maybe the flush is handled async?)
+ time.sleep(1)
+ self.http.clear_credentials()
+
+
+except ImportError:
+ pass
+
+# ------------------------------------------------------------------------
+
+
+class HttpPrivateTest(unittest.TestCase):
+ def testParseCacheControl(self):
+ # Test that we can parse the Cache-Control header
+ self.assertEqual({}, httplib2._parse_cache_control({}))
+ self.assertEqual(
+ {"no-cache": 1},
+ httplib2._parse_cache_control({"cache-control": " no-cache"}),
+ )
+ cc = httplib2._parse_cache_control(
+ {"cache-control": " no-cache, max-age = 7200"}
+ )
+ self.assertEqual(cc["no-cache"], 1)
+ self.assertEqual(cc["max-age"], "7200")
+ cc = httplib2._parse_cache_control({"cache-control": " , "})
+ self.assertEqual(cc[""], 1)
+
+ try:
+ cc = httplib2._parse_cache_control(
+ {"cache-control": "Max-age=3600;post-check=1800,pre-check=3600"}
+ )
+ self.assertTrue("max-age" in cc)
+        except Exception:
+ self.fail("Should not throw exception")
+
+ def testNormalizeHeaders(self):
+ # Test that we normalize headers to lowercase
+ h = httplib2._normalize_headers({"Cache-Control": "no-cache", "Other": "Stuff"})
+ self.assertTrue("cache-control" in h)
+ self.assertTrue("other" in h)
+ self.assertEqual("Stuff", h["other"])
+
+ def testConvertByteStr(self):
+ with self.assertRaises(TypeError):
+ httplib2._convert_byte_str(4)
+ self.assertEqual("Hello World", httplib2._convert_byte_str(b"Hello World"))
+ self.assertEqual("Bye World", httplib2._convert_byte_str("Bye World"))
+
+ def testExpirationModelTransparent(self):
+ # Test that no-cache makes our request TRANSPARENT
+ response_headers = {"cache-control": "max-age=7200"}
+ request_headers = {"cache-control": "no-cache"}
+ self.assertEqual(
+ "TRANSPARENT",
+ httplib2._entry_disposition(response_headers, request_headers),
+ )
+
+ def testMaxAgeNonNumeric(self):
+        # Test that a non-numeric max-age is treated as STALE
+ response_headers = {"cache-control": "max-age=fred, min-fresh=barney"}
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelNoCacheResponse(self):
+ # The date and expires point to an entry that should be
+        # FRESH, but the no-cache overrides that.
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 4)),
+ "cache-control": "no-cache",
+ }
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelStaleRequestMustReval(self):
+ # must-revalidate forces STALE
+ self.assertEqual(
+ "STALE",
+ httplib2._entry_disposition({}, {"cache-control": "must-revalidate"}),
+ )
+
+ def testExpirationModelStaleResponseMustReval(self):
+ # must-revalidate forces STALE
+ self.assertEqual(
+ "STALE",
+ httplib2._entry_disposition({"cache-control": "must-revalidate"}, {}),
+ )
+
+ def testExpirationModelFresh(self):
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
+ "cache-control": "max-age=2",
+ }
+ request_headers = {}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
+ time.sleep(3)
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationMaxAge0(self):
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
+ "cache-control": "max-age=0",
+ }
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelDateAndExpires(self):
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 2)),
+ }
+ request_headers = {}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
+ time.sleep(3)
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpiresZero(self):
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": "0",
+ }
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelDateOnly(self):
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 3))
+ }
+ request_headers = {}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelOnlyIfCached(self):
+ response_headers = {}
+ request_headers = {"cache-control": "only-if-cached"}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelMaxAgeBoth(self):
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "cache-control": "max-age=2",
+ }
+ request_headers = {"cache-control": "max-age=0"}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelDateAndExpiresMinFresh1(self):
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 2)),
+ }
+ request_headers = {"cache-control": "min-fresh=2"}
+ self.assertEqual(
+ "STALE", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testExpirationModelDateAndExpiresMinFresh2(self):
+ now = time.time()
+ response_headers = {
+ "date": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+ "expires": time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now + 4)),
+ }
+ request_headers = {"cache-control": "min-fresh=2"}
+ self.assertEqual(
+ "FRESH", httplib2._entry_disposition(response_headers, request_headers)
+ )
+
+ def testParseWWWAuthenticateEmpty(self):
+ res = httplib2._parse_www_authenticate({})
+ self.assertEqual(len(list(res.keys())), 0)
+
+ def testParseWWWAuthenticate(self):
+ # different uses of spaces around commas
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'
+ }
+ )
+ self.assertEqual(len(list(res.keys())), 1)
+ self.assertEqual(len(list(res["test"].keys())), 5)
+
+ # tokens with non-alphanum
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'}
+ )
+ self.assertEqual(len(list(res.keys())), 1)
+ self.assertEqual(len(list(res["t*!%#st"].keys())), 2)
+
+ # quoted string with quoted pairs
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Test realm="a \\"test\\" realm"'}
+ )
+ self.assertEqual(len(list(res.keys())), 1)
+ self.assertEqual(res["test"]["realm"], 'a "test" realm')
+
+ def testParseWWWAuthenticateStrict(self):
+ httplib2.USE_WWW_AUTH_STRICT_PARSING = 1
+ self.testParseWWWAuthenticate()
+ httplib2.USE_WWW_AUTH_STRICT_PARSING = 0
+
+ def testParseWWWAuthenticateBasic(self):
+ res = httplib2._parse_www_authenticate({"www-authenticate": 'Basic realm="me"'})
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic realm="me", algorithm="MD5"'}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ self.assertEqual("MD5", basic["algorithm"])
+
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic realm="me", algorithm=MD5'}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ self.assertEqual("MD5", basic["algorithm"])
+
+ def testParseWWWAuthenticateBasic2(self):
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic realm="me",other="fred" '}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ self.assertEqual("fred", basic["other"])
+
+ def testParseWWWAuthenticateBasic3(self):
+ res = httplib2._parse_www_authenticate(
+ {"www-authenticate": 'Basic REAlm="me" '}
+ )
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+
+ def testParseWWWAuthenticateDigest(self):
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+
+ def testParseWWWAuthenticateMultiple(self):
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+ self.assertEqual("dcd98b7102dd2f0e8b11d0f600bfb0c093", digest["nonce"])
+ self.assertEqual("5ccc069c403ebaf9f0171e9517f40e41", digest["opaque"])
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+
+ def testParseWWWAuthenticateMultiple2(self):
+ # Handle an added comma between challenges, which might get thrown in if the challenges were
+ # originally sent in separate www-authenticate headers.
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+ self.assertEqual("dcd98b7102dd2f0e8b11d0f600bfb0c093", digest["nonce"])
+ self.assertEqual("5ccc069c403ebaf9f0171e9517f40e41", digest["opaque"])
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+
+ def testParseWWWAuthenticateMultiple3(self):
+ # Handle an added comma between challenges, which might get thrown in if the challenges were
+ # originally sent in separate www-authenticate headers.
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("testrealm@host.com", digest["realm"])
+ self.assertEqual("auth,auth-int", digest["qop"])
+ self.assertEqual("dcd98b7102dd2f0e8b11d0f600bfb0c093", digest["nonce"])
+ self.assertEqual("5ccc069c403ebaf9f0171e9517f40e41", digest["opaque"])
+ basic = res["basic"]
+ self.assertEqual("me", basic["realm"])
+ wsse = res["wsse"]
+ self.assertEqual("foo", wsse["realm"])
+ self.assertEqual("UsernameToken", wsse["profile"])
+
+ def testParseWWWAuthenticateMultiple4(self):
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("test-real.m@host.com", digest["realm"])
+ self.assertEqual("\tauth,auth-int", digest["qop"])
+ self.assertEqual("(*)&^&$%#", digest["nonce"])
+
+ def testParseWWWAuthenticateMoreQuoteCombos(self):
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
+ }
+ )
+ digest = res["digest"]
+ self.assertEqual("myrealm", digest["realm"])
+
+ def testParseWWWAuthenticateMalformed(self):
+ try:
+ res = httplib2._parse_www_authenticate(
+ {
+ "www-authenticate": 'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'
+ }
+ )
+ self.fail("should raise an exception")
+ except httplib2.MalformedHeader:
+ pass
+
+ def testDigestObject(self):
+ credentials = ("joe", "password")
+ host = None
+ request_uri = "/projects/httplib2/test/digest/"
+ headers = {}
+ response = {
+ "www-authenticate": 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", '
+ 'algorithm=MD5, qop="auth"'
+ }
+ content = b""
+
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
+ d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
+ our_request = "authorization: %s" % headers["authorization"]
+ working_request = (
+ 'authorization: Digest username="joe", realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' uri="/projects/httplib2/test/digest/", algorithm=MD5, '
+ 'response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, '
+ 'nc=00000001, cnonce="33033375ec278a46"'
+ )
+ self.assertEqual(our_request, working_request)
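+        # Illustrative sketch, not part of the original test, of how the
+        # expected response digest is derived (RFC 2617, qop=auth):
+        #   HA1 = MD5("joe:myrealm:password")
+        #   HA2 = MD5("GET:/projects/httplib2/test/digest/")
+        #   response = MD5(HA1:nonce:00000001:33033375ec278a46:auth:HA2)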
+
+ def testDigestObjectWithOpaque(self):
+ credentials = ("joe", "password")
+ host = None
+ request_uri = "/projects/httplib2/test/digest/"
+ headers = {}
+ response = {
+ "www-authenticate": 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", '
+ 'algorithm=MD5, qop="auth", opaque="atestopaque"'
+ }
+ content = ""
+
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
+ d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
+ our_request = "authorization: %s" % headers["authorization"]
+ working_request = (
+ 'authorization: Digest username="joe", realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' uri="/projects/httplib2/test/digest/", algorithm=MD5, '
+ 'response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, '
+ 'nc=00000001, cnonce="33033375ec278a46", '
+ 'opaque="atestopaque"'
+ )
+ self.assertEqual(our_request, working_request)
+
+ def testDigestObjectStale(self):
+ credentials = ("joe", "password")
+ host = None
+ request_uri = "/projects/httplib2/test/digest/"
+ headers = {}
+ response = httplib2.Response({})
+ response["www-authenticate"] = (
+ 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' algorithm=MD5, qop="auth", stale=true'
+ )
+ response.status = 401
+ content = b""
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
+ # Returns true to force a retry
+ self.assertTrue(d.response(response, content))
+
+ def testDigestObjectAuthInfo(self):
+ credentials = ("joe", "password")
+ host = None
+ request_uri = "/projects/httplib2/test/digest/"
+ headers = {}
+ response = httplib2.Response({})
+ response["www-authenticate"] = (
+ 'Digest realm="myrealm", '
+ 'nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306",'
+ ' algorithm=MD5, qop="auth", stale=true'
+ )
+ response["authentication-info"] = 'nextnonce="fred"'
+ content = b""
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
+        # Returns False: the nextnonce in authentication-info updates the
+        # challenge in place, so no retry is needed
+ self.assertFalse(d.response(response, content))
+ self.assertEqual("fred", d.challenge["nonce"])
+ self.assertEqual(1, d.challenge["nc"])
+
+ def testWsseAlgorithm(self):
+ digest = httplib2._wsse_username_token(
+ "d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm"
+ )
+ expected = b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="
+ self.assertEqual(expected, digest)
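+        # Illustrative note, not part of the original test: the WSSE password
+        # digest is defined as base64(sha1(nonce + created + password)), i.e.
+        # base64(sha1(b"d36e316282959a9ed4c89851497a717f"
+        #             b"2003-12-15T14:43:07Z" b"taadtaadpstcsm")) here.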
+
+ def testEnd2End(self):
+ # one end to end header
+ response = {"content-type": "application/atom+xml", "te": "deflate"}
+ end2end = httplib2._get_end2end_headers(response)
+ self.assertTrue("content-type" in end2end)
+ self.assertTrue("te" not in end2end)
+ self.assertTrue("connection" not in end2end)
+
+ # one end to end header that gets eliminated
+ response = {
+ "connection": "content-type",
+ "content-type": "application/atom+xml",
+ "te": "deflate",
+ }
+ end2end = httplib2._get_end2end_headers(response)
+ self.assertTrue("content-type" not in end2end)
+ self.assertTrue("te" not in end2end)
+ self.assertTrue("connection" not in end2end)
+
+ # Degenerate case of no headers
+ response = {}
+ end2end = httplib2._get_end2end_headers(response)
+ self.assertEqual(0, len(end2end))
+
+        # Degenerate case of connection referring to a header not passed in
+ response = {"connection": "content-type"}
+ end2end = httplib2._get_end2end_headers(response)
+ self.assertEqual(0, len(end2end))
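+        # Illustrative note, not part of the original test: the hop-by-hop
+        # headers of RFC 2616 section 13.5.1 (Connection, Keep-Alive,
+        # Proxy-Authenticate, Proxy-Authorization, TE, Trailers,
+        # Transfer-Encoding, Upgrade) are never end-to-end, and any header
+        # named in Connection is demoted as well.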
+
+
+class TestProxyInfo(unittest.TestCase):
+ def setUp(self):
+ self.orig_env = dict(os.environ)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self.orig_env)
+
+ def test_from_url(self):
+ pi = httplib2.proxy_info_from_url("http://myproxy.example.com")
+ self.assertEqual(pi.proxy_host, "myproxy.example.com")
+ self.assertEqual(pi.proxy_port, 80)
+ self.assertEqual(pi.proxy_user, None)
+
+ def test_from_url_ident(self):
+ pi = httplib2.proxy_info_from_url("http://zoidberg:fish@someproxy:99")
+ self.assertEqual(pi.proxy_host, "someproxy")
+ self.assertEqual(pi.proxy_port, 99)
+ self.assertEqual(pi.proxy_user, "zoidberg")
+ self.assertEqual(pi.proxy_pass, "fish")
+
+ def test_from_env(self):
+ os.environ["http_proxy"] = "http://myproxy.example.com:8080"
+ pi = httplib2.proxy_info_from_environment()
+ self.assertEqual(pi.proxy_host, "myproxy.example.com")
+ self.assertEqual(pi.proxy_port, 8080)
+
+ def test_from_env_no_proxy(self):
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["https_proxy"] = "http://myproxy.example.com:81"
+ pi = httplib2.proxy_info_from_environment("https")
+ self.assertEqual(pi.proxy_host, "myproxy.example.com")
+ self.assertEqual(pi.proxy_port, 81)
+
+ def test_from_env_none(self):
+ os.environ.clear()
+ pi = httplib2.proxy_info_from_environment()
+ self.assertEqual(pi, None)
+
+ def test_proxy_headers(self):
+ headers = {"key0": "val0", "key1": "val1"}
+ pi = httplib2.ProxyInfo(
+ httplib2.socks.PROXY_TYPE_HTTP, "localhost", 1234, proxy_headers=headers
+ )
+ self.assertEqual(pi.proxy_headers, headers)
+
+ # regression: ensure that httplib2.HTTPConnectionWithTimeout initializes when proxy_info is not supplied
+ def test_proxy_init(self):
+ connection = httplib2.HTTPConnectionWithTimeout("www.google.com", 80)
+ connection.request("GET", "/")
+ connection.close()
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/requirements-test.txt b/requirements-test.txt
index 674a214..d208a8f 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -1,4 +1,5 @@
flake8==3.4.1
+future==0.16.0
mock==2.0.0
pytest-cov==2.5.1
pytest-forked==0.2
diff --git a/setup.py b/setup.py
index 0f3a516..3ac0729 100755
--- a/setup.py
+++ b/setup.py
@@ -1,17 +1,43 @@
+import os
import setuptools
+import setuptools.command.test
import sys
-pkgdir = {'': 'python%s' % sys.version_info[0]}
-VERSION = '0.11.3'
+pkgdir = {"": "python%s" % sys.version_info[0]}
+VERSION = '0.12.0'
+
+
+# `python setup.py test` uses existing Python environment, no virtualenv, no pip.
+# Use case: Archlinux package. https://github.com/httplib2/httplib2/issues/103
+# Otherwise, use `script/test`
+class TestCommand(setuptools.command.test.test):
+ def run_tests(self):
+        # pytest may not be installed yet
+ import pytest
+ args = ['--forked', '--fulltrace', '--no-cov', 'tests/']
+ if self.test_suite:
+ args += ['-k', self.test_suite]
+ sys.stderr.write('setup.py:test run pytest {}\n'.format(' '.join(args)))
+ errno = pytest.main(args)
+ sys.exit(errno)
+
+
+def read_requirements(name):
+ project_root = os.path.dirname(os.path.abspath(__file__))
+ with open(os.path.join(project_root, name), 'rb') as f:
+ # remove whitespace and comments
+ g = (line.decode('utf-8').lstrip().split('#', 1)[0].rstrip() for line in f)
+ return [l for l in g if l]
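+# Usage sketch, not part of the original file:
+#   read_requirements("requirements-test.txt")
+#   -> ["flake8==3.4.1", "future==0.16.0", "mock==2.0.0", ...]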
+
setuptools.setup(
- name='httplib2',
+ name="httplib2",
version=VERSION,
- author='Joe Gregorio',
- author_email='joe@bitworking.org',
- url='https://github.com/httplib2/httplib2',
- description='A comprehensive HTTP client library.',
- license='MIT',
+ author="Joe Gregorio",
+ author_email="joe@bitworking.org",
+ url="https://github.com/httplib2/httplib2",
+ description="A comprehensive HTTP client library.",
+ license="MIT",
long_description="""
A comprehensive HTTP client library, ``httplib2`` supports many features left out of other HTTP libraries.
@@ -57,23 +83,24 @@ A comprehensive HTTP client library, ``httplib2`` supports many features left ou
A large and growing set of unit tests.
""",
package_dir=pkgdir,
- packages=['httplib2'],
- package_data={'httplib2': ['*.txt']},
+ packages=["httplib2"],
+ package_data={"httplib2": ["*.txt"]},
+ tests_require=read_requirements("requirements-test.txt"),
+ cmdclass={"test": TestCommand},
classifiers=(
- 'Development Status :: 4 - Beta',
- 'Environment :: Web Environment',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: MIT License',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
- 'Topic :: Internet :: WWW/HTTP',
- 'Topic :: Software Development :: Libraries',
+ "Development Status :: 4 - Beta",
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: 3.6",
+ "Topic :: Internet :: WWW/HTTP",
+ "Topic :: Software Development :: Libraries",
),
)
diff --git a/tests/__init__.py b/tests/__init__.py
index 1af0720..69d7d10 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -15,6 +15,7 @@ import shutil
import six
import socket
import struct
+import sys
import threading
import time
import traceback
@@ -25,27 +26,28 @@ from six.moves import http_client, queue
@contextlib.contextmanager
def assert_raises(exc_type):
def _name(t):
- return getattr(t, '__name__', None) or str(t)
+ return getattr(t, "__name__", None) or str(t)
if not isinstance(exc_type, tuple):
exc_type = (exc_type,)
- names = ', '.join(map(_name, exc_type))
+ names = ", ".join(map(_name, exc_type))
try:
yield
except exc_type:
pass
else:
- assert False, 'Expected exception(s) {0}'.format(names)
+ assert False, "Expected exception(s) {0}".format(names)
class BufferedReader(object):
- '''io.BufferedReader with \r\n support
- '''
+ """io.BufferedReader with \r\n support
+ """
+
def __init__(self, sock):
- self._buf = b''
+ self._buf = b""
self._end = False
- self._newline = b'\r\n'
+ self._newline = b"\r\n"
self._sock = sock
if isinstance(sock, bytes):
self._sock = None
@@ -57,7 +59,7 @@ class BufferedReader(object):
while untilend or (len(self._buf) < target):
# crutch to enable HttpRequest.from_bytes
if self._sock is None:
- chunk = b''
+ chunk = b""
else:
chunk = self._sock.recv(8 << 10)
# print('!!! recv', chunk)
@@ -80,7 +82,7 @@ class BufferedReader(object):
def readall(self):
self._fill(untilend=True)
- chunk, self._buf = self._buf, b''
+ chunk, self._buf = self._buf, b""
return chunk
def readline(self):
@@ -104,16 +106,20 @@ def parse_http_message(kind, buf):
msg = kind()
msg.raw = start_line
if kind is HttpRequest:
- assert re.match(br'.+ HTTP/\d\.\d\r\n$', start_line), 'Start line does not look like HTTP request: ' + repr(start_line)
- msg.method, msg.uri, msg.proto = start_line.rstrip().decode().split(' ', 2)
- assert msg.proto.startswith('HTTP/'), repr(start_line)
+ assert re.match(
+ br".+ HTTP/\d\.\d\r\n$", start_line
+ ), "Start line does not look like HTTP request: " + repr(start_line)
+ msg.method, msg.uri, msg.proto = start_line.rstrip().decode().split(" ", 2)
+ assert msg.proto.startswith("HTTP/"), repr(start_line)
elif kind is HttpResponse:
- assert re.match(br'^HTTP/\d\.\d \d+ .+\r\n$', start_line), 'Start line does not look like HTTP response: ' + repr(start_line)
- msg.proto, msg.status, msg.reason = start_line.rstrip().decode().split(' ', 2)
+ assert re.match(
+ br"^HTTP/\d\.\d \d+ .+\r\n$", start_line
+ ), "Start line does not look like HTTP response: " + repr(start_line)
+ msg.proto, msg.status, msg.reason = start_line.rstrip().decode().split(" ", 2)
msg.status = int(msg.status)
- assert msg.proto.startswith('HTTP/'), repr(start_line)
+ assert msg.proto.startswith("HTTP/"), repr(start_line)
else:
- raise Exception('Use HttpRequest or HttpResponse .from_{bytes,buffered}')
+ raise Exception("Use HttpRequest or HttpResponse .from_{bytes,buffered}")
msg.version = msg.proto[5:]
while True:
@@ -122,19 +128,19 @@ def parse_http_message(kind, buf):
line = line.rstrip()
if not line:
break
- t = line.decode().split(':', 1)
+ t = line.decode().split(":", 1)
msg.headers[t[0].lower()] = t[1].lstrip()
- content_length_string = msg.headers.get('content-length', '')
+ content_length_string = msg.headers.get("content-length", "")
if content_length_string.isdigit():
content_length = int(content_length_string)
msg.body = msg.body_raw = buf.read(content_length)
- elif msg.headers.get('transfer-encoding') == 'chunked':
+ elif msg.headers.get("transfer-encoding") == "chunked":
        raise NotImplementedError
- elif msg.version == '1.0':
+ elif msg.version == "1.0":
msg.body = msg.body_raw = buf.readall()
else:
- msg.body = msg.body_raw = b''
+ msg.body = msg.body_raw = b""
msg.raw += msg.body_raw
return msg
@@ -154,7 +160,7 @@ class HttpMessage(object):
return parse_http_message(cls, buf)
def __repr__(self):
- return '{} {}'.format(self.__class__, repr(vars(self)))
+ return "{} {}".format(self.__class__, repr(vars(self)))
class HttpRequest(HttpMessage):
@@ -178,22 +184,30 @@ class MockResponse(six.BytesIO):
class MockHTTPConnection(object):
- '''This class is just a mock of httplib.HTTPConnection used for testing
- '''
-
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None):
+ """This class is just a mock of httplib.HTTPConnection used for testing
+ """
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ):
self.host = host
self.port = port
self.timeout = timeout
- self.log = ''
+ self.log = ""
self.sock = None
def set_debuglevel(self, level):
pass
def connect(self):
- 'Connect to a host on a given port.'
+ "Connect to a host on a given port."
pass
def close(self):
@@ -203,21 +217,29 @@ class MockHTTPConnection(object):
pass
def getresponse(self):
- return MockResponse(b'the body', status='200')
+ return MockResponse(b"the body", status="200")
class MockHTTPBadStatusConnection(object):
- '''Mock of httplib.HTTPConnection that raises BadStatusLine.
- '''
+ """Mock of httplib.HTTPConnection that raises BadStatusLine.
+ """
num_calls = 0
- def __init__(self, host, port=None, key_file=None, cert_file=None,
- strict=None, timeout=None, proxy_info=None):
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ strict=None,
+ timeout=None,
+ proxy_info=None,
+ ):
self.host = host
self.port = port
self.timeout = timeout
- self.log = ''
+ self.log = ""
self.sock = None
MockHTTPBadStatusConnection.num_calls = 0
@@ -235,7 +257,7 @@ class MockHTTPBadStatusConnection(object):
def getresponse(self):
MockHTTPBadStatusConnection.num_calls += 1
- raise http_client.BadStatusLine('')
+ raise http_client.BadStatusLine("")
@contextlib.contextmanager
@@ -247,7 +269,7 @@ def server_socket(fun, request_count=1, timeout=5):
gcounter[0] += 1
keep = True
keep &= gcounter[0] < request_count
- keep &= request.headers.get('connection', '').lower() != 'close'
+ keep &= request.headers.get("connection", "").lower() != "close"
return keep
def server_socket_thread(srv):
@@ -266,23 +288,28 @@ def server_socket(fun, request_count=1, timeout=5):
# at least in other/connection_close test
# should not be a problem since socket would close upon garbage collection
if gcounter[0] > request_count:
- gresult[0] = Exception('Request count expected={0} actual={1}'.format(request_count, gcounter[0]))
+ gresult[0] = Exception(
+ "Request count expected={0} actual={1}".format(
+ request_count, gcounter[0]
+ )
+ )
except Exception as e:
- traceback.print_exc()
+ # traceback.print_exc caused IOError: concurrent operation on sys.stderr.close() under setup.py test
+            sys.stderr.write(traceback.format_exc())
gresult[0] = e
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- server.bind(('localhost', 0))
+ server.bind(("localhost", 0))
try:
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error as ex:
- print('non critical error on SO_REUSEADDR', ex)
+ print("non critical error on SO_REUSEADDR", ex)
server.listen(10)
server.settimeout(timeout)
t = threading.Thread(target=server_socket_thread, args=(server,))
t.daemon = True
t.start()
- yield u'http://{0}:{1}/'.format(*server.getsockname())
+ yield u"http://{0}:{1}/".format(*server.getsockname())
server.close()
t.join()
if gresult[0] is not None:
@@ -336,43 +363,64 @@ def server_const_bytes(response_content, **kwargs):
_http_kwargs = (
- 'proto', 'status', 'headers', 'body', 'add_content_length', 'add_date', 'add_etag', 'undefined_body_length',
+ "proto",
+ "status",
+ "headers",
+ "body",
+ "add_content_length",
+ "add_date",
+ "add_etag",
+ "undefined_body_length",
)
-def http_response_bytes(proto='HTTP/1.1', status='200 OK', headers=None, body=b'',
- add_content_length=True, add_date=False, add_etag=False,
- undefined_body_length=False,
- **kwargs):
+def http_response_bytes(
+ proto="HTTP/1.1",
+ status="200 OK",
+ headers=None,
+ body=b"",
+ add_content_length=True,
+ add_date=False,
+ add_etag=False,
+ undefined_body_length=False,
+ **kwargs
+):
if undefined_body_length:
add_content_length = False
if headers is None:
headers = {}
if add_content_length:
- headers.setdefault('content-length', str(len(body)))
+ headers.setdefault("content-length", str(len(body)))
if add_date:
- headers.setdefault('date', email.utils.formatdate())
+ headers.setdefault("date", email.utils.formatdate())
if add_etag:
- headers.setdefault('etag', '"{0}"'.format(hashlib.md5(body).hexdigest()))
- header_string = ''.join('{0}: {1}\r\n'.format(k, v) for k, v in headers.items())
- if not undefined_body_length and proto != 'HTTP/1.0' and 'content-length' not in headers:
- raise Exception('httplib2.tests.http_response_bytes: client could not figure response body length')
+ headers.setdefault("etag", '"{0}"'.format(hashlib.md5(body).hexdigest()))
+ header_string = "".join("{0}: {1}\r\n".format(k, v) for k, v in headers.items())
+ if (
+ not undefined_body_length
+ and proto != "HTTP/1.0"
+ and "content-length" not in headers
+ ):
+ raise Exception(
+ "httplib2.tests.http_response_bytes: client could not figure response body length"
+ )
if str(status).isdigit():
- status = '{} {}'.format(status, http_client.responses[status])
- response = '{proto} {status}\r\n{headers}\r\n'.format(
- proto=proto,
- status=status,
- headers=header_string,
- ).encode() + body
+ status = "{} {}".format(status, http_client.responses[status])
+ response = (
+ "{proto} {status}\r\n{headers}\r\n".format(
+ proto=proto, status=status, headers=header_string
+ ).encode()
+ + body
+ )
return response
def make_http_reflect(**kwargs):
- assert 'body' not in kwargs, 'make_http_reflect will overwrite response body'
+ assert "body" not in kwargs, "make_http_reflect will overwrite response " "body"
def fun(request):
kw = copy.deepcopy(kwargs)
- kw['body'] = request.raw
+ kw["body"] = request.raw
response = http_response_bytes(**kw)
return response
@@ -380,8 +428,8 @@ def make_http_reflect(**kwargs):
def server_route(routes, **kwargs):
- response_404 = http_response_bytes(status='404 Not Found')
- response_wildcard = routes.get('')
+ response_404 = http_response_bytes(status="404 Not Found")
+ response_wildcard = routes.get("")
def handler(request):
target = routes.get(request.uri, response_wildcard) or response_404
@@ -395,10 +443,7 @@ def server_route(routes, **kwargs):
def server_const_http(**kwargs):
- response_kwargs = {
- k: kwargs.pop(k) for k in dict(kwargs)
- if k in _http_kwargs
- }
+ response_kwargs = {k: kwargs.pop(k) for k in dict(kwargs) if k in _http_kwargs}
response = http_response_bytes(**response_kwargs)
return server_const_bytes(response, **kwargs)
@@ -409,23 +454,20 @@ def server_list_http(responses, **kwargs):
def handler(request):
return next(i)
- kwargs.setdefault('request_count', len(responses))
+ kwargs.setdefault("request_count", len(responses))
return server_request(handler, **kwargs)
def server_reflect(**kwargs):
- response_kwargs = {
- k: kwargs.pop(k) for k in dict(kwargs)
- if k in _http_kwargs
- }
+ response_kwargs = {k: kwargs.pop(k) for k in dict(kwargs) if k in _http_kwargs}
http_handler = make_http_reflect(**response_kwargs)
return server_request(http_handler, **kwargs)
def http_parse_auth(s):
- '''https://tools.ietf.org/html/rfc7235#section-2.1
- '''
- scheme, rest = s.split(' ', 1)
+ """https://tools.ietf.org/html/rfc7235#section-2.1
+ """
+ scheme, rest = s.split(" ", 1)
result = {}
while True:
m = httplib2.WWW_AUTH_RELAXED.search(rest)
@@ -433,7 +475,7 @@ def http_parse_auth(s):
break
if len(m.groups()) == 3:
key, value, rest = m.groups()
- result[key.lower()] = httplib2.UNQUOTE_PAIRS.sub(r'\1', value)
+ result[key.lower()] = httplib2.UNQUOTE_PAIRS.sub(r"\1", value)
return result
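+# Illustrative example, not part of the original file:
+#   http_parse_auth('Digest realm="test", nonce="abc", qop=auth')
+#   -> {'realm': 'test', 'nonce': 'abc', 'qop': 'auth'}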
@@ -446,30 +488,34 @@ def store_request_response(out):
response = HttpResponse.from_bytes(response_bytes)
out.append((request, response))
return response_bytes
+
return wrapped
+
return wrapper
-def http_reflect_with_auth(allow_scheme, allow_credentials, out_renew_nonce=None, out_requests=None):
- '''
- allow_scheme - 'basic', 'digest', etc
- allow_credentials - sequence of ('name', 'password')
- out_renew_nonce - None | [function]
+def http_reflect_with_auth(
+ allow_scheme, allow_credentials, out_renew_nonce=None, out_requests=None
+):
+ """allow_scheme - 'basic', 'digest', etc allow_credentials - sequence of ('name', 'password') out_renew_nonce - None | [function]
+
    A way to return the nonce renewal function to the caller,
    similar to an `out` parameter in some programming languages.
    Allows keeping the same signature for all handler builder functions.
out_requests - None | []
If set to list, every parsed request will be appended here.
- '''
+ """
glastnc = [None]
gnextnonce = [None]
- gserver_nonce = [gen_digest_nonce(salt=b'n')]
- realm = 'httplib2 test'
- server_opaque = gen_digest_nonce(salt=b'o')
+ gserver_nonce = [gen_digest_nonce(salt=b"n")]
+ realm = "httplib2 test"
+ server_opaque = gen_digest_nonce(salt=b"o")
def renew_nonce():
if gnextnonce[0]:
- assert False, 'previous nextnonce was not used, probably bug in test code'
+            assert False, (
+                "previous nextnonce was not used, probably a bug in test code"
+            )
gnextnonce[0] = gen_digest_nonce()
return gserver_nonce[0], gnextnonce[0]
@@ -477,46 +523,48 @@ def http_reflect_with_auth(allow_scheme, allow_credentials, out_renew_nonce=None
out_renew_nonce[0] = renew_nonce
def deny(**kwargs):
- nonce_stale = kwargs.pop('nonce_stale', False)
+ nonce_stale = kwargs.pop("nonce_stale", False)
if nonce_stale:
- kwargs.setdefault('body', b'nonce stale')
- if allow_scheme == 'basic':
+ kwargs.setdefault("body", b"nonce stale")
+ if allow_scheme == "basic":
authenticate = 'basic realm="{realm}"'.format(realm=realm)
- elif allow_scheme == 'digest':
+ elif allow_scheme == "digest":
authenticate = (
'digest realm="{realm}", qop="auth"'
+ ', nonce="{nonce}", opaque="{opaque}"'
- + (', stale=true' if nonce_stale else '')
+ + (", stale=true" if nonce_stale else "")
).format(realm=realm, nonce=gserver_nonce[0], opaque=server_opaque)
else:
- raise Exception('unknown allow_scheme={0}'.format(allow_scheme))
- deny_headers = {'www-authenticate': authenticate}
- kwargs.setdefault('status', 401)
+ raise Exception("unknown allow_scheme={0}".format(allow_scheme))
+ deny_headers = {"www-authenticate": authenticate}
+ kwargs.setdefault("status", 401)
# supplied headers may overwrite generated ones
- deny_headers.update(kwargs.get('headers', {}))
- kwargs['headers'] = deny_headers
- kwargs.setdefault('body', b'HTTP authorization required')
+ deny_headers.update(kwargs.get("headers", {}))
+ kwargs["headers"] = deny_headers
+ kwargs.setdefault("body", b"HTTP authorization required")
return http_response_bytes(**kwargs)
@store_request_response(out_requests)
def http_reflect_with_auth_handler(request):
- auth_header = request.headers.get('authorization', '')
+ auth_header = request.headers.get("authorization", "")
if not auth_header:
return deny()
- if ' ' not in auth_header:
- return http_response_bytes(status=400, body=b'authorization header syntax error')
- scheme, data = auth_header.split(' ', 1)
+ if " " not in auth_header:
+ return http_response_bytes(
+ status=400, body=b"authorization header syntax error"
+ )
+ scheme, data = auth_header.split(" ", 1)
scheme = scheme.lower()
if scheme != allow_scheme:
- return deny(body=b'must use different auth scheme')
- if scheme == 'basic':
+ return deny(body=b"must use different auth scheme")
+ if scheme == "basic":
decoded = base64.b64decode(data).decode()
- username, password = decoded.split(':', 1)
+ username, password = decoded.split(":", 1)
if (username, password) in allow_credentials:
return make_http_reflect()(request)
else:
- return deny(body=b'supplied credentials are not allowed')
- elif scheme == 'digest':
+ return deny(body=b"supplied credentials are not allowed")
+ elif scheme == "digest":
server_nonce_old = gserver_nonce[0]
nextnonce = gnextnonce[0]
if nextnonce:
@@ -525,83 +573,101 @@ def http_reflect_with_auth(allow_scheme, allow_credentials, out_renew_nonce=None
gnextnonce[0] = None
server_nonce_current = gserver_nonce[0]
auth_info = http_parse_auth(data)
- client_cnonce = auth_info.get('cnonce', '')
- client_nc = auth_info.get('nc', '')
- client_nonce = auth_info.get('nonce', '')
- client_opaque = auth_info.get('opaque', '')
- client_qop = auth_info.get('qop', 'auth').strip('"')
+ client_cnonce = auth_info.get("cnonce", "")
+ client_nc = auth_info.get("nc", "")
+ client_nonce = auth_info.get("nonce", "")
+ client_opaque = auth_info.get("opaque", "")
+ client_qop = auth_info.get("qop", "auth").strip('"')
# TODO: auth_info.get('algorithm', 'md5')
hasher = hashlib.md5
# TODO: client_qop auth-int
- ha2 = hasher(':'.join((request.method, request.uri)).encode()).hexdigest()
+ ha2 = hasher(":".join((request.method, request.uri)).encode()).hexdigest()
if client_nonce != server_nonce_current:
if client_nonce == server_nonce_old:
return deny(nonce_stale=True)
- return deny(body=b'invalid nonce')
+ return deny(body=b"invalid nonce")
if not client_nc:
- return deny(body=b'auth-info nc missing')
+ return deny(body=b"auth-info nc missing")
if client_opaque != server_opaque:
- return deny(body='auth-info opaque mismatch expected={} actual={}'
- .format(server_opaque, client_opaque).encode())
+ return deny(
+ body="auth-info opaque mismatch expected={} actual={}".format(
+ server_opaque, client_opaque
+ ).encode()
+ )
for allow_username, allow_password in allow_credentials:
- ha1 = hasher(':'.join((allow_username, realm, allow_password)).encode()).hexdigest()
- allow_response = hasher(':'.join((
- ha1, client_nonce, client_nc, client_cnonce, client_qop, ha2,
- )).encode()).hexdigest()
- rspauth_ha2 = hasher(':{}'.format(request.uri).encode()).hexdigest()
- rspauth = hasher(':'.join((
- ha1, client_nonce, client_nc, client_cnonce, client_qop, rspauth_ha2,
- )).encode()).hexdigest()
- if auth_info.get('response', '') == allow_response:
+ ha1 = hasher(
+ ":".join((allow_username, realm, allow_password)).encode()
+ ).hexdigest()
+ allow_response = hasher(
+ ":".join(
+ (ha1, client_nonce, client_nc, client_cnonce, client_qop, ha2)
+ ).encode()
+ ).hexdigest()
+ rspauth_ha2 = hasher(":{}".format(request.uri).encode()).hexdigest()
+ rspauth = hasher(
+ ":".join(
+ (
+ ha1,
+ client_nonce,
+ client_nc,
+ client_cnonce,
+ client_qop,
+ rspauth_ha2,
+ )
+ ).encode()
+ ).hexdigest()
+ if auth_info.get("response", "") == allow_response:
# TODO: fix or remove doubtful comment
# do we need to save nc only on success?
glastnc[0] = client_nc
allow_headers = {
- 'authentication-info': ' '.join((
- 'nextnonce="{}"'.format(nextnonce) if nextnonce else '',
- 'qop={}'.format(client_qop),
- 'rspauth="{}"'.format(rspauth),
- 'cnonce="{}"'.format(client_cnonce),
- 'nc={}'.format(client_nc),
- )).strip(),
+ "authentication-info": " ".join(
+ (
+ 'nextnonce="{}"'.format(nextnonce) if nextnonce else "",
+ "qop={}".format(client_qop),
+ 'rspauth="{}"'.format(rspauth),
+ 'cnonce="{}"'.format(client_cnonce),
+ "nc={}".format(client_nc),
+ )
+ ).strip()
}
return make_http_reflect(headers=allow_headers)(request)
- return deny(body=b'supplied credentials are not allowed')
+ return deny(body=b"supplied credentials are not allowed")
else:
return http_response_bytes(
status=400,
- body='unknown authorization scheme={0}'.format(scheme).encode(),
+ body="unknown authorization scheme={0}".format(scheme).encode(),
)
return http_reflect_with_auth_handler
def get_cache_path():
- default = './_httplib2_test_cache'
- path = os.environ.get('httplib2_test_cache_path') or default
+ default = "./_httplib2_test_cache"
+ path = os.environ.get("httplib2_test_cache_path") or default
if os.path.exists(path):
shutil.rmtree(path)
return path
-def gen_digest_nonce(salt=b''):
- t = struct.pack('>Q', int(time.time() * 1e9))
- return base64.b64encode(t + b':' + hashlib.sha1(t + salt).digest()).decode()
+def gen_digest_nonce(salt=b""):
+ t = struct.pack(">Q", int(time.time() * 1e9))
+ return base64.b64encode(t + b":" + hashlib.sha1(t + salt).digest()).decode()
def gen_password():
length = random.randint(8, 64)
- return ''.join(six.unichr(random.randint(0, 127)) for _ in range(length))
+ return "".join(six.unichr(random.randint(0, 127)) for _ in range(length))
def gzip_compress(bs):
# gzipobj = zlib.compressobj(9, zlib.DEFLATED, zlib.MAX_WBITS | 16)
# result = gzipobj.compress(text) + gzipobj.flush()
buf = six.BytesIO()
- gf = gzip.GzipFile(fileobj=buf, mode='wb', compresslevel=6)
+ gf = gzip.GzipFile(fileobj=buf, mode="wb", compresslevel=6)
gf.write(bs)
gf.close()
return buf.getvalue()
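+# Illustrative note, not part of the original file: the commented zlib route
+# above would produce the same gzip container; wbits = MAX_WBITS | 16 selects
+# a gzip (rather than raw deflate) stream.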
diff --git a/tests/test_auth.py b/tests/test_auth.py
index 3768f2b..6efd6cb 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -6,32 +6,34 @@ from six.moves import urllib
def test_credentials():
c = httplib2.Credentials()
- c.add('joe', 'password')
- assert tuple(c.iter('bitworking.org'))[0] == ('joe', 'password')
- assert tuple(c.iter(''))[0] == ('joe', 'password')
- c.add('fred', 'password2', 'wellformedweb.org')
- assert tuple(c.iter('bitworking.org'))[0] == ('joe', 'password')
- assert len(tuple(c.iter('bitworking.org'))) == 1
- assert len(tuple(c.iter('wellformedweb.org'))) == 2
- assert ('fred', 'password2') in tuple(c.iter('wellformedweb.org'))
+ c.add("joe", "password")
+ assert tuple(c.iter("bitworking.org"))[0] == ("joe", "password")
+ assert tuple(c.iter(""))[0] == ("joe", "password")
+ c.add("fred", "password2", "wellformedweb.org")
+ assert tuple(c.iter("bitworking.org"))[0] == ("joe", "password")
+ assert len(tuple(c.iter("bitworking.org"))) == 1
+ assert len(tuple(c.iter("wellformedweb.org"))) == 2
+ assert ("fred", "password2") in tuple(c.iter("wellformedweb.org"))
c.clear()
- assert len(tuple(c.iter('bitworking.org'))) == 0
- c.add('fred', 'password2', 'wellformedweb.org')
- assert ('fred', 'password2') in tuple(c.iter('wellformedweb.org'))
- assert len(tuple(c.iter('bitworking.org'))) == 0
- assert len(tuple(c.iter(''))) == 0
+ assert len(tuple(c.iter("bitworking.org"))) == 0
+ c.add("fred", "password2", "wellformedweb.org")
+ assert ("fred", "password2") in tuple(c.iter("wellformedweb.org"))
+ assert len(tuple(c.iter("bitworking.org"))) == 0
+ assert len(tuple(c.iter(""))) == 0
def test_basic():
# Test Basic Authentication
http = httplib2.Http()
password = tests.gen_password()
- handler = tests.http_reflect_with_auth(allow_scheme='basic', allow_credentials=(('joe', password),))
+ handler = tests.http_reflect_with_auth(
+ allow_scheme="basic", allow_credentials=(("joe", password),)
+ )
with tests.server_request(handler, request_count=3) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 401
- http.add_credentials('joe', password)
- response, content = http.request(uri, 'GET')
+ http.add_credentials("joe", password)
+ response, content = http.request(uri, "GET")
assert response.status == 200
@@ -39,16 +41,18 @@ def test_basic_for_domain():
# Test Basic Authentication
http = httplib2.Http()
password = tests.gen_password()
- handler = tests.http_reflect_with_auth(allow_scheme='basic', allow_credentials=(('joe', password),))
+ handler = tests.http_reflect_with_auth(
+ allow_scheme="basic", allow_credentials=(("joe", password),)
+ )
with tests.server_request(handler, request_count=4) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 401
- http.add_credentials('joe', password, 'example.org')
- response, content = http.request(uri, 'GET')
+ http.add_credentials("joe", password, "example.org")
+ response, content = http.request(uri, "GET")
assert response.status == 401
domain = urllib.parse.urlparse(uri)[1]
- http.add_credentials('joe', password, domain)
- response, content = http.request(uri, 'GET')
+ http.add_credentials("joe", password, domain)
+ response, content = http.request(uri, "GET")
assert response.status == 200
@@ -57,17 +61,19 @@ def test_basic_two_credentials():
http = httplib2.Http()
password1 = tests.gen_password()
password2 = tests.gen_password()
- allowed = [('joe', password1)] # exploit shared mutable list
- handler = tests.http_reflect_with_auth(allow_scheme='basic', allow_credentials=allowed)
+ allowed = [("joe", password1)] # exploit shared mutable list
+ handler = tests.http_reflect_with_auth(
+ allow_scheme="basic", allow_credentials=allowed
+ )
with tests.server_request(handler, request_count=7) as uri:
- http.add_credentials('fred', password2)
- response, content = http.request(uri, 'GET')
+ http.add_credentials("fred", password2)
+ response, content = http.request(uri, "GET")
assert response.status == 401
- http.add_credentials('joe', password1)
- response, content = http.request(uri, 'GET')
+ http.add_credentials("joe", password1)
+ response, content = http.request(uri, "GET")
assert response.status == 200
- allowed[0] = ('fred', password2)
- response, content = http.request(uri, 'GET')
+ allowed[0] = ("fred", password2)
+ response, content = http.request(uri, "GET")
assert response.status == 200
@@ -75,12 +81,14 @@ def test_digest():
# Test that we support Digest Authentication
http = httplib2.Http()
password = tests.gen_password()
- handler = tests.http_reflect_with_auth(allow_scheme='digest', allow_credentials=(('joe', password),))
+ handler = tests.http_reflect_with_auth(
+ allow_scheme="digest", allow_credentials=(("joe", password),)
+ )
with tests.server_request(handler, request_count=3) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 401
- http.add_credentials('joe', password)
- response, content = http.request(uri, 'GET')
+ http.add_credentials("joe", password)
+ response, content = http.request(uri, "GET")
assert response.status == 200, content.decode()
@@ -91,25 +99,25 @@ def test_digest_next_nonce_nc():
password = tests.gen_password()
grenew_nonce = [None]
handler = tests.http_reflect_with_auth(
- allow_scheme='digest',
- allow_credentials=(('joe', password),),
+ allow_scheme="digest",
+ allow_credentials=(("joe", password),),
out_renew_nonce=grenew_nonce,
)
with tests.server_request(handler, request_count=5) as uri:
- http.add_credentials('joe', password)
- response1, _ = http.request(uri, 'GET')
- info = httplib2._parse_www_authenticate(response1, 'authentication-info')
+ http.add_credentials("joe", password)
+ response1, _ = http.request(uri, "GET")
+ info = httplib2._parse_www_authenticate(response1, "authentication-info")
assert response1.status == 200
- assert info.get('digest', {}).get('nc') == '00000001', info
- assert not info.get('digest', {}).get('nextnonce'), info
- response2, _ = http.request(uri, 'GET')
- info2 = httplib2._parse_www_authenticate(response2, 'authentication-info')
- assert info2.get('digest', {}).get('nc') == '00000002', info2
+ assert info.get("digest", {}).get("nc") == "00000001", info
+ assert not info.get("digest", {}).get("nextnonce"), info
+ response2, _ = http.request(uri, "GET")
+ info2 = httplib2._parse_www_authenticate(response2, "authentication-info")
+ assert info2.get("digest", {}).get("nc") == "00000002", info2
grenew_nonce[0]()
- response3, content = http.request(uri, 'GET')
- info3 = httplib2._parse_www_authenticate(response3, 'authentication-info')
+ response3, content = http.request(uri, "GET")
+ info3 = httplib2._parse_www_authenticate(response3, "authentication-info")
assert response3.status == 200
- assert info3.get('digest', {}).get('nc') == '00000001', info3
+ assert info3.get("digest", {}).get("nc") == "00000001", info3
def test_digest_auth_stale():
@@ -119,55 +127,97 @@ def test_digest_auth_stale():
grenew_nonce = [None]
requests = []
handler = tests.http_reflect_with_auth(
- allow_scheme='digest',
- allow_credentials=(('joe', password),),
+ allow_scheme="digest",
+ allow_credentials=(("joe", password),),
out_renew_nonce=grenew_nonce,
out_requests=requests,
)
with tests.server_request(handler, request_count=4) as uri:
- http.add_credentials('joe', password)
- response, _ = http.request(uri, 'GET')
+ http.add_credentials("joe", password)
+ response, _ = http.request(uri, "GET")
assert response.status == 200
- info = httplib2._parse_www_authenticate(requests[0][1].headers, 'www-authenticate')
+ info = httplib2._parse_www_authenticate(
+ requests[0][1].headers, "www-authenticate"
+ )
grenew_nonce[0]()
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
- assert getattr(response, '_stale_digest', False)
- info2 = httplib2._parse_www_authenticate(requests[2][1].headers, 'www-authenticate')
- nonce1 = info.get('digest', {}).get('nonce', '')
- nonce2 = info2.get('digest', {}).get('nonce', '')
- assert nonce1 != ''
- assert nonce2 != ''
+ assert getattr(response, "_stale_digest", False)
+ info2 = httplib2._parse_www_authenticate(
+ requests[2][1].headers, "www-authenticate"
+ )
+ nonce1 = info.get("digest", {}).get("nonce", "")
+ nonce2 = info2.get("digest", {}).get("nonce", "")
+ assert nonce1 != ""
+ assert nonce2 != ""
assert nonce1 != nonce2, (nonce1, nonce2)
@pytest.mark.parametrize(
- 'data', (
+ "data",
+ (
({}, {}),
- ({'www-authenticate': ''}, {}),
- ({'www-authenticate': 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'},
- {'test': {'realm': 'test realm', 'foo': 'foo', 'bar': 'bar', 'baz': 'baz', 'qux': 'qux'}}),
- ({'www-authenticate': 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'},
- {'t*!%#st': {'realm': 'to*!%#en', 'to*!%#en': 'quoted string'}}),
- ({'www-authenticate': 'Test realm="a \\"test\\" realm"'},
- {'test': {'realm': 'a "test" realm'}}),
- ({'www-authenticate': 'Basic realm="me"'},
- {'basic': {'realm': 'me'}}),
- ({'www-authenticate': 'Basic realm="me", algorithm="MD5"'},
- {'basic': {'realm': 'me', 'algorithm': 'MD5'}}),
- ({'www-authenticate': 'Basic realm="me", algorithm=MD5'},
- {'basic': {'realm': 'me', 'algorithm': 'MD5'}}),
- ({'www-authenticate': 'Basic realm="me",other="fred" '},
- {'basic': {'realm': 'me', 'other': 'fred'}}),
- ({'www-authenticate': 'Basic REAlm="me" '},
- {'basic': {'realm': 'me'}}),
- ({'www-authenticate': 'Digest realm="digest1", qop="auth,auth-int", nonce="7102dd2", opaque="e9517f"'},
- {'digest': {'realm': 'digest1', 'qop': 'auth,auth-int', 'nonce': '7102dd2', 'opaque': 'e9517f'}}),
+ ({"www-authenticate": ""}, {}),
+ (
+ {
+ "www-authenticate": 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'
+ },
+ {
+ "test": {
+ "realm": "test realm",
+ "foo": "foo",
+ "bar": "bar",
+ "baz": "baz",
+ "qux": "qux",
+ }
+ },
+ ),
+ (
+ {"www-authenticate": 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'},
+ {"t*!%#st": {"realm": "to*!%#en", "to*!%#en": "quoted string"}},
+ ),
+ (
+ {"www-authenticate": 'Test realm="a \\"test\\" realm"'},
+ {"test": {"realm": 'a "test" realm'}},
+ ),
+ ({"www-authenticate": 'Basic realm="me"'}, {"basic": {"realm": "me"}}),
+ (
+ {"www-authenticate": 'Basic realm="me", algorithm="MD5"'},
+ {"basic": {"realm": "me", "algorithm": "MD5"}},
+ ),
+ (
+ {"www-authenticate": 'Basic realm="me", algorithm=MD5'},
+ {"basic": {"realm": "me", "algorithm": "MD5"}},
+ ),
+ (
+ {"www-authenticate": 'Basic realm="me",other="fred" '},
+ {"basic": {"realm": "me", "other": "fred"}},
+ ),
+ ({"www-authenticate": 'Basic REAlm="me" '}, {"basic": {"realm": "me"}}),
+ (
+ {
+ "www-authenticate": 'Digest realm="digest1", qop="auth,auth-int", nonce="7102dd2", opaque="e9517f"'
+ },
+ {
+ "digest": {
+ "realm": "digest1",
+ "qop": "auth,auth-int",
+ "nonce": "7102dd2",
+ "opaque": "e9517f",
+ }
+ },
+ ),
# multiple scheme choice
- ({'www-authenticate': 'Digest realm="multi-d", nonce="8b11d0f6", opaque="cc069c" Basic realm="multi-b" '},
- {'digest': {'realm': 'multi-d', 'nonce': '8b11d0f6', 'opaque': 'cc069c'},
- 'basic': {'realm': 'multi-b'}}),
+ (
+ {
+ "www-authenticate": 'Digest realm="multi-d", nonce="8b11d0f6", opaque="cc069c" Basic realm="multi-b" '
+ },
+ {
+ "digest": {"realm": "multi-d", "nonce": "8b11d0f6", "opaque": "cc069c"},
+ "basic": {"realm": "multi-b"},
+ },
+ ),
# FIXME
# comma between schemes (glue for multiple headers with same name)
# ({'www-authenticate': 'Digest realm="2-comma-d", qop="auth-int", nonce="c0c8ff1", Basic realm="2-comma-b"'},
@@ -186,10 +236,24 @@ def test_digest_auth_stale():
# 'basic': {'realm': 'zoo'},
# 'wsse': {'realm': 'very', 'profile': 'UsernameToken'}}),
# more quote combos
- ({'www-authenticate': 'Digest realm="myrealm", nonce="KBAA=3", algorithm=MD5, qop="auth", stale=true'},
- {'digest': {'realm': 'myrealm', 'nonce': 'KBAA=3', 'algorithm': 'MD5', 'qop': 'auth', 'stale': 'true'}}),
- ), ids=lambda data: str(data[0]))
-@pytest.mark.parametrize('strict', (True, False), ids=('strict', 'relax'))
+ (
+ {
+ "www-authenticate": 'Digest realm="myrealm", nonce="KBAA=3", algorithm=MD5, qop="auth", stale=true'
+ },
+ {
+ "digest": {
+ "realm": "myrealm",
+ "nonce": "KBAA=3",
+ "algorithm": "MD5",
+ "qop": "auth",
+ "stale": "true",
+ }
+ },
+ ),
+ ),
+ ids=lambda data: str(data[0]),
+)
+@pytest.mark.parametrize("strict", (True, False), ids=("strict", "relax"))
def test_parse_www_authenticate_correct(data, strict):
headers, info = data
# FIXME: move strict to parse argument
@@ -204,81 +268,104 @@ def test_parse_www_authenticate_malformed():
# TODO: test (and fix) header value 'barbqwnbm-bb...:asd' that leads to an infinite loop
with tests.assert_raises(httplib2.MalformedHeader):
httplib2._parse_www_authenticate(
- {'www-authenticate': 'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'}
+ {
+ "www-authenticate": 'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'
+ }
)
def test_digest_object():
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/test/digest/'
+ request_uri = "/test/digest/"
headers = {}
response = {
- 'www-authenticate': 'Digest realm="myrealm", nonce="KBAA=35", algorithm=MD5, qop="auth"'
+ "www-authenticate": 'Digest realm="myrealm", nonce="KBAA=35", algorithm=MD5, qop="auth"'
}
- content = b''
+ content = b""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
- d.request('GET', request_uri, headers, content, cnonce="33033375ec278a46")
- our_request = 'authorization: ' + headers['authorization']
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
+ d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
+ our_request = "authorization: " + headers["authorization"]
working_request = (
- 'authorization: Digest username="joe", realm="myrealm", nonce="KBAA=35", uri="/test/digest/"' +
- ', algorithm=MD5, response="de6d4a123b80801d0e94550411b6283f", qop=auth, nc=00000001, cnonce="33033375ec278a46"'
+ 'authorization: Digest username="joe", realm="myrealm", '
+ 'nonce="KBAA=35", uri="/test/digest/"'
+ + ', algorithm=MD5, response="de6d4a123b80801d0e94550411b6283f", '
+ 'qop=auth, nc=00000001, cnonce="33033375ec278a46"'
)
assert our_request == working_request
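The response value asserted in working_request is the standard qop=auth request digest from RFC 2617 section 3.2.2. A sketch that should reproduce it from the fixture values above (md5hex is an illustrative helper, not httplib2 API):

    import hashlib

    def md5hex(s):
        return hashlib.md5(s.encode()).hexdigest()

    ha1 = md5hex("joe:myrealm:password")
    ha2 = md5hex("GET:/test/digest/")
    digest = md5hex(":".join(
        [ha1, "KBAA=35", "00000001", "33033375ec278a46", "auth", ha2]))
    # expected to equal "de6d4a123b80801d0e94550411b6283f"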
def test_digest_object_with_opaque():
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/digest/opaque/'
+ request_uri = "/digest/opaque/"
headers = {}
response = {
- 'www-authenticate': 'Digest realm="myrealm", nonce="30352fd", algorithm=MD5, qop="auth", opaque="atestopaque"',
+ "www-authenticate": 'Digest realm="myrealm", nonce="30352fd", algorithm=MD5, '
+ 'qop="auth", opaque="atestopaque"'
}
- content = ''
+ content = ""
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
- d.request('GET', request_uri, headers, content, cnonce="5ec2")
- our_request = 'authorization: ' + headers['authorization']
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
+ d.request("GET", request_uri, headers, content, cnonce="5ec2")
+ our_request = "authorization: " + headers["authorization"]
working_request = (
- 'authorization: Digest username="joe", realm="myrealm", nonce="30352fd", uri="/digest/opaque/", algorithm=MD5' +
- ', response="a1fab43041f8f3789a447f48018bee48", qop=auth, nc=00000001, cnonce="5ec2", opaque="atestopaque"'
+ 'authorization: Digest username="joe", realm="myrealm", '
+ 'nonce="30352fd", uri="/digest/opaque/", algorithm=MD5'
+ + ', response="a1fab43041f8f3789a447f48018bee48", qop=auth, nc=00000001, '
+ 'cnonce="5ec2", opaque="atestopaque"'
)
assert our_request == working_request
def test_digest_object_stale():
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/digest/stale/'
+ request_uri = "/digest/stale/"
headers = {}
response = httplib2.Response({})
- response['www-authenticate'] = 'Digest realm="myrealm", nonce="bd669f", algorithm=MD5, qop="auth", stale=true'
+ response["www-authenticate"] = (
+ 'Digest realm="myrealm", nonce="bd669f", '
+ 'algorithm=MD5, qop="auth", stale=true'
+ )
response.status = 401
- content = b''
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
+ content = b""
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
# Returns true to force a retry
assert d.response(response, content)
def test_digest_object_auth_info():
- credentials = ('joe', 'password')
+ credentials = ("joe", "password")
host = None
- request_uri = '/digest/nextnonce/'
+ request_uri = "/digest/nextnonce/"
headers = {}
response = httplib2.Response({})
- response['www-authenticate'] = 'Digest realm="myrealm", nonce="barney", algorithm=MD5, qop="auth", stale=true'
- response['authentication-info'] = 'nextnonce="fred"'
- content = b''
- d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
+ response["www-authenticate"] = (
+ 'Digest realm="myrealm", nonce="barney", '
+ 'algorithm=MD5, qop="auth", stale=true'
+ )
+ response["authentication-info"] = 'nextnonce="fred"'
+ content = b""
+ d = httplib2.DigestAuthentication(
+ credentials, host, request_uri, headers, response, content, None
+ )
# Returns False: the nextnonce in Authentication-Info updates the challenge, no retry is needed
assert not d.response(response, content)
- assert d.challenge['nonce'] == 'fred'
- assert d.challenge['nc'] == 1
+ assert d.challenge["nonce"] == "fred"
+ assert d.challenge["nc"] == 1
def test_wsse_algorithm():
- digest = httplib2._wsse_username_token('d36e316282959a9ed4c89851497a717f', '2003-12-15T14:43:07Z', 'taadtaadpstcsm')
- expected = b'quR/EWLAV4xLf9Zqyw4pDmfV9OY='
+ digest = httplib2._wsse_username_token(
+ "d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm"
+ )
+ expected = b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="
assert expected == digest
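The check above follows the WSSE UsernameToken profile: PasswordDigest = Base64(SHA-1(Nonce + Created + Password)). A standalone sketch of the same computation (illustrative names, not the httplib2 API):

    import base64, hashlib

    def wsse_password_digest(nonce, created, password):
        raw = (nonce + created + password).encode("utf-8")
        return base64.b64encode(hashlib.sha1(raw).digest())

    # wsse_password_digest("d36e316282959a9ed4c89851497a717f",
    #                      "2003-12-15T14:43:07Z", "taadtaadpstcsm")
    # should equal b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="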
diff --git a/tests/test_cacerts_from_env.py b/tests/test_cacerts_from_env.py
new file mode 100644
index 0000000..cb2bd9f
--- /dev/null
+++ b/tests/test_cacerts_from_env.py
@@ -0,0 +1,72 @@
+import os
+import sys
+import mock
+import pytest
+import tempfile
+import httplib2
+
+
+CA_CERTS_BUILTIN = os.path.join(os.path.dirname(httplib2.__file__), "cacerts.txt")
+CERTIFI_CERTS_FILE = "unittest_certifi_file"
+CUSTOM_CA_CERTS = "unittest_custom_ca_certs"
+
+
+@pytest.fixture()
+def clean_env():
+ current_env_var = os.environ.get("HTTPLIB2_CA_CERTS")
+ if current_env_var is not None:
+ os.environ.pop("HTTPLIB2_CA_CERTS")
+ yield
+ if current_env_var is not None:
+ os.environ["HTTPLIB2_CA_CERTS"] = current_env_var
+
+
+@pytest.fixture()
+def ca_certs_tmpfile(clean_env):
+ tmpfd, tmpfile = tempfile.mkstemp()
+ open(tmpfile, "a").close()
+ yield tmpfile
+ os.remove(tmpfile)
+
+
+@mock.patch("httplib2.certs.certifi_available", False)
+@mock.patch("httplib2.certs.custom_ca_locater_available", False)
+def test_certs_file_from_builtin(clean_env):
+ assert httplib2.certs.where() == CA_CERTS_BUILTIN
+
+
+@mock.patch("httplib2.certs.certifi_available", False)
+@mock.patch("httplib2.certs.custom_ca_locater_available", False)
+def test_certs_file_from_environment(ca_certs_tmpfile):
+ os.environ["HTTPLIB2_CA_CERTS"] = ca_certs_tmpfile
+ assert httplib2.certs.where() == ca_certs_tmpfile
+ os.environ["HTTPLIB2_CA_CERTS"] = ""
+ with pytest.raises(RuntimeError):
+ httplib2.certs.where()
+ os.environ.pop("HTTPLIB2_CA_CERTS")
+ assert httplib2.certs.where() == CA_CERTS_BUILTIN
+
+
+@mock.patch("httplib2.certs.certifi_where", mock.MagicMock(return_value=CERTIFI_CERTS_FILE))
+@mock.patch("httplib2.certs.certifi_available", True)
+@mock.patch("httplib2.certs.custom_ca_locater_available", False)
+def test_certs_file_from_certifi(clean_env):
+ assert httplib2.certs.where() == CERTIFI_CERTS_FILE
+
+
+@mock.patch("httplib2.certs.certifi_available", False)
+@mock.patch("httplib2.certs.custom_ca_locater_available", True)
+@mock.patch("httplib2.certs.custom_ca_locater_where", mock.MagicMock(return_value=CUSTOM_CA_CERTS))
+def test_certs_file_from_custom_getter(clean_env):
+ assert httplib2.certs.where() == CUSTOM_CA_CERTS
+
+
+@mock.patch("httplib2.certs.certifi_available", False)
+@mock.patch("httplib2.certs.custom_ca_locater_available", False)
+def test_with_certifi_removed_from_modules(ca_certs_tmpfile):
+ if "certifi" in sys.modules:
+ del sys.modules["certifi"]
+ os.environ["HTTPLIB2_CA_CERTS"] = ca_certs_tmpfile
+ assert httplib2.certs.where() == ca_certs_tmpfile
+ os.environ.pop("HTTPLIB2_CA_CERTS")
+ assert httplib2.certs.where() == CA_CERTS_BUILTIN
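Taken together, these tests pin down the ca_certs lookup order introduced in 0.12.0: HTTPLIB2_CA_CERTS from the environment wins, then a custom ca_certs locater, then certifi, then the bundled cacerts.txt. A condensed sketch of that precedence (not the actual httplib2.certs code):

    import os

    def where(custom_locater=None, certifi_where=None, builtin="cacerts.txt"):
        env = os.environ.get("HTTPLIB2_CA_CERTS")
        if env is not None:
            if os.path.isfile(env):
                return env
            raise RuntimeError("HTTPLIB2_CA_CERTS is not a file: %r" % env)
        if custom_locater is not None:
            return custom_locater()
        if certifi_where is not None:
            return certifi_where()
        return builtin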
diff --git a/tests/test_cache.py b/tests/test_cache.py
index c2a2beb..1f1fde7 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -5,16 +5,17 @@ import re
import tests
import time
-
-dummy_url = 'http://127.0.0.1:1'
+dummy_url = "http://127.0.0.1:1"
def test_get_only_if_cached_cache_hit():
# Test that a GET with a cache and 'only-if-cached' serves from the cache
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(add_etag=True) as uri:
- http.request(uri, 'GET')
- response, content = http.request(uri, 'GET', headers={'cache-control': 'only-if-cached'})
+ http.request(uri, "GET")
+ response, content = http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
assert response.fromcache
assert response.status == 200
@@ -23,7 +24,9 @@ def test_get_only_if_cached_cache_miss():
# Test that a GET with 'only-if-cached' returns 504 on a cache miss
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(request_count=0) as uri:
- response, content = http.request(uri, 'GET', headers={'cache-control': 'only-if-cached'})
+ response, content = http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
assert not response.fromcache
assert response.status == 504
@@ -35,12 +38,14 @@ def test_get_only_if_cached_no_cache_at_all():
# test can't really be guaranteed to pass.
http = httplib2.Http()
with tests.server_const_http(request_count=0) as uri:
- response, content = http.request(uri, 'GET', headers={'cache-control': 'only-if-cached'})
+ response, content = http.request(
+ uri, "GET", headers={"cache-control": "only-if-cached"}
+ )
assert not response.fromcache
assert response.status == 504
-@pytest.mark.skip(reason='was commented in legacy code')
+@pytest.mark.skip(reason="was commented in legacy code")
def test_TODO_vary_no():
pass
# when there is no vary, a different Accept header (e.g.) should not
@@ -60,40 +65,31 @@ def test_TODO_vary_no():
# assert response.fromcache, "Should be from cache"
-def test_vary_header_simple():
- """
- RFC 2616 13.6
- When the cache receives a subsequent request whose Request-URI
- specifies one or more cache entries including a Vary header field,
- the cache MUST NOT use such a cache entry to construct a response
- to the new request unless all of the selecting request-headers
- present in the new request match the corresponding stored
- request-headers in the original request.
- """
- # test that the vary header is sent
+def test_vary_header_is_sent():
+ # Verifies RFC 2616 13.6.
+ # See https://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html.
http = httplib2.Http(cache=tests.get_cache_path())
response = tests.http_response_bytes(
- headers={'vary': 'Accept', 'cache-control': 'max-age=300'},
- add_date=True,
+ headers={"vary": "Accept", "cache-control": "max-age=300"}, add_date=True
)
with tests.server_const_bytes(response, request_count=3) as uri:
- response, content = http.request(uri, 'GET', headers={'accept': 'text/plain'})
+ response, content = http.request(uri, "GET", headers={"accept": "text/plain"})
assert response.status == 200
- assert 'vary' in response
+ assert "vary" in response
# get the resource again, from the cache since the Accept header in this
# request is the same as in the original request
- response, content = http.request(uri, 'GET', headers={'Accept': 'text/plain'})
+ response, content = http.request(uri, "GET", headers={"Accept": "text/plain"})
assert response.status == 200
assert response.fromcache, "Should be from cache"
# get the resource again, not from cache since the Accept header does not match
- response, content = http.request(uri, 'GET', headers={'Accept': 'text/html'})
+ response, content = http.request(uri, "GET", headers={"Accept": "text/html"})
assert response.status == 200
assert not response.fromcache, "Should not be from cache"
# get the resource again, without any Accept header, so again no match
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache, "Should not be from cache"
@@ -101,28 +97,38 @@ def test_vary_header_simple():
def test_vary_header_double():
http = httplib2.Http(cache=tests.get_cache_path())
response = tests.http_response_bytes(
- headers={'vary': 'Accept, Accept-Language', 'cache-control': 'max-age=300'},
+ headers={"vary": "Accept, Accept-Language", "cache-control": "max-age=300"},
add_date=True,
)
with tests.server_const_bytes(response, request_count=3) as uri:
- response, content = http.request(uri, 'GET', headers={
- 'Accept': 'text/plain',
- 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7',
- })
+ response, content = http.request(
+ uri,
+ "GET",
+ headers={
+ "Accept": "text/plain",
+ "Accept-Language": "da, en-gb;q=0.8, en;q=0.7",
+ },
+ )
assert response.status == 200
- assert 'vary' in response
+ assert "vary" in response
# this request is served from the cache
- response, content = http.request(uri, 'GET', headers={
- 'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
+ response, content = http.request(
+ uri,
+ "GET",
+ headers={
+ "Accept": "text/plain",
+ "Accept-Language": "da, en-gb;q=0.8, en;q=0.7",
+ },
+ )
assert response.fromcache, "Should be from cache"
- response, content = http.request(uri, 'GET', headers={'Accept': 'text/plain'})
+ response, content = http.request(uri, "GET", headers={"Accept": "text/plain"})
assert response.status == 200
assert not response.fromcache
# get the resource again, not from cache, varied headers don't match exactly
- response, content = http.request(uri, 'GET', headers={'Accept-Language': 'da'})
+ response, content = http.request(uri, "GET", headers={"Accept-Language": "da"})
assert response.status == 200
assert not response.fromcache, "Should not be from cache"
@@ -130,17 +136,17 @@ def test_vary_header_double():
def test_vary_unused_header():
http = httplib2.Http(cache=tests.get_cache_path())
response = tests.http_response_bytes(
- headers={'vary': 'X-No-Such-Header', 'cache-control': 'max-age=300'},
+ headers={"vary": "X-No-Such-Header", "cache-control": "max-age=300"},
add_date=True,
)
with tests.server_const_bytes(response, request_count=1) as uri:
# A header's value is not considered to vary if it's not used at all.
- response, content = http.request(uri, 'GET', headers={'Accept': 'text/plain'})
+ response, content = http.request(uri, "GET", headers={"Accept": "text/plain"})
assert response.status == 200
- assert 'vary' in response
+ assert "vary" in response
# this request is served from the cache
- response, content = http.request(uri, 'GET', headers={'Accept': 'text/plain'})
+ response, content = http.request(uri, "GET", headers={"Accept": "text/plain"})
assert response.fromcache, "Should be from cache"
@@ -148,16 +154,23 @@ def test_get_cache_control_no_cache():
# Test Cache-Control: no-cache on requests
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(
- add_date=True, add_etag=True,
- headers={'cache-control': 'max-age=300'}, request_count=2) as uri:
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
+ add_date=True,
+ add_etag=True,
+ headers={"cache-control": "max-age=300"},
+ request_count=2,
+ ) as uri:
+ response, _ = http.request(uri, "GET", headers={"accept-encoding": "identity"})
assert response.status == 200
- assert response['etag'] != ''
+ assert response["etag"] != ""
assert not response.fromcache
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
+ response, _ = http.request(uri, "GET", headers={"accept-encoding": "identity"})
assert response.status == 200
assert response.fromcache
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity', 'Cache-Control': 'no-cache'})
+ response, _ = http.request(
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "Cache-Control": "no-cache"},
+ )
assert response.status == 200
assert not response.fromcache
@@ -166,14 +179,19 @@ def test_get_cache_control_pragma_no_cache():
# Test Pragma: no-cache on requests
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(
- add_date=True, add_etag=True,
- headers={'cache-control': 'max-age=300'}, request_count=2) as uri:
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
- assert response['etag'] != ''
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
+ add_date=True,
+ add_etag=True,
+ headers={"cache-control": "max-age=300"},
+ request_count=2,
+ ) as uri:
+ response, _ = http.request(uri, "GET", headers={"accept-encoding": "identity"})
+ assert response["etag"] != ""
+ response, _ = http.request(uri, "GET", headers={"accept-encoding": "identity"})
assert response.status == 200
assert response.fromcache
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity', 'Pragma': 'no-cache'})
+ response, _ = http.request(
+ uri, "GET", headers={"accept-encoding": "identity", "Pragma": "no-cache"}
+ )
assert response.status == 200
assert not response.fromcache
@@ -182,12 +200,15 @@ def test_get_cache_control_no_store_request():
# A no-store request means that the response should not be stored.
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(
- add_date=True, add_etag=True,
- headers={'cache-control': 'max-age=300'}, request_count=2) as uri:
- response, _ = http.request(uri, 'GET', headers={'Cache-Control': 'no-store'})
+ add_date=True,
+ add_etag=True,
+ headers={"cache-control": "max-age=300"},
+ request_count=2,
+ ) as uri:
+ response, _ = http.request(uri, "GET", headers={"Cache-Control": "no-store"})
assert response.status == 200
assert not response.fromcache
- response, _ = http.request(uri, 'GET', headers={'Cache-Control': 'no-store'})
+ response, _ = http.request(uri, "GET", headers={"Cache-Control": "no-store"})
assert response.status == 200
assert not response.fromcache
@@ -196,12 +217,15 @@ def test_get_cache_control_no_store_response():
# A no-store response means that the response should not be stored.
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(
- add_date=True, add_etag=True,
- headers={'cache-control': 'max-age=300, no-store'}, request_count=2) as uri:
- response, _ = http.request(uri, 'GET')
+ add_date=True,
+ add_etag=True,
+ headers={"cache-control": "max-age=300, no-store"},
+ request_count=2,
+ ) as uri:
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
@@ -211,15 +235,22 @@ def test_get_cache_control_no_cache_no_store_request():
# even if it was cached previously.
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_const_http(
- add_date=True, add_etag=True,
- headers={'cache-control': 'max-age=300'}, request_count=3) as uri:
- response, _ = http.request(uri, 'GET')
- response, _ = http.request(uri, 'GET')
+ add_date=True,
+ add_etag=True,
+ headers={"cache-control": "max-age=300"},
+ request_count=3,
+ ) as uri:
+ response, _ = http.request(uri, "GET")
+ response, _ = http.request(uri, "GET")
assert response.fromcache
- response, _ = http.request(uri, 'GET', headers={'Cache-Control': 'no-store, no-cache'})
+ response, _ = http.request(
+ uri, "GET", headers={"Cache-Control": "no-store, no-cache"}
+ )
assert response.status == 200
assert not response.fromcache
- response, _ = http.request(uri, 'GET', headers={'Cache-Control': 'no-store, no-cache'})
+ response, _ = http.request(
+ uri, "GET", headers={"Cache-Control": "no-store, no-cache"}
+ )
assert response.status == 200
assert not response.fromcache
@@ -230,49 +261,52 @@ def test_update_invalidates_cache():
http = httplib2.Http(cache=tests.get_cache_path())
def handler(request):
- if request.method in ('PUT', 'PATCH', 'DELETE'):
+ if request.method in ("PUT", "PATCH", "DELETE"):
return tests.http_response_bytes(status=405)
return tests.http_response_bytes(
- add_date=True, add_etag=True, headers={'cache-control': 'max-age=300'})
+ add_date=True, add_etag=True, headers={"cache-control": "max-age=300"}
+ )
with tests.server_request(handler, request_count=3) as uri:
- response, _ = http.request(uri, 'GET')
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
+ response, _ = http.request(uri, "GET")
assert response.fromcache
- response, _ = http.request(uri, 'DELETE')
+ response, _ = http.request(uri, "DELETE")
assert response.status == 405
assert not response.fromcache
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert not response.fromcache
def handler_conditional_update(request):
respond = tests.http_response_bytes
- if request.method == 'GET':
- if request.headers.get('if-none-match', '') == '12345':
+ if request.method == "GET":
+ if request.headers.get("if-none-match", "") == "12345":
return respond(status=304)
- return respond(add_date=True, headers={'etag': '12345', 'cache-control': 'max-age=300'})
- elif request.method in ('PUT', 'PATCH', 'DELETE'):
- if request.headers.get('if-match', '') == '12345':
+ return respond(
+ add_date=True, headers={"etag": "12345", "cache-control": "max-age=300"}
+ )
+ elif request.method in ("PUT", "PATCH", "DELETE"):
+ if request.headers.get("if-match", "") == "12345":
return respond(status=200)
return respond(status=412)
return respond(status=405)
-@pytest.mark.parametrize('method', ('PUT', 'PATCH'))
+@pytest.mark.parametrize("method", ("PUT", "PATCH"))
def test_update_uses_cached_etag(method):
# Test that we natively support http://www.w3.org/1999/04/Editing/
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_request(handler_conditional_update, request_count=3) as uri:
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert response.fromcache
- response, _ = http.request(uri, method, body=b'foo')
+ response, _ = http.request(uri, method, body=b"foo")
assert response.status == 200
- response, _ = http.request(uri, method, body=b'foo')
+ response, _ = http.request(uri, method, body=b"foo")
assert response.status == 412
@@ -280,14 +314,14 @@ def test_update_uses_cached_etag_and_oc_method():
# Test that we natively support http://www.w3.org/1999/04/Editing/
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_request(handler_conditional_update, request_count=2) as uri:
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 200
assert response.fromcache
- http.optimistic_concurrency_methods.append('DELETE')
- response, _ = http.request(uri, 'DELETE')
+ http.optimistic_concurrency_methods.append("DELETE")
+ response, _ = http.request(uri, "DELETE")
assert response.status == 200
@@ -295,27 +329,35 @@ def test_update_uses_cached_etag_overridden():
# Test that we natively support http://www.w3.org/1999/04/Editing/
http = httplib2.Http(cache=tests.get_cache_path())
with tests.server_request(handler_conditional_update, request_count=2) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert response.fromcache
- response, content = http.request(uri, 'PUT', body=b'foo', headers={'if-match': 'fred'})
+ response, content = http.request(
+ uri, "PUT", body=b"foo", headers={"if-match": "fred"}
+ )
assert response.status == 412
@pytest.mark.parametrize(
- 'data', (
+ "data",
+ (
({}, {}),
- ({'cache-control': ' no-cache'},
- {'no-cache': 1}),
- ({'cache-control': ' no-store, max-age = 7200'},
- {'no-store': 1, 'max-age': '7200'}),
- ({'cache-control': ' , '}, {'': 1}), # FIXME
- ({'cache-control': 'Max-age=3600;post-check=1800,pre-check=3600'},
- {'max-age': '3600;post-check=1800', 'pre-check': '3600'}),
- ), ids=lambda data: str(data[0]))
+ ({"cache-control": " no-cache"}, {"no-cache": 1}),
+ (
+ {"cache-control": " no-store, max-age = 7200"},
+ {"no-store": 1, "max-age": "7200"},
+ ),
+ ({"cache-control": " , "}, {"": 1}), # FIXME
+ (
+ {"cache-control": "Max-age=3600;post-check=1800,pre-check=3600"},
+ {"max-age": "3600;post-check=1800", "pre-check": "3600"},
+ ),
+ ),
+ ids=lambda data: str(data[0]),
+)
def test_parse_cache_control(data):
header, expected = data
assert httplib2._parse_cache_control(header) == expected
@@ -323,36 +365,59 @@ def test_parse_cache_control(data):
def test_normalize_headers():
# Test that we normalize headers to lowercase
- h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
- assert 'cache-control' in h
- assert 'other' in h
- assert h['other'] == 'Stuff'
+ h = httplib2._normalize_headers({"Cache-Control": "no-cache", "Other": "Stuff"})
+ assert "cache-control" in h
+ assert "other" in h
+ assert h["other"] == "Stuff"
@pytest.mark.parametrize(
- 'data', (
- ({'cache-control': 'no-cache'}, {'cache-control': 'max-age=7200'}, 'TRANSPARENT'),
- ({}, {'cache-control': 'max-age=fred, min-fresh=barney'}, 'STALE'),
- ({}, {'date': '{now}', 'expires': '{now+3}'}, 'FRESH'),
- ({}, {'date': '{now}', 'expires': '{now+3}', 'cache-control': 'no-cache'}, 'STALE'),
- ({'cache-control': 'must-revalidate'}, {}, 'STALE'),
- ({}, {'cache-control': 'must-revalidate'}, 'STALE'),
- ({}, {'date': '{now}', 'cache-control': 'max-age=0'}, 'STALE'),
- ({'cache-control': 'only-if-cached'}, {}, 'FRESH'),
- ({}, {'date': '{now}', 'expires': '0'}, 'STALE'),
- ({}, {'data': '{now+3}'}, 'STALE'),
- ({'cache-control': 'max-age=0'}, {'date': '{now}', 'cache-control': 'max-age=2'}, 'STALE'),
- ({'cache-control': 'min-fresh=2'}, {'date': '{now}', 'expires': '{now+2}'}, 'STALE'),
- ({'cache-control': 'min-fresh=2'}, {'date': '{now}', 'expires': '{now+4}'}, 'FRESH'),
- ), ids=lambda data: str(data))
+ "data",
+ (
+ (
+ {"cache-control": "no-cache"},
+ {"cache-control": "max-age=7200"},
+ "TRANSPARENT",
+ ),
+ ({}, {"cache-control": "max-age=fred, min-fresh=barney"}, "STALE"),
+ ({}, {"date": "{now}", "expires": "{now+3}"}, "FRESH"),
+ (
+ {},
+ {"date": "{now}", "expires": "{now+3}", "cache-control": "no-cache"},
+ "STALE",
+ ),
+ ({"cache-control": "must-revalidate"}, {}, "STALE"),
+ ({}, {"cache-control": "must-revalidate"}, "STALE"),
+ ({}, {"date": "{now}", "cache-control": "max-age=0"}, "STALE"),
+ ({"cache-control": "only-if-cached"}, {}, "FRESH"),
+ ({}, {"date": "{now}", "expires": "0"}, "STALE"),
+ ({}, {"data": "{now+3}"}, "STALE"),
+ (
+ {"cache-control": "max-age=0"},
+ {"date": "{now}", "cache-control": "max-age=2"},
+ "STALE",
+ ),
+ (
+ {"cache-control": "min-fresh=2"},
+ {"date": "{now}", "expires": "{now+2}"},
+ "STALE",
+ ),
+ (
+ {"cache-control": "min-fresh=2"},
+ {"date": "{now}", "expires": "{now+4}"},
+ "FRESH",
+ ),
+ ),
+ ids=lambda data: str(data),
+)
def test_entry_disposition(data):
now = time.time()
- nowre = re.compile(r'{now([\+\-]\d+)?}')
+ nowre = re.compile(r"{now([\+\-]\d+)?}")
def render(s):
m = nowre.match(s)
if m:
- offset = int(m.expand(r'\1')) if m.group(1) else 0
+ offset = int(m.expand(r"\1")) if m.group(1) else 0
s = email.utils.formatdate(now + offset, usegmt=True)
return s
@@ -364,24 +429,24 @@ def test_entry_disposition(data):
def test_expiration_model_fresh():
response_headers = {
- 'date': email.utils.formatdate(usegmt=True),
- 'cache-control': 'max-age=2'
+ "date": email.utils.formatdate(usegmt=True),
+ "cache-control": "max-age=2",
}
- assert httplib2._entry_disposition(response_headers, {}) == 'FRESH'
+ assert httplib2._entry_disposition(response_headers, {}) == "FRESH"
# TODO: add current time as _entry_disposition argument to avoid sleep in tests
time.sleep(3)
- assert httplib2._entry_disposition(response_headers, {}) == 'STALE'
+ assert httplib2._entry_disposition(response_headers, {}) == "STALE"
def test_expiration_model_date_and_expires():
now = time.time()
response_headers = {
- 'date': email.utils.formatdate(now, usegmt=True),
- 'expires': email.utils.formatdate(now + 2, usegmt=True),
+ "date": email.utils.formatdate(now, usegmt=True),
+ "expires": email.utils.formatdate(now + 2, usegmt=True),
}
- assert httplib2._entry_disposition(response_headers, {}) == 'FRESH'
+ assert httplib2._entry_disposition(response_headers, {}) == "FRESH"
time.sleep(3)
- assert httplib2._entry_disposition(response_headers, {}) == 'STALE'
+ assert httplib2._entry_disposition(response_headers, {}) == "STALE"
# TODO: Repeat all cache tests with memcache. pytest.mark.parametrize
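The FRESH/STALE expectations in the parametrized cases above follow the RFC 2616 section 13.2 freshness model: an age derived from the Date header is compared against a lifetime taken from max-age or Expires, tightened by the request's min-fresh. A simplified sketch of just that arithmetic (it ignores no-cache, no-store, and must-revalidate, which the cases above also exercise):

    import email.utils
    import time

    def is_fresh(headers, min_fresh=0, now=None):
        now = now if now is not None else time.time()
        date = email.utils.mktime_tz(email.utils.parsedate_tz(headers["date"]))
        age = max(0, now - date)
        cc = headers.get("cache-control", "")
        if "max-age=" in cc:
            lifetime = int(cc.split("max-age=")[1].split(",")[0])
        elif "expires" in headers:
            parsed = email.utils.parsedate_tz(headers["expires"])
            lifetime = email.utils.mktime_tz(parsed) - date if parsed else 0
        else:
            lifetime = 0
        return age + min_fresh < lifetime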
diff --git a/tests/test_encoding.py b/tests/test_encoding.py
index df991a1..c7eead5 100644
--- a/tests/test_encoding.py
+++ b/tests/test_encoding.py
@@ -6,42 +6,42 @@ def test_gzip_head():
# Test that we don't try to decompress a HEAD response
http = httplib2.Http()
response = tests.http_response_bytes(
- headers={'content-encoding': 'gzip', 'content-length': 42},
+ headers={"content-encoding": "gzip", "content-length": 42}
)
with tests.server_const_bytes(response) as uri:
- response, content = http.request(uri, 'HEAD')
+ response, content = http.request(uri, "HEAD")
assert response.status == 200
- assert int(response['content-length']) != 0
- assert content == b''
+ assert int(response["content-length"]) != 0
+ assert content == b""
def test_gzip_get():
# Test that we support gzip compression
http = httplib2.Http()
response = tests.http_response_bytes(
- headers={'content-encoding': 'gzip'},
- body=tests.gzip_compress(b'properly compressed'),
+ headers={"content-encoding": "gzip"},
+ body=tests.gzip_compress(b"properly compressed"),
)
with tests.server_const_bytes(response) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
- assert 'content-encoding' not in response
- assert '-content-encoding' in response
- assert int(response['content-length']) == len(b'properly compressed')
- assert content == b'properly compressed'
+ assert "content-encoding" not in response
+ assert "-content-encoding" in response
+ assert int(response["content-length"]) == len(b"properly compressed")
+ assert content == b"properly compressed"
def test_gzip_post_response():
http = httplib2.Http()
response = tests.http_response_bytes(
- headers={'content-encoding': 'gzip'},
- body=tests.gzip_compress(b'properly compressed'),
+ headers={"content-encoding": "gzip"},
+ body=tests.gzip_compress(b"properly compressed"),
)
with tests.server_const_bytes(response) as uri:
- response, content = http.request(uri, 'POST', body=b'')
+ response, content = http.request(uri, "POST", body=b"")
assert response.status == 200
- assert 'content-encoding' not in response
- assert '-content-encoding' in response
+ assert "content-encoding" not in response
+ assert "-content-encoding" in response
def test_gzip_malformed_response():
@@ -49,34 +49,33 @@ def test_gzip_malformed_response():
# Test that we raise a useful exception when gzip decompression fails
http.force_exception_to_status_code = False
response = tests.http_response_bytes(
- headers={'content-encoding': 'gzip'},
- body=b'obviously not compressed',
+ headers={"content-encoding": "gzip"}, body=b"obviously not compressed"
)
with tests.server_const_bytes(response, request_count=2) as uri:
with tests.assert_raises(httplib2.FailedToDecompressContent):
- http.request(uri, 'GET')
+ http.request(uri, "GET")
# Re-run the test without the exceptions
http.force_exception_to_status_code = True
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 500
- assert response.reason.startswith('Content purported')
+ assert response.reason.startswith("Content purported")
def test_deflate_get():
# Test that we support deflate compression
http = httplib2.Http()
response = tests.http_response_bytes(
- headers={'content-encoding': 'deflate'},
- body=tests.deflate_compress(b'properly compressed'),
+ headers={"content-encoding": "deflate"},
+ body=tests.deflate_compress(b"properly compressed"),
)
with tests.server_const_bytes(response) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
- assert 'content-encoding' not in response
- assert int(response['content-length']) == len(b'properly compressed')
- assert content == b'properly compressed'
+ assert "content-encoding" not in response
+ assert int(response["content-length"]) == len(b"properly compressed")
+ assert content == b"properly compressed"
def test_deflate_malformed_response():
@@ -84,16 +83,15 @@ def test_deflate_malformed_response():
http = httplib2.Http()
http.force_exception_to_status_code = False
response = tests.http_response_bytes(
- headers={'content-encoding': 'deflate'},
- body=b'obviously not compressed',
+ headers={"content-encoding": "deflate"}, body=b"obviously not compressed"
)
with tests.server_const_bytes(response, request_count=2) as uri:
with tests.assert_raises(httplib2.FailedToDecompressContent):
- http.request(uri, 'GET')
+ http.request(uri, "GET")
# Re-run the test without the exceptions
http.force_exception_to_status_code = True
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 500
- assert response.reason.startswith('Content purported')
+ assert response.reason.startswith("Content purported")
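tests.deflate_compress itself is not shown in this diff; for the 'deflate' content-coding it is presumably just the zlib-wrapped stream of RFC 1950, along these lines:

    import zlib

    def deflate_compress(bs):
        # HTTP "deflate" means zlib-wrapped DEFLATE data (RFC 1950)
        return zlib.compress(bs, 9)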
diff --git a/tests/test_external.py b/tests/test_external.py
index 20652c9..10b0d8b 100644
--- a/tests/test_external.py
+++ b/tests/test_external.py
@@ -1,7 +1,7 @@
-'''These tests rely on replies from public internet services
+"""These tests rely on replies from public internet services
TODO: reimplement with local stubs
-'''
+"""
import httplib2
import os
import pytest
@@ -13,7 +13,7 @@ import tests
def test_get_301_via_https():
# Google always redirects to http://google.com
http = httplib2.Http()
- response, content = http.request('https://code.google.com/apis/', 'GET')
+ response, content = http.request("https://code.google.com/apis/", "GET")
assert response.status == 200
assert response.previous.status == 301
@@ -21,7 +21,7 @@ def test_get_301_via_https():
def test_get_via_https():
# Test that we can handle HTTPS
http = httplib2.Http()
- response, content = http.request('https://google.com/adsense/', 'GET')
+ response, content = http.request("https://google.com/adsense/", "GET")
assert response.status == 200
@@ -31,7 +31,7 @@ def test_get_via_https_spec_violation_on_location():
# a relative Location: header instead of an
# absolute one.
http = httplib2.Http()
- response, content = http.request('https://google.com/adsense', 'GET')
+ response, content = http.request("https://google.com/adsense", "GET")
assert response.status == 200
assert response.previous is not None
@@ -42,21 +42,21 @@ def test_get_via_https_key_cert():
# correctly to httplib. It would be nice to have
# a real https endpoint to test against.
http = httplib2.Http(timeout=2)
- http.add_certificate('akeyfile', 'acertfile', 'bitworking.org')
+ http.add_certificate("akeyfile", "acertfile", "bitworking.org")
try:
- http.request('https://bitworking.org', 'GET')
+ http.request("https://bitworking.org", "GET")
except AttributeError:
- assert http.connections['https:bitworking.org'].key_file == 'akeyfile'
- assert http.connections['https:bitworking.org'].cert_file == 'acertfile'
+ assert http.connections["https:bitworking.org"].key_file == "akeyfile"
+ assert http.connections["https:bitworking.org"].cert_file == "acertfile"
except IOError:
# Skip on 3.2
pass
try:
- http.request('https://notthere.bitworking.org', 'GET')
+ http.request("https://notthere.bitworking.org", "GET")
except httplib2.ServerNotFoundError:
- assert http.connections['https:notthere.bitworking.org'].key_file is None
- assert http.connections['https:notthere.bitworking.org'].cert_file is None
+ assert http.connections["https:notthere.bitworking.org"].key_file is None
+ assert http.connections["https:notthere.bitworking.org"].cert_file is None
except IOError:
# Skip on 3.2
pass
@@ -65,14 +65,17 @@ def test_get_via_https_key_cert():
def test_ssl_invalid_ca_certs_path():
# Test that we get an ssl.SSLError when specifying a non-existent CA
# certs file.
- http = httplib2.Http(ca_certs='/nosuchfile')
+ http = httplib2.Http(ca_certs="/nosuchfile")
with tests.assert_raises(IOError):
- http.request('https://www.google.com/', 'GET')
+ http.request("https://www.google.com/", "GET")
@pytest.mark.xfail(
sys.version_info <= (3,),
- reason='FIXME: for unknown reason Python 2.7.10 validates www.google.com against dummy CA www.example.com',
+ reason=(
+ "FIXME: for unknown reason Python 2.7.10 validates www.google.com "
+ "against dummy CA www.example.com"
+ ),
)
def test_ssl_wrong_ca():
# Test that we get a SSLHandshakeError if we try to access
@@ -80,16 +83,16 @@ def test_ssl_wrong_ca():
# the CA Google uses (i.e., simulating a cert that's not signed by a
# trusted CA).
other_ca_certs = os.path.join(
- os.path.dirname(os.path.abspath(httplib2.__file__)),
- 'test', 'other_cacerts.txt')
+ os.path.dirname(os.path.abspath(httplib2.__file__)), "test", "other_cacerts.txt"
+ )
assert os.path.exists(other_ca_certs)
http = httplib2.Http(ca_certs=other_ca_certs)
http.follow_redirects = False
with tests.assert_raises(ssl.SSLError):
- http.request('https://www.google.com/', 'GET')
+ http.request("https://www.google.com/", "GET")
def test_sni_hostname_validation():
# TODO: make explicit test server with SNI validation
http = httplib2.Http()
- http.request('https://google.com/', method='GET')
+ http.request("https://google.com/", method="GET")
diff --git a/tests/test_http.py b/tests/test_http.py
index 29d67af..2bee6ca 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -1,22 +1,32 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
import email.utils
+import errno
import httplib2
import mock
import os
import pytest
+from six.moves import http_client, urllib
import socket
import tests
-from six.moves import http_client, urllib
+DUMMY_URL = "http://127.0.0.1:1"
-dummy_url = 'http://127.0.0.1:1'
+
+def _raise_connection_refused_exception(*args, **kwargs):
+ raise socket.error(errno.ECONNREFUSED, "Connection refused.")
def test_connection_type():
http = httplib2.Http()
http.force_exception_to_status_code = False
- response, content = http.request(dummy_url, connection_type=tests.MockHTTPConnection)
- assert response['content-location'] == dummy_url
- assert content == b'the body'
+ response, content = http.request(
+ DUMMY_URL, connection_type=tests.MockHTTPConnection
+ )
+ assert response["content-location"] == DUMMY_URL
+ assert content == b"the body"
def test_bad_status_line_retry():
@@ -25,7 +35,9 @@ def test_bad_status_line_retry():
httplib2.RETRIES = 1
http.force_exception_to_status_code = False
try:
- response, content = http.request(dummy_url, connection_type=tests.MockHTTPBadStatusConnection)
+ response, content = http.request(
+ DUMMY_URL, connection_type=tests.MockHTTPBadStatusConnection
+ )
except http_client.BadStatusLine:
assert tests.MockHTTPBadStatusConnection.num_calls == 2
httplib2.RETRIES = old_retries
@@ -35,39 +47,60 @@ def test_unknown_server():
http = httplib2.Http()
http.force_exception_to_status_code = False
with tests.assert_raises(httplib2.ServerNotFoundError):
- with mock.patch('socket.socket.connect', side_effect=socket.gaierror):
+ with mock.patch("socket.socket.connect", side_effect=socket.gaierror):
http.request("http://no-such-hostname./")
# Now test with exceptions turned off
http.force_exception_to_status_code = True
response, content = http.request("http://no-such-hostname./")
- assert response['content-type'] == 'text/plain'
+ assert response["content-type"] == "text/plain"
assert content.startswith(b"Unable to find")
assert response.status == 400
-def test_connection_refused():
+@pytest.mark.skipif(
+ os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"),
+ reason="Fails on Travis py27/pypy, works elsewhere. "
+ "See https://travis-ci.org/httplib2/httplib2/jobs/408769880.",
+)
+@mock.patch("socket.socket.connect", spec=True)
+def test_connection_refused_raises_exception(mock_socket_connect):
+ mock_socket_connect.side_effect = _raise_connection_refused_exception
http = httplib2.Http()
http.force_exception_to_status_code = False
with tests.assert_raises(socket.error):
- http.request(dummy_url)
+ http.request(DUMMY_URL)
- # Now test with exceptions turned off
+
+@pytest.mark.skipif(
+ os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"),
+ reason="Fails on Travis py27/pypy, works elsewhere. "
+ "See https://travis-ci.org/httplib2/httplib2/jobs/408769880.",
+)
+@mock.patch("socket.socket.connect", spec=True)
+def test_connection_refused_returns_response(mock_socket_connect):
+ mock_socket_connect.side_effect = _raise_connection_refused_exception
+ http = httplib2.Http()
http.force_exception_to_status_code = True
- response, content = http.request(dummy_url)
- assert response['content-type'] == 'text/plain'
- assert (b"Connection refused" in content or b"actively refused" in content)
+ response, content = http.request(DUMMY_URL)
+ content = content.lower()
+ assert response["content-type"] == "text/plain"
+ assert (
+ b"connection refused" in content
+ or b"actively refused" in content
+ or b"socket is not connected" in content
+ )
assert response.status == 400
def test_get_iri():
http = httplib2.Http()
- query = u'?a=\N{CYRILLIC CAPITAL LETTER DJE}'
+ query = u"?a=\N{CYRILLIC CAPITAL LETTER DJE}"
with tests.server_reflect() as uri:
- response, content = http.request(uri + query, 'GET')
+ response, content = http.request(uri + query, "GET")
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.uri == '/?a=%D0%82'
+ assert reflected.uri == "/?a=%D0%82"
def test_get_is_default_method():
@@ -77,13 +110,13 @@ def test_get_is_default_method():
response, content = http.request(uri)
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.method == 'GET'
+ assert reflected.method == "GET"
def test_different_methods():
# Test that all methods can be used
http = httplib2.Http()
- methods = ['GET', 'PUT', 'DELETE', 'POST', 'unknown']
+ methods = ["GET", "PUT", "DELETE", "POST", "unknown"]
with tests.server_reflect(request_count=len(methods)) as uri:
for method in methods:
response, content = http.request(uri, method, body=b" ")
@@ -96,9 +129,9 @@ def test_head_read():
# Test that we don't try to read the response of a HEAD request
# since httplib blocks response.read() for HEAD requests.
http = httplib2.Http()
- respond_with = b'HTTP/1.0 200 OK\r\ncontent-length: 14\r\n\r\nnon-empty-body'
+ respond_with = b"HTTP/1.0 200 OK\r\ncontent-length: " b"14\r\n\r\nnon-empty-body"
with tests.server_const_bytes(respond_with) as uri:
- response, content = http.request(uri, 'HEAD')
+ response, content = http.request(uri, "HEAD")
assert response.status == 200
assert content == b""
@@ -107,7 +140,7 @@ def test_get_no_cache():
# Test that we can do a GET without the cache turned on.
http = httplib2.Http()
with tests.server_const_http() as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert response.previous is None
@@ -116,32 +149,34 @@ def test_user_agent():
# Test that we provide a default user-agent
http = httplib2.Http()
with tests.server_reflect() as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.headers.get('user-agent', '').startswith('Python-httplib2/')
+ assert reflected.headers.get("user-agent", "").startswith("Python-httplib2/")
def test_user_agent_non_default():
# Test that the default user-agent can be overridden
http = httplib2.Http()
with tests.server_reflect() as uri:
- response, content = http.request(uri, 'GET', headers={'User-Agent': 'fred/1.0'})
+ response, content = http.request(uri, "GET", headers={"User-Agent": "fred/1.0"})
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.headers.get('user-agent') == 'fred/1.0'
+ assert reflected.headers.get("user-agent") == "fred/1.0"
def test_get_300_with_location():
# Test that we automatically follow 300 redirects if a Location: header is provided
http = httplib2.Http()
- final_content = b'This is the final destination.\n'
+ final_content = b"This is the final destination.\n"
routes = {
- '/final': tests.http_response_bytes(body=final_content),
- '': tests.http_response_bytes(status='300 Multiple Choices', headers={'location': '/final'}),
+ "/final": tests.http_response_bytes(body=final_content),
+ "": tests.http_response_bytes(
+ status="300 Multiple Choices", headers={"location": "/final"}
+ ),
}
with tests.server_route(routes, request_count=2) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert content == final_content
assert response.previous.status == 300
@@ -149,7 +184,7 @@ def test_get_300_with_location():
# Confirm that the intermediate 300 is not cached
with tests.server_route(routes, request_count=2) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert content == final_content
assert response.previous.status == 300
@@ -161,11 +196,12 @@ def test_get_300_with_location_noredirect():
http = httplib2.Http()
http.follow_redirects = False
response = tests.http_response_bytes(
- status='300 Multiple Choices',
- headers={'location': '/final'},
- body=b'redirect body')
+ status="300 Multiple Choices",
+ headers={"location": "/final"},
+ body=b"redirect body",
+ )
with tests.server_const_bytes(response) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 300
@@ -173,80 +209,92 @@ def test_get_300_without_location():
# Not giving a Location: header in a 300 response is acceptable,
# in which case we just return the 300 response
http = httplib2.Http()
- with tests.server_const_http(status='300 Multiple Choices', body=b'redirect body') as uri:
- response, content = http.request(uri, 'GET')
+ with tests.server_const_http(
+ status="300 Multiple Choices", body=b"redirect body"
+ ) as uri:
+ response, content = http.request(uri, "GET")
assert response.status == 300
assert response.previous is None
- assert content == b'redirect body'
+ assert content == b"redirect body"
def test_get_301():
# Test that we automatically follow 301 redirects
# and that we cache the 301 response
http = httplib2.Http(cache=tests.get_cache_path())
- destination = ''
+ destination = ""
routes = {
- '/final': tests.http_response_bytes(body=b'This is the final destination.\n'),
- '': tests.http_response_bytes(
- status='301 Now where did I leave that URL', headers={'location': '/final'}, body=b'redirect body'),
+ "/final": tests.http_response_bytes(body=b"This is the final destination.\n"),
+ "": tests.http_response_bytes(
+ status="301 Now where did I leave that URL",
+ headers={"location": "/final"},
+ body=b"redirect body",
+ ),
}
with tests.server_route(routes, request_count=3) as uri:
- destination = urllib.parse.urljoin(uri, '/final')
- response1, content1 = http.request(uri, 'GET')
- response2, content2 = http.request(uri, 'GET')
+ destination = urllib.parse.urljoin(uri, "/final")
+ response1, content1 = http.request(uri, "GET")
+ response2, content2 = http.request(uri, "GET")
assert response1.status == 200
- assert 'content-location' in response2
- assert response1['content-location'] == destination
- assert content1 == b'This is the final destination.\n'
+ assert "content-location" in response2
+ assert response1["content-location"] == destination
+ assert content1 == b"This is the final destination.\n"
assert response1.previous.status == 301
assert not response1.previous.fromcache
assert response2.status == 200
- assert response2['content-location'] == destination
- assert content2 == b'This is the final destination.\n'
+ assert response2["content-location"] == destination
+ assert content2 == b"This is the final destination.\n"
assert response2.previous.status == 301
assert response2.previous.fromcache
@pytest.mark.skip(
- not os.environ.get('httplib2_test_still_run_skipped') and
- os.environ.get('TRAVIS_PYTHON_VERSION') in ('2.7', 'pypy'),
- reason='FIXME: timeout on Travis py27 and pypy, works elsewhere',
+ not os.environ.get("httplib2_test_still_run_skipped")
+ and os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"),
+ reason="FIXME: timeout on Travis py27 and pypy, works elsewhere",
)
def test_head_301():
# Test that we automatically follow 301 redirects
http = httplib2.Http()
- destination = ''
+ destination = ""
routes = {
- '/final': tests.http_response_bytes(body=b'This is the final destination.\n'),
- '': tests.http_response_bytes(
- status='301 Now where did I leave that URL', headers={'location': '/final'}, body=b'redirect body'),
+ "/final": tests.http_response_bytes(body=b"This is the final destination.\n"),
+ "": tests.http_response_bytes(
+ status="301 Now where did I leave that URL",
+ headers={"location": "/final"},
+ body=b"redirect body",
+ ),
}
with tests.server_route(routes, request_count=2) as uri:
- destination = urllib.parse.urljoin(uri, '/final')
- response, content = http.request(uri, 'HEAD')
+ destination = urllib.parse.urljoin(uri, "/final")
+ response, content = http.request(uri, "HEAD")
assert response.status == 200
- assert response['content-location'] == destination
+ assert response["content-location"] == destination
assert response.previous.status == 301
assert not response.previous.fromcache
-@pytest.mark.xfail(reason='FIXME: 301 cache works only with follow_redirects, should work regardless')
+@pytest.mark.xfail(
+ reason=(
+ "FIXME: 301 cache works only with follow_redirects, should work " "regardless"
+ )
+)
def test_get_301_no_redirect():
# Test that we cache the 301 response
http = httplib2.Http(cache=tests.get_cache_path(), timeout=0.5)
http.follow_redirects = False
response = tests.http_response_bytes(
- status='301 Now where did I leave that URL',
- headers={'location': '/final', 'cache-control': 'max-age=300'},
- body=b'redirect body',
+ status="301 Now where did I leave that URL",
+ headers={"location": "/final", "cache-control": "max-age=300"},
+ body=b"redirect body",
add_date=True,
)
with tests.server_const_bytes(response) as uri:
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 301
assert not response.fromcache
- response, _ = http.request(uri, 'GET')
+ response, _ = http.request(uri, "GET")
assert response.status == 301
assert response.fromcache
@@ -255,39 +303,41 @@ def test_get_302():
# Test that we automatically follow 302 redirects
# and that we DO NOT cache the 302 response
http = httplib2.Http(cache=tests.get_cache_path())
- second_url, final_url = '', ''
+ second_url, final_url = "", ""
routes = {
- '/final': tests.http_response_bytes(body=b'This is the final destination.\n'),
- '/second': tests.http_response_bytes(
- status='302 Found', headers={'location': '/final'}, body=b'second redirect'),
- '': tests.http_response_bytes(
- status='302 Found', headers={'location': '/second'}, body=b'redirect body'),
+ "/final": tests.http_response_bytes(body=b"This is the final destination.\n"),
+ "/second": tests.http_response_bytes(
+ status="302 Found", headers={"location": "/final"}, body=b"second redirect"
+ ),
+ "": tests.http_response_bytes(
+ status="302 Found", headers={"location": "/second"}, body=b"redirect body"
+ ),
}
with tests.server_route(routes, request_count=7) as uri:
- second_url = urllib.parse.urljoin(uri, '/second')
- final_url = urllib.parse.urljoin(uri, '/final')
- response1, content1 = http.request(second_url, 'GET')
- response2, content2 = http.request(second_url, 'GET')
- response3, content3 = http.request(uri, 'GET')
+ second_url = urllib.parse.urljoin(uri, "/second")
+ final_url = urllib.parse.urljoin(uri, "/final")
+ response1, content1 = http.request(second_url, "GET")
+ response2, content2 = http.request(second_url, "GET")
+ response3, content3 = http.request(uri, "GET")
assert response1.status == 200
- assert response1['content-location'] == final_url
- assert content1 == b'This is the final destination.\n'
+ assert response1["content-location"] == final_url
+ assert content1 == b"This is the final destination.\n"
assert response1.previous.status == 302
assert not response1.previous.fromcache
assert response2.status == 200
# FIXME:
# assert response2.fromcache
- assert response2['content-location'] == final_url
- assert content2 == b'This is the final destination.\n'
+ assert response2["content-location"] == final_url
+ assert content2 == b"This is the final destination.\n"
assert response2.previous.status == 302
assert not response2.previous.fromcache
- assert response2.previous['content-location'] == second_url
+ assert response2.previous["content-location"] == second_url
assert response3.status == 200
# FIXME:
# assert response3.fromcache
- assert content3 == b'This is the final destination.\n'
+ assert content3 == b"This is the final destination.\n"
assert response3.previous.status == 302
assert not response3.previous.fromcache
@@ -299,28 +349,30 @@ def test_get_302_redirection_limit():
http = httplib2.Http()
http.force_exception_to_status_code = False
routes = {
- '/second': tests.http_response_bytes(
- status='302 Found', headers={'location': '/final'}, body=b'second redirect'),
- '': tests.http_response_bytes(
- status='302 Found', headers={'location': '/second'}, body=b'redirect body'),
+ "/second": tests.http_response_bytes(
+ status="302 Found", headers={"location": "/final"}, body=b"second redirect"
+ ),
+ "": tests.http_response_bytes(
+ status="302 Found", headers={"location": "/second"}, body=b"redirect body"
+ ),
}
with tests.server_route(routes, request_count=4) as uri:
try:
- http.request(uri, 'GET', redirections=1)
- assert False, 'This should not happen'
+ http.request(uri, "GET", redirections=1)
+ assert False, "This should not happen"
except httplib2.RedirectLimit:
pass
except Exception:
- assert False, 'Threw wrong kind of exception '
+ assert False, "Threw wrong kind of exception "
# Re-run the test without the exceptions
http.force_exception_to_status_code = True
- response, content = http.request(uri, 'GET', redirections=1)
+ response, content = http.request(uri, "GET", redirections=1)
assert response.status == 500
- assert response.reason.startswith('Redirected more')
- assert response['status'] == '302'
- assert content == b'second redirect'
+ assert response.reason.startswith("Redirected more")
+ assert response["status"] == "302"
+ assert content == b"second redirect"
assert response.previous is not None
@@ -329,61 +381,63 @@ def test_get_302_no_location():
# a 302 with no Location: header.
http = httplib2.Http()
http.force_exception_to_status_code = False
- with tests.server_const_http(status='302 Found', request_count=2) as uri:
+ with tests.server_const_http(status="302 Found", request_count=2) as uri:
try:
- http.request(uri, 'GET')
- assert False, 'Should never reach here'
+ http.request(uri, "GET")
+ assert False, "Should never reach here"
except httplib2.RedirectMissingLocation:
pass
except Exception:
- assert False, 'Threw wrong kind of exception '
+ assert False, "Threw wrong kind of exception "
# Re-run the test without the exceptions
http.force_exception_to_status_code = True
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 500
- assert response.reason.startswith('Redirected but')
- assert '302' == response['status']
- assert content == b''
+ assert response.reason.startswith("Redirected but")
+ assert "302" == response["status"]
+ assert content == b""
@pytest.mark.skip(
- not os.environ.get('httplib2_test_still_run_skipped') and
- os.environ.get('TRAVIS_PYTHON_VERSION') in ('2.7', 'pypy'),
- reason='FIXME: timeout on Travis py27 and pypy, works elsewhere',
+ not os.environ.get("httplib2_test_still_run_skipped")
+ and os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"),
+ reason="FIXME: timeout on Travis py27 and pypy, works elsewhere",
)
def test_303():
# Do a follow-up GET on a Location: header
# returned from a POST that gave a 303.
http = httplib2.Http()
routes = {
- '/final': tests.make_http_reflect(),
- '': tests.make_http_reflect(status='303 See Other', headers={'location': '/final'}),
+ "/final": tests.make_http_reflect(),
+ "": tests.make_http_reflect(
+ status="303 See Other", headers={"location": "/final"}
+ ),
}
with tests.server_route(routes, request_count=2) as uri:
- response, content = http.request(uri, 'POST', " ")
+ response, content = http.request(uri, "POST", " ")
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.uri == '/final'
+ assert reflected.uri == "/final"
assert response.previous.status == 303
# Skip follow-up GET
http = httplib2.Http()
http.follow_redirects = False
with tests.server_route(routes, request_count=1) as uri:
- response, content = http.request(uri, 'POST', " ")
+ response, content = http.request(uri, "POST", " ")
assert response.status == 303
# All methods can be used
http = httplib2.Http()
- cases = 'DELETE GET HEAD POST PUT EVEN_NEW_ONES'.split(' ')
+ cases = "DELETE GET HEAD POST PUT EVEN_NEW_ONES".split(" ")
with tests.server_route(routes, request_count=len(cases) * 2) as uri:
for method in cases:
- response, content = http.request(uri, method, body=b'q q')
+ response, content = http.request(uri, method, body=b"q q")
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.method == 'GET'
+ assert reflected.method == "GET"
def test_etag_used():
@@ -393,40 +447,45 @@ def test_etag_used():
response_kwargs = dict(
add_date=True,
add_etag=True,
- body=b'something',
- headers={
- 'cache-control': 'public,max-age=300',
- },
+ body=b"something",
+ headers={"cache-control": "public,max-age=300"},
)
def handler(request):
- if request.headers.get('range'):
+ if request.headers.get("range"):
return tests.http_response_bytes(status=206, **response_kwargs)
return tests.http_response_bytes(**response_kwargs)
with tests.server_request(handler, request_count=2) as uri:
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
- assert response['etag'] == '"437b930db84b8079c2dd804a71936b5f"'
+ response, _ = http.request(uri, "GET", headers={"accept-encoding": "identity"})
+ assert response["etag"] == '"437b930db84b8079c2dd804a71936b5f"'
- http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
+ http.request(uri, "GET", headers={"accept-encoding": "identity"})
response, _ = http.request(
- uri, 'GET',
- headers={'accept-encoding': 'identity', 'cache-control': 'must-revalidate'},
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "must-revalidate"},
)
assert response.status == 200
assert response.fromcache
# TODO: API to read cache item, at least internal to tests
- cache_file_name = os.path.join(cache_path, httplib2.safename(httplib2.urlnorm(uri)[-1]))
- with open(cache_file_name, 'r') as f:
+ cache_file_name = os.path.join(
+ cache_path, httplib2.safename(httplib2.urlnorm(uri)[-1])
+ )
+ with open(cache_file_name, "r") as f:
status_line = f.readline()
assert status_line.startswith("status:")
- response, content = http.request(uri, 'HEAD', headers={'accept-encoding': 'identity'})
+ response, content = http.request(
+ uri, "HEAD", headers={"accept-encoding": "identity"}
+ )
assert response.status == 200
assert response.fromcache
- response, content = http.request(uri, 'GET', headers={'accept-encoding': 'identity', 'range': 'bytes=0-0'})
+ response, content = http.request(
+ uri, "GET", headers={"accept-encoding": "identity", "range": "bytes=0-0"}
+ )
assert response.status == 206
assert not response.fromcache
@@ -434,63 +493,67 @@ def test_etag_used():
def test_etag_ignore():
# Test that we can forcibly ignore ETags
http = httplib2.Http(cache=tests.get_cache_path())
- response_kwargs = dict(
- add_date=True,
- add_etag=True,
- )
+ response_kwargs = dict(add_date=True, add_etag=True)
with tests.server_reflect(request_count=3, **response_kwargs) as uri:
- response, content = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
+ response, content = http.request(
+ uri, "GET", headers={"accept-encoding": "identity"}
+ )
assert response.status == 200
- assert response['etag'] != ""
+ assert response["etag"] != ""
response, content = http.request(
- uri, 'GET',
- headers={'accept-encoding': 'identity', 'cache-control': 'max-age=0'},
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
)
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.headers.get('if-none-match')
+ assert reflected.headers.get("if-none-match")
http.ignore_etag = True
response, content = http.request(
- uri, 'GET',
- headers={'accept-encoding': 'identity', 'cache-control': 'max-age=0'},
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
)
assert not response.fromcache
reflected = tests.HttpRequest.from_bytes(content)
- assert not reflected.headers.get('if-none-match')
+ assert not reflected.headers.get("if-none-match")
def test_etag_override():
# Test that we can forcibly override ETags
http = httplib2.Http(cache=tests.get_cache_path())
- response_kwargs = dict(
- add_date=True,
- add_etag=True,
- )
+ response_kwargs = dict(add_date=True, add_etag=True)
with tests.server_reflect(request_count=3, **response_kwargs) as uri:
- response, _ = http.request(uri, 'GET', headers={'accept-encoding': 'identity'})
+ response, _ = http.request(uri, "GET", headers={"accept-encoding": "identity"})
assert response.status == 200
- assert response['etag'] != ''
+ assert response["etag"] != ""
response, content = http.request(
- uri, 'GET',
- headers={'accept-encoding': 'identity', 'cache-control': 'max-age=0'},
+ uri,
+ "GET",
+ headers={"accept-encoding": "identity", "cache-control": "max-age=0"},
)
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.headers.get('if-none-match')
- assert reflected.headers.get('if-none-match') != 'fred'
+ assert reflected.headers.get("if-none-match")
+ assert reflected.headers.get("if-none-match") != "fred"
response, content = http.request(
- uri, 'GET',
- headers={'accept-encoding': 'identity', 'cache-control': 'max-age=0', 'if-none-match': 'fred'},
+ uri,
+ "GET",
+ headers={
+ "accept-encoding": "identity",
+ "cache-control": "max-age=0",
+ "if-none-match": "fred",
+ },
)
assert response.status == 200
reflected = tests.HttpRequest.from_bytes(content)
- assert reflected.headers.get('if-none-match') == 'fred'
+ assert reflected.headers.get("if-none-match") == "fred"
-@pytest.mark.skip(reason='was commented in legacy code')
+@pytest.mark.skip(reason="was commented in legacy code")
def test_get_304_end_to_end():
pass
# Test that end to end headers get overwritten in the cache
@@ -517,23 +580,18 @@ def test_get_304_last_modified():
def handler(read):
read()
yield tests.http_response_bytes(
- status=200,
- body=b'something',
- headers={
- 'date': date,
- 'last-modified': date,
- },
+ status=200, body=b"something", headers={"date": date, "last-modified": date}
)
request2 = read()
- assert request2.headers['if-modified-since'] == date
+ assert request2.headers["if-modified-since"] == date
yield tests.http_response_bytes(status=304)
with tests.server_yield(handler, request_count=2) as uri:
- response, content = http.request(uri, 'GET')
- assert response.get('last-modified') == date
+ response, content = http.request(uri, "GET")
+ assert response.get("last-modified") == date
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert response.fromcache
@@ -542,51 +600,48 @@ def test_get_307():
# Test that we do follow 307 redirects but
# do not cache the 307
http = httplib2.Http(cache=tests.get_cache_path(), timeout=1)
- r307 = tests.http_response_bytes(
- status=307,
- headers={'location': '/final'},
- )
+ r307 = tests.http_response_bytes(status=307, headers={"location": "/final"})
r200 = tests.http_response_bytes(
status=200,
add_date=True,
- body=b'final content\n',
- headers={'cache-control': 'max-age=300'},
+ body=b"final content\n",
+ headers={"cache-control": "max-age=300"},
)
with tests.server_list_http([r307, r200, r307]) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.previous.status == 307
assert not response.previous.fromcache
assert response.status == 200
assert not response.fromcache
- assert content == b'final content\n'
+ assert content == b"final content\n"
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.previous.status == 307
assert not response.previous.fromcache
assert response.status == 200
assert response.fromcache
- assert content == b'final content\n'
+ assert content == b"final content\n"
def test_get_410():
# Test that we pass 410's through
http = httplib2.Http()
with tests.server_const_http(status=410) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 410
def test_get_duplicate_headers():
# Test that duplicate headers get concatenated via ','
http = httplib2.Http()
- response = b'''HTTP/1.0 200 OK\r\n\
+ response = b"""HTTP/1.0 200 OK\r\n\
Link: link1\r\n\
Content-Length: 7\r\n\
Link: link2\r\n\r\n\
-content'''
+content"""
with tests.server_const_bytes(response) as uri:
- response, content = http.request(uri, 'GET')
+ response, content = http.request(uri, "GET")
assert response.status == 200
assert content == b"content"
- assert response['link'], 'link1, link2'
+ assert response["link"], "link1, link2"
diff --git a/tests/test_other.py b/tests/test_other.py
index 61b0d46..f714c61 100644
--- a/tests/test_other.py
+++ b/tests/test_other.py
@@ -12,24 +12,30 @@ from six.moves import urllib
@pytest.mark.skipif(
sys.version_info <= (3,),
- reason='TODO: httplib2._convert_byte_str was defined only in python3 code version',
+ reason=(
+ "TODO: httplib2._convert_byte_str was defined only in python3 code " "version"
+ ),
)
def test_convert_byte_str():
with tests.assert_raises(TypeError):
httplib2._convert_byte_str(4)
- assert httplib2._convert_byte_str(b'Hello') == 'Hello'
- assert httplib2._convert_byte_str('World') == 'World'
+ assert httplib2._convert_byte_str(b"Hello") == "Hello"
+ assert httplib2._convert_byte_str("World") == "World"
def test_reflect():
http = httplib2.Http()
with tests.server_reflect() as uri:
- response, content = http.request(uri + '?query', 'METHOD')
+ response, content = http.request(uri + "?query", "METHOD")
assert response.status == 200
host = urllib.parse.urlparse(uri).netloc
- assert content.startswith('''\
+ assert content.startswith(
+ """\
METHOD /?query HTTP/1.1\r\n\
-Host: {host}\r\n'''.format(host=host).encode()), content
+Host: {host}\r\n""".format(
+ host=host
+ ).encode()
+ ), content
def test_pickle_http():
@@ -41,29 +47,32 @@ def test_pickle_http():
assert new_http.certificates.credentials == http.certificates.credentials
assert new_http.cache.cache == http.cache.cache
for key in new_http.__dict__:
- if key not in ('cache', 'certificates', 'credentials'):
+ if key not in ("cache", "certificates", "credentials"):
assert getattr(new_http, key) == getattr(http, key)
def test_pickle_http_with_connection():
http = httplib2.Http()
- http.request('http://random-domain:81/', connection_type=tests.MockHTTPConnection)
+ http.request("http://random-domain:81/", connection_type=tests.MockHTTPConnection)
new_http = pickle.loads(pickle.dumps(http))
- assert tuple(http.connections) == ('http:random-domain:81',)
+ assert tuple(http.connections) == ("http:random-domain:81",)
assert new_http.connections == {}
def test_pickle_custom_request_http():
http = httplib2.Http()
http.request = lambda: None
- http.request.dummy_attr = 'dummy_value'
+ http.request.dummy_attr = "dummy_value"
new_http = pickle.loads(pickle.dumps(http))
- assert getattr(new_http.request, 'dummy_attr', None) is None
+ assert getattr(new_http.request, "dummy_attr", None) is None
@pytest.mark.xfail(
sys.version_info >= (3,),
- reason='FIXME: for unknown reason global timeout test fails in Python3 with response 200',
+ reason=(
+ "FIXME: for unknown reason global timeout test fails in Python3 "
+ "with response 200"
+ ),
)
def test_timeout_global():
def handler(request):
@@ -73,7 +82,7 @@ def test_timeout_global():
try:
socket.setdefaulttimeout(0.1)
except Exception:
- pytest.skip('cannot set global socket timeout')
+ pytest.skip("cannot set global socket timeout")
try:
http = httplib2.Http()
http.force_exception_to_status_code = True
@@ -100,7 +109,7 @@ def test_timeout_individual():
def test_timeout_https():
- c = httplib2.HTTPSConnectionWithTimeout('localhost', 80, timeout=47)
+ c = httplib2.HTTPSConnectionWithTimeout("localhost", 80, timeout=47)
assert 47 == c.timeout
@@ -114,32 +123,36 @@ def test_connection_close():
def handler(request):
g.append(request.number)
- return tests.http_response_bytes(proto='HTTP/1.1')
+ return tests.http_response_bytes(proto="HTTP/1.1")
with tests.server_request(handler, request_count=3) as uri:
- http.request(uri, 'GET') # conn1 req1
+ http.request(uri, "GET") # conn1 req1
for c in http.connections.values():
assert c.sock is not None
- http.request(uri, 'GET', headers={'connection': 'close'})
+ http.request(uri, "GET", headers={"connection": "close"})
time.sleep(0.7)
- http.request(uri, 'GET') # conn2 req1
+ http.request(uri, "GET") # conn2 req1
assert g == [1, 2, 1]
def test_get_end2end_headers():
# one end to end header
- response = {'content-type': 'application/atom+xml', 'te': 'deflate'}
+ response = {"content-type": "application/atom+xml", "te": "deflate"}
end2end = httplib2._get_end2end_headers(response)
- assert 'content-type' in end2end
- assert 'te' not in end2end
- assert 'connection' not in end2end
+ assert "content-type" in end2end
+ assert "te" not in end2end
+ assert "connection" not in end2end
# one end to end header that gets eliminated
- response = {'connection': 'content-type', 'content-type': 'application/atom+xml', 'te': 'deflate'}
+ response = {
+ "connection": "content-type",
+ "content-type": "application/atom+xml",
+ "te": "deflate",
+ }
end2end = httplib2._get_end2end_headers(response)
- assert 'content-type' not in end2end
- assert 'te' not in end2end
- assert 'connection' not in end2end
+ assert "content-type" not in end2end
+ assert "te" not in end2end
+ assert "connection" not in end2end
# Degenerate case of no headers
response = {}
@@ -147,34 +160,49 @@ def test_get_end2end_headers():
assert len(end2end) == 0
# Degenerate case of connection referring to a header not passed in
- response = {'connection': 'content-type'}
+ response = {"connection": "content-type"}
end2end = httplib2._get_end2end_headers(response)
assert len(end2end) == 0
@pytest.mark.xfail(
- os.environ.get('TRAVIS_PYTHON_VERSION') in ('2.7', 'pypy'),
- reason='FIXME: fail on Travis py27 and pypy, works elsewhere',
+ os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"),
+ reason="FIXME: fail on Travis py27 and pypy, works elsewhere",
)
-@pytest.mark.parametrize('scheme', ('http', 'https'))
+@pytest.mark.parametrize("scheme", ("http", "https"))
def test_ipv6(scheme):
# Even if IPv6 isn't installed on a machine, it should just raise socket.error
- uri = '{scheme}://[::1]:1/'.format(scheme=scheme)
+ uri = "{scheme}://[::1]:1/".format(scheme=scheme)
try:
httplib2.Http(timeout=0.1).request(uri)
except socket.gaierror:
- assert False, 'should get the address family right for IPv6'
+ assert False, "should get the address family right for IPv6"
except socket.error:
pass
-@pytest.mark.parametrize('conn_type', (httplib2.HTTPConnectionWithTimeout, httplib2.HTTPSConnectionWithTimeout))
+@pytest.mark.parametrize(
+ "conn_type",
+ (httplib2.HTTPConnectionWithTimeout, httplib2.HTTPSConnectionWithTimeout),
+)
def test_connection_proxy_info_attribute_error(conn_type):
# HTTPConnectionWithTimeout did not initialize its .proxy_info attribute
# https://github.com/httplib2/httplib2/pull/97
# Thanks to Joseph Ryan https://github.com/germanjoey
- conn = conn_type('no-such-hostname.', 80)
+ conn = conn_type("no-such-hostname.", 80)
# TODO: replace mock with dummy local server
with tests.assert_raises(socket.gaierror):
- with mock.patch('socket.socket.connect', side_effect=socket.gaierror):
- conn.request('GET', '/')
+ with mock.patch("socket.socket.connect", side_effect=socket.gaierror):
+ conn.request("GET", "/")
+
+
+def test_http_443_forced_https():
+ http = httplib2.Http()
+ http.force_exception_to_status_code = True
+ uri = "http://localhost:443/"
+ # sorry, using internal structure of Http to check chosen scheme
+ with mock.patch("httplib2.Http._request") as m:
+ http.request(uri)
+ assert len(m.call_args) > 0, "expected Http._request() call"
+ conn = m.call_args[0][0]
+ assert isinstance(conn, httplib2.HTTPConnectionWithTimeout)
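
The test_pickle_http* tests above rely on Http instances being picklable while
open sockets are not carried across. A minimal sketch, assuming a writable
cache directory (the path is illustrative):

    import pickle

    import httplib2

    http = httplib2.Http(cache="/tmp/httplib2-cache-example")  # illustrative path
    clone = pickle.loads(pickle.dumps(http))
    # Configuration round-trips, but the connection pool starts out empty,
    # even if `http` had live connections at pickling time.
    assert clone.cache.cache == http.cache.cache
    assert clone.connections == {}
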
diff --git a/tests/test_proxy.py b/tests/test_proxy.py
index 4007868..56a7d99 100644
--- a/tests/test_proxy.py
+++ b/tests/test_proxy.py
@@ -1,41 +1,53 @@
-'''Warning: these tests modify os.environ global state.
-Each test must be run in separate process.
-Must use pytest --forked or similar technique.
-'''
+"""Proxy tests.
+
+Tests modify the `os.environ` global state. Each test must be run in a
+separate process. Use `pytest --forked` or a similar technique.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
import httplib2
import mock
import os
+import pytest
import socket
import tests
+from six.moves import urllib
+
+
+def _raise_name_not_known_error(*args, **kwargs):
+ raise socket.gaierror(socket.EAI_NONAME, "Name or service not known")
def test_from_url():
- pi = httplib2.proxy_info_from_url('http://myproxy.example.com')
- assert pi.proxy_host == 'myproxy.example.com'
+ pi = httplib2.proxy_info_from_url("http://myproxy.example.com")
+ assert pi.proxy_host == "myproxy.example.com"
assert pi.proxy_port == 80
assert pi.proxy_user is None
def test_from_url_ident():
- pi = httplib2.proxy_info_from_url('http://zoidberg:fish@someproxy:99')
- assert pi.proxy_host == 'someproxy'
+ pi = httplib2.proxy_info_from_url("http://zoidberg:fish@someproxy:99")
+ assert pi.proxy_host == "someproxy"
assert pi.proxy_port == 99
- assert pi.proxy_user == 'zoidberg'
- assert pi.proxy_pass == 'fish'
+ assert pi.proxy_user == "zoidberg"
+ assert pi.proxy_pass == "fish"
def test_from_env():
- os.environ['http_proxy'] = 'http://myproxy.example.com:8080'
+ os.environ["http_proxy"] = "http://myproxy.example.com:8080"
pi = httplib2.proxy_info_from_environment()
- assert pi.proxy_host == 'myproxy.example.com'
+ assert pi.proxy_host == "myproxy.example.com"
assert pi.proxy_port == 8080
def test_from_env_https():
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['https_proxy'] = 'http://myproxy.example.com:81'
- pi = httplib2.proxy_info_from_environment('https')
- assert pi.proxy_host == 'myproxy.example.com'
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["https_proxy"] = "http://myproxy.example.com:81"
+ pi = httplib2.proxy_info_from_environment("https")
+ assert pi.proxy_host == "myproxy.example.com"
assert pi.proxy_port == 81
@@ -46,47 +58,91 @@ def test_from_env_none():
def test_applies_to():
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['https_proxy'] = 'http://myproxy.example.com:81'
- os.environ['no_proxy'] = 'localhost,example.com,.wildcard'
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["https_proxy"] = "http://myproxy.example.com:81"
+ os.environ["no_proxy"] = "localhost,example.com,.wildcard"
pi = httplib2.proxy_info_from_environment()
- assert not pi.applies_to('localhost')
- assert pi.applies_to('www.google.com')
- assert pi.applies_to('prefixlocalhost')
- assert pi.applies_to('www.example.com')
- assert pi.applies_to('sub.example.com')
- assert not pi.applies_to('sub.wildcard')
- assert not pi.applies_to('pub.sub.wildcard')
+ assert not pi.applies_to("localhost")
+ assert pi.applies_to("www.google.com")
+ assert pi.applies_to("prefixlocalhost")
+ assert pi.applies_to("www.example.com")
+ assert pi.applies_to("sub.example.com")
+ assert not pi.applies_to("sub.wildcard")
+ assert not pi.applies_to("pub.sub.wildcard")
def test_noproxy_trailing_comma():
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['no_proxy'] = 'localhost,other.host,'
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["no_proxy"] = "localhost,other.host,"
pi = httplib2.proxy_info_from_environment()
- assert not pi.applies_to('localhost')
- assert not pi.applies_to('other.host')
- assert pi.applies_to('example.domain')
+ assert not pi.applies_to("localhost")
+ assert not pi.applies_to("other.host")
+ assert pi.applies_to("example.domain")
def test_noproxy_star():
- os.environ['http_proxy'] = 'http://myproxy.example.com:80'
- os.environ['NO_PROXY'] = '*'
+ os.environ["http_proxy"] = "http://myproxy.example.com:80"
+ os.environ["NO_PROXY"] = "*"
pi = httplib2.proxy_info_from_environment()
- for host in ('localhost', '169.254.38.192', 'www.google.com'):
+ for host in ("localhost", "169.254.38.192", "www.google.com"):
assert not pi.applies_to(host)
def test_headers():
- headers = {'key0': 'val0', 'key1': 'val1'}
- pi = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, 'localhost', 1234, proxy_headers=headers)
+ headers = {"key0": "val0", "key1": "val1"}
+ pi = httplib2.ProxyInfo(
+ httplib2.socks.PROXY_TYPE_HTTP, "localhost", 1234, proxy_headers=headers
+ )
assert pi.proxy_headers == headers
-def test_github_100_socks_basestring():
- # https://github.com/httplib2/httplib2/pull/100
- # NameError: name 'basestring' is not defined
- # TODO: replace invalid address with dummy local server
- http = httplib2.Http(proxy_info=httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, '255.255.255.255', 8001))
+@pytest.mark.skipif(
+ os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"),
+ reason="Fails on Travis py27/pypy, works elsewhere. "
+ "See https://travis-ci.org/httplib2/httplib2/jobs/408769880.",
+)
+@mock.patch("socket.socket.connect", spec=True)
+def test_server_not_found_error_is_raised_for_invalid_hostname(mock_socket_connect):
+ """Invalidates https://github.com/httplib2/httplib2/pull/100."""
+ mock_socket_connect.side_effect = _raise_name_not_known_error
+ http = httplib2.Http(
+ proxy_info=httplib2.ProxyInfo(
+ httplib2.socks.PROXY_TYPE_HTTP, "255.255.255.255", 8001
+ )
+ )
with tests.assert_raises(httplib2.ServerNotFoundError):
- with mock.patch('socket.socket.connect', side_effect=socket.gaierror):
- http.request('http://255.255.255.255/', 'GET')
+ http.request("http://invalid.hostname.foo.bar/", "GET")
+
+
+def test_auth_str_bytes():
+ # https://github.com/httplib2/httplib2/pull/115
+ # Proxy-Authorization b64encode() TypeError: a bytes-like object is required, not 'str'
+ with tests.server_const_http(request_count=2) as uri:
+ uri_parsed = urllib.parse.urlparse(uri)
+ http = httplib2.Http(
+ proxy_info=httplib2.ProxyInfo(
+ httplib2.socks.PROXY_TYPE_HTTP,
+ proxy_host=uri_parsed.hostname,
+ proxy_port=uri_parsed.port,
+ proxy_rdns=True,
+ proxy_user=u"user_str",
+ proxy_pass=u"pass_str",
+ )
+ )
+ response, _ = http.request(uri, "GET")
+ assert response.status == 200
+
+ with tests.server_const_http(request_count=2) as uri:
+ uri_parsed = urllib.parse.urlparse(uri)
+ http = httplib2.Http(
+ proxy_info=httplib2.ProxyInfo(
+ httplib2.socks.PROXY_TYPE_HTTP,
+ proxy_host=uri_parsed.hostname,
+ proxy_port=uri_parsed.port,
+ proxy_rdns=True,
+ proxy_user=b"user_bytes",
+ proxy_pass=b"pass_bytes",
+ )
+ )
+ response, _ = http.request(uri, "GET")
+ assert response.status == 200
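
The proxy tests above exercise two ways of building a ProxyInfo: parsing a URL
(or the environment) and constructing one explicitly. A minimal sketch; the
proxy host, port and credentials are illustrative, and per the
test_auth_str_bytes case above proxy_user/proxy_pass may be str or bytes:

    import httplib2

    # From a URL, e.g. the value of an http_proxy environment variable.
    pi = httplib2.proxy_info_from_url("http://zoidberg:fish@someproxy:99")
    assert (pi.proxy_host, pi.proxy_port) == ("someproxy", 99)

    # Explicit construction, as used throughout the tests above.
    pi = httplib2.ProxyInfo(
        httplib2.socks.PROXY_TYPE_HTTP,
        proxy_host="proxy.example",  # illustrative
        proxy_port=3128,             # illustrative
        proxy_user="user",
        proxy_pass="secret",
    )
    http = httplib2.Http(proxy_info=pi)
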
diff --git a/tests/test_uri.py b/tests/test_uri.py
index 3ed3b74..9eb42cf 100644
--- a/tests/test_uri.py
+++ b/tests/test_uri.py
@@ -1,24 +1,26 @@
import httplib2
+import pytest
def test_from_std66():
cases = (
- ('http://example.com',
- ('http', 'example.com', '', None, None)),
- ('https://example.com',
- ('https', 'example.com', '', None, None)),
- ('https://example.com:8080',
- ('https', 'example.com:8080', '', None, None)),
- ('http://example.com/',
- ('http', 'example.com', '/', None, None)),
- ('http://example.com/path',
- ('http', 'example.com', '/path', None, None)),
- ('http://example.com/path?a=1&b=2',
- ('http', 'example.com', '/path', 'a=1&b=2', None)),
- ('http://example.com/path?a=1&b=2#fred',
- ('http', 'example.com', '/path', 'a=1&b=2', 'fred')),
- ('http://example.com/path?a=1&b=2#fred',
- ('http', 'example.com', '/path', 'a=1&b=2', 'fred')),
+ ("http://example.com", ("http", "example.com", "", None, None)),
+ ("https://example.com", ("https", "example.com", "", None, None)),
+ ("https://example.com:8080", ("https", "example.com:8080", "", None, None)),
+ ("http://example.com/", ("http", "example.com", "/", None, None)),
+ ("http://example.com/path", ("http", "example.com", "/path", None, None)),
+ (
+ "http://example.com/path?a=1&b=2",
+ ("http", "example.com", "/path", "a=1&b=2", None),
+ ),
+ (
+ "http://example.com/path?a=1&b=2#fred",
+ ("http", "example.com", "/path", "a=1&b=2", "fred"),
+ ),
+ (
+ "http://example.com/path?a=1&b=2#fred",
+ ("http", "example.com", "/path", "a=1&b=2", "fred"),
+ ),
)
for a, b in cases:
assert httplib2.parse_uri(a) == b
@@ -26,53 +28,88 @@ def test_from_std66():
def test_norm():
cases = (
- ('http://example.org',
- 'http://example.org/'),
- ('http://EXAMple.org',
- 'http://example.org/'),
- ('http://EXAMple.org?=b',
- 'http://example.org/?=b'),
- ('http://EXAMple.org/mypath?a=b',
- 'http://example.org/mypath?a=b'),
- ('http://localhost:80',
- 'http://localhost:80/'),
+ ("http://example.org", "http://example.org/"),
+ ("http://EXAMple.org", "http://example.org/"),
+ ("http://EXAMple.org?=b", "http://example.org/?=b"),
+ ("http://EXAMple.org/mypath?a=b", "http://example.org/mypath?a=b"),
+ ("http://localhost:80", "http://localhost:80/"),
)
for a, b in cases:
assert httplib2.urlnorm(a)[-1] == b
- assert httplib2.urlnorm('http://localhost:80/') == httplib2.urlnorm('HTTP://LOCALHOST:80')
+ assert httplib2.urlnorm("http://localhost:80/") == httplib2.urlnorm(
+ "HTTP://LOCALHOST:80"
+ )
try:
- httplib2.urlnorm('/')
- assert False, 'Non-absolute URIs should raise an exception'
+ httplib2.urlnorm("/")
+ assert False, "Non-absolute URIs should raise an exception"
except httplib2.RelativeURIError:
pass
-def test_safename():
- cases = (
- ('http://example.org/fred/?a=b',
- 'example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f'),
- ('http://example.org/fred?/a=b',
- 'example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b'),
- ('http://www.example.org/fred?/a=b',
- 'www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968'),
- ('https://www.example.org/fred?/a=b',
- 'www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d'),
- (httplib2.urlnorm('http://WWW')[-1],
- httplib2.safename(httplib2.urlnorm('http://www')[-1])),
- (u'http://\u2304.org/fred/?a=b',
- 'xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193'),
- )
- for a, b in cases:
- assert httplib2.safename(a) == b
+@pytest.mark.parametrize(
+ "data",
+ (
+ ("", ",d41d8cd98f00b204e9800998ecf8427e"),
+ (
+ "http://example.org/fred/?a=b",
+ "example.orgfreda=b,58489f63a7a83c3b7794a6a398ee8b1f",
+ ),
+ (
+ "http://example.org/fred?/a=b",
+ "example.orgfreda=b,8c5946d56fec453071f43329ff0be46b",
+ ),
+ (
+ "http://www.example.org/fred?/a=b",
+ "www.example.orgfreda=b,499c44b8d844a011b67ea2c015116968",
+ ),
+ (
+ "https://www.example.org/fred?/a=b",
+ "www.example.orgfreda=b,692e843a333484ce0095b070497ab45d",
+ ),
+ (
+ httplib2.urlnorm("http://WWW")[-1],
+ httplib2.safename(httplib2.urlnorm("http://www")[-1]),
+ ),
+ (
+ u"http://\u2304.org/fred/?a=b",
+ ".orgfreda=b,ecaf0f97756c0716de76f593bd60a35e",
+ ),
+ (
+ "normal-resource-name.js",
+ "normal-resource-name.js,8ff7c46fd6e61bf4e91a0a1606954a54",
+ ),
+ (
+ "foo://dom/path/brath/carapath",
+ "dompathbrathcarapath,83db942781ed975c7a5b7c24039f8ca3",
+ ),
+ ("with/slash", "withslash,17cc656656bb8ce2411bd41ead56d176"),
+ (
+ "thisistoomuch" * 42,
+ ("thisistoomuch" * 6) + "thisistoomuc,c4553439dd179422c6acf6a8ac093eb6",
+ ),
+ (u"\u043f\u0440", ",9f18c0db74a9734e9d18461e16345083"),
+ (u"\u043f\u0440".encode("utf-8"), ",9f18c0db74a9734e9d18461e16345083"),
+ (
+ b"column\tvalues/unstr.zip",
+ "columnvaluesunstr.zip,b9740dcd0553e11b526450ceb8f76683",
+ ),
+ ),
+ ids=str,
+)
+def test_safename(data):
+ result = httplib2.safename(data[0])
+ assert result == data[1]
+
- assert httplib2.safename('http://www') != httplib2.safename('https://www')
+def test_safename2():
+ assert httplib2.safename("http://www") != httplib2.safename("https://www")
# Test the max length limits
- uri = 'http://' + ('w' * 200) + '.org'
- uri2 = 'http://' + ('w' * 201) + '.org'
+ uri = "http://" + ("w" * 200) + ".org"
+ uri2 = "http://" + ("w" * 201) + ".org"
assert httplib2.safename(uri) != httplib2.safename(uri2)
- # Max length should be 200 + 1 (',') + 32
- assert len(httplib2.safename(uri2)) == 233
- assert len(httplib2.safename(uri)) == 233
+ # Max length should be 90 + 1 (',') + 32 = 123
+ assert len(httplib2.safename(uri2)) == 123
+ assert len(httplib2.safename(uri)) == 123
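
The safename cases above double as a compact spec for the cache key format:
a sanitized name of at most 90 characters, a comma, and an md5 hex digest.
A minimal sketch reusing expected values from the table above:

    import httplib2

    # urlnorm lowercases scheme and host; the normalized URI is the last element.
    assert httplib2.urlnorm("http://EXAMple.org")[-1] == "http://example.org/"

    # safename: sanitized-name + "," + md5, capped at 90 + 1 + 32 = 123 chars.
    key = httplib2.safename("http://example.org/fred/?a=b")
    assert key == "example.orgfreda=b,58489f63a7a83c3b7794a6a398ee8b1f"
    assert len(httplib2.safename("http://" + "w" * 201 + ".org")) == 123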