Update tornado to v2.3.0.
This includes support for pycurl client-side certificates, so our version is
once again an unmodified copy of tornado.
* commit '7628968bd77123b4d43ff58a542f9dbc99def1c5':
Squashed 'tr/vendor/tornado/' changes from 3dd205c..d66c868
Conflicts:
tr/vendor/tornado/tornado/curl_httpclient.py
Change-Id: I2590a80662ca996463e6526f836b853ea96fe81e
diff --git a/.gitignore b/.gitignore
index ba77616..0e0a11e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
*.pyc
*.so
+*.class
*~
build/
/dist/
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..b928c44
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,16 @@
+# http://travis-ci.org/#!/facebook/tornado
+language: python
+# The build of 2.6 on travis has a bug related to ssl (it segfaults in
+# test_sslv2_fail)
+python:
+ - 2.7
+ - 3.2
+# TODO: install pycurl, twisted, etc (depends on python version)
+install:
+ - python setup.py install
+script:
+ # Must cd somewhere else so python3 doesn't get confused and run
+ # the python2 code from the current directory instead of the installed
+ # 2to3 version.
+ - cd maint
+ - python -m tornado.test.runtests
diff --git a/MANIFEST.in b/MANIFEST.in
index dcd3459..3614dac 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -5,4 +5,5 @@
include tornado/test/test.crt
include tornado/test/test.key
include tornado/test/static/robots.txt
+include tornado/test/templates/utf8.html
global-exclude _auto2to3*
\ No newline at end of file
diff --git a/maint/requirements.txt b/maint/requirements.txt
index 30e0d32..774ec1b 100644
--- a/maint/requirements.txt
+++ b/maint/requirements.txt
@@ -7,7 +7,9 @@
# Other useful tools
Sphinx==1.1.2
+autopep8==0.5
coverage==3.5.1
+pep8==0.6.1
pyflakes==0.5.0
tox==1.3
virtualenv==1.7
diff --git a/maint/scripts/custom_fixers/__init__.py b/maint/scripts/custom_fixers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/maint/scripts/custom_fixers/__init__.py
diff --git a/maint/scripts/custom_fixers/fix_future_imports.py b/maint/scripts/custom_fixers/fix_future_imports.py
new file mode 100644
index 0000000..3037abf
--- /dev/null
+++ b/maint/scripts/custom_fixers/fix_future_imports.py
@@ -0,0 +1,60 @@
+"""Updates all source files to import the same set of __future__ directives.
+"""
+from lib2to3 import fixer_base
+from lib2to3 import pytree
+from lib2to3.pgen2 import token
+from lib2to3.fixer_util import FromImport, Name, Comma, Newline
+
+# copied from fix_tuple_params.py
+def is_docstring(stmt):
+ return isinstance(stmt, pytree.Node) and \
+ stmt.children[0].type == token.STRING
+
+class FixFutureImports(fixer_base.BaseFix):
+ BM_compatible = True
+
+ PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
+
+ def start_tree(self, tree, filename):
+ self.found_future_import = False
+
+ def new_future_import(self, old):
+ new = FromImport("__future__",
+ [Name("absolute_import", prefix=" "), Comma(),
+ Name("division", prefix=" "), Comma(),
+ Name("with_statement", prefix=" ")])
+ if old is not None:
+ new.prefix = old.prefix
+ return new
+
+ def transform(self, node, results):
+ self.found_future_import = True
+ return self.new_future_import(node)
+
+ def finish_tree(self, tree, filename):
+ if self.found_future_import:
+ return
+ if not isinstance(tree, pytree.Node):
+ # Empty files (usually __init__.py) show up as a single Leaf
+ # instead of a Node, so leave them alone
+ return
+ first_stmt = tree.children[0]
+ if is_docstring(first_stmt):
+ # Skip a line and add the import after the docstring
+ tree.insert_child(1, Newline())
+ pos = 2
+ elif first_stmt.prefix:
+ # No docstring, but an initial comment (perhaps a #! line).
+ # Transfer the initial comment to a new blank line.
+ newline = Newline()
+ newline.prefix = first_stmt.prefix
+ first_stmt.prefix = ""
+ tree.insert_child(0, newline)
+ pos = 1
+ else:
+ # No comments or docstring, just insert at the start
+ pos = 0
+ tree.insert_child(pos, self.new_future_import(None))
+ tree.insert_child(pos+1, Newline()) # terminates the import stmt
+
+
diff --git a/maint/scripts/run_autopep8.sh b/maint/scripts/run_autopep8.sh
new file mode 100755
index 0000000..5d85efd
--- /dev/null
+++ b/maint/scripts/run_autopep8.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+# Runs autopep8 in the configuration used for tornado.
+#
+# W602 is "deprecated form of raising exception", but the fix is incorrect
+# (and I'm not sure if the three-argument form of raise is really deprecated
+# in the first place)
+autopep8 --ignore=W602 -i tornado/*.py tornado/platform/*.py tornado/test/*.py
diff --git a/maint/scripts/run_fixers.py b/maint/scripts/run_fixers.py
new file mode 100755
index 0000000..cfa2c36
--- /dev/null
+++ b/maint/scripts/run_fixers.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+import sys
+from lib2to3.main import main
+
+sys.exit(main("custom_fixers"))
diff --git a/maint/vm/freebsd/Vagrantfile b/maint/vm/freebsd/Vagrantfile
index 1672492..b86bd80 100644
--- a/maint/vm/freebsd/Vagrantfile
+++ b/maint/vm/freebsd/Vagrantfile
@@ -1,5 +1,3 @@
-require 'vagrant/systems/freebsd'
-
Vagrant::Config.run do |config|
# A freebsd image can be created with veewee
# https://github.com/jedi4ever/veewee
@@ -10,18 +8,16 @@
# vagrant box add freebsd freebsd.box
config.vm.box = "freebsd"
- config.vm.system = :freebsd
+ config.vm.guest = :freebsd
# Note that virtualbox shared folders don't work with freebsd, so
# we'd need nfs shared folders here even if virtualbox gains
# support for symlinks.
- config.vm.network "172.19.1.3"
- config.vm.share_folder("tornado", "/tornado", "../../..", :nfs => true)
-
- # This doesn't seem to get mounted by default for freebsd,
- # but that's actually a good thing since there are apparently issues
+ config.vm.network :hostonly, "172.19.1.3"
+ # Name this v-root to clobber the default /vagrant mount point.
+ # We can't mount it over nfs because there are apparently issues
# when one nfs export is a subfolder of another.
- #config.vm.share_folder("v-root", "/vagrant", ".", :nfs => true)
+ config.vm.share_folder("v-root", "/tornado", "../../..", :nfs => true)
config.vm.provision :shell, :path => "setup.sh"
end
\ No newline at end of file
diff --git a/maint/vm/ubuntu10.04/Vagrantfile b/maint/vm/ubuntu10.04/Vagrantfile
index 63520cb..31f7b18 100644
--- a/maint/vm/ubuntu10.04/Vagrantfile
+++ b/maint/vm/ubuntu10.04/Vagrantfile
@@ -2,7 +2,7 @@
config.vm.box = "lucid64"
config.vm.box_url = "http://files.vagrantup.com/lucid64.box"
- config.vm.network "172.19.1.2"
+ config.vm.network :hostonly, "172.19.1.2"
config.vm.share_folder("tornado", "/tornado", "../../..", :nfs=> true)
config.vm.provision :shell, :path => "setup.sh"
diff --git a/maint/vm/ubuntu10.04/tox.ini b/maint/vm/ubuntu10.04/tox.ini
index 87841ac..2ae1b27 100644
--- a/maint/vm/ubuntu10.04/tox.ini
+++ b/maint/vm/ubuntu10.04/tox.ini
@@ -17,6 +17,7 @@
pycurl
simplejson
twisted==11.0.0
+ zope.interface<4.0
[testenv:py26-full]
deps =
diff --git a/maint/vm/ubuntu11.04/Vagrantfile b/maint/vm/ubuntu12.04/Vagrantfile
similarity index 67%
rename from maint/vm/ubuntu11.04/Vagrantfile
rename to maint/vm/ubuntu12.04/Vagrantfile
index 00a1938..9fcc82a 100644
--- a/maint/vm/ubuntu11.04/Vagrantfile
+++ b/maint/vm/ubuntu12.04/Vagrantfile
@@ -1,7 +1,7 @@
Vagrant::Config.run do |config|
- config.vm.box = "ubuntu11.04"
+ config.vm.box = "ubuntu12.04"
- config.vm.network "172.19.1.4"
+ config.vm.network :hostonly, "172.19.1.5"
config.vm.share_folder("tornado", "/tornado", "../../..", :nfs=> true)
config.vm.provision :shell, :path => "setup.sh"
diff --git a/maint/vm/ubuntu11.04/setup.sh b/maint/vm/ubuntu12.04/setup.sh
similarity index 72%
rename from maint/vm/ubuntu11.04/setup.sh
rename to maint/vm/ubuntu12.04/setup.sh
index d5a30f6..67119e6 100644
--- a/maint/vm/ubuntu11.04/setup.sh
+++ b/maint/vm/ubuntu12.04/setup.sh
@@ -2,12 +2,6 @@
set -e
-# Ubuntu 10.10+ do some extra permissions checks for hard links.
-# Vagrant's nfs shared folders come through with funny uids, but
-# attempts to access them still work despite the visible permissions
-# being incorrect.
-sysctl -w kernel.yama.protected_nonaccess_hardlinks=0
-
apt-get update
# libcurl4-gnutls-dev is the default if you ask for libcurl4-dev, but it
@@ -26,7 +20,7 @@
apt-get -y install $APT_PACKAGES
-# Ubuntu 11.04 has python 2.7 as default; install more from here.
+# Ubuntu 12.04 has python 2.7 as default; install more from here.
# The most important thing is to have both 2.5 and a later version so we
# test with both tornado.epoll and 2.6+ stdlib's select.epoll.
add-apt-repository ppa:fkrull/deadsnakes
@@ -35,8 +29,6 @@
DEADSNAKES_PACKAGES="
python2.5
python2.5-dev
-python2.6
-python2.6-dev
python3.2
python3.2-dev
"
diff --git a/maint/vm/ubuntu11.04/tox.ini b/maint/vm/ubuntu12.04/tox.ini
similarity index 73%
rename from maint/vm/ubuntu11.04/tox.ini
rename to maint/vm/ubuntu12.04/tox.ini
index 87841ac..e30ef22 100644
--- a/maint/vm/ubuntu11.04/tox.ini
+++ b/maint/vm/ubuntu12.04/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27-full, py25-full, py32, py25, py26, py26-full, py27
+envlist = py27-full, py25-full, py32, py25, py27
setupdir=/tornado
toxworkdir=/home/vagrant/tox-tornado
@@ -17,12 +17,7 @@
pycurl
simplejson
twisted==11.0.0
-
-[testenv:py26-full]
-deps =
- MySQL-python
- pycurl
- twisted==11.0.0
+ zope.interface<4.0
[testenv:py27-full]
basepython = python2.7
diff --git a/setup.py b/setup.py
index 495459e..1456f1d 100644
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@
extensions.append(distutils.core.Extension(
"tornado.epoll", ["tornado/epoll.c"]))
-version = "2.2"
+version = "2.3"
if major >= 3:
import setuptools # setuptools is required for use_2to3
@@ -45,7 +45,8 @@
packages = ["tornado", "tornado.test", "tornado.platform"],
package_data = {
"tornado": ["ca-certificates.crt"],
- "tornado.test": ["README", "test.crt", "test.key", "static/robots.txt"],
+ "tornado.test": ["README", "test.crt", "test.key", "static/robots.txt",
+ "templates/utf8.html"],
},
ext_modules = extensions,
author="Facebook",
diff --git a/tornado/__init__.py b/tornado/__init__.py
index f13041e..31daebf 100644
--- a/tornado/__init__.py
+++ b/tornado/__init__.py
@@ -16,6 +16,8 @@
"""The Tornado web server and tools."""
+from __future__ import absolute_import, division, with_statement
+
# version is a human-readable version number.
# version_info is a four-tuple for programmatic comparison. The first
@@ -23,5 +25,5 @@
# is zero for an official release, positive for a development branch,
# or negative for a release candidate (after the base version number
# has been incremented)
-version = "2.2"
-version_info = (2, 2, 0, 0)
+version = "2.3"
+version_info = (2, 3, 0, 0)
diff --git a/tornado/auth.py b/tornado/auth.py
index a716210..a61e359 100644
--- a/tornado/auth.py
+++ b/tornado/auth.py
@@ -44,6 +44,8 @@
# Save the user with, e.g., set_secure_cookie()
"""
+from __future__ import absolute_import, division, with_statement
+
import base64
import binascii
import hashlib
@@ -59,13 +61,14 @@
from tornado.httputil import url_concat
from tornado.util import bytes_type, b
+
class OpenIdMixin(object):
"""Abstract implementation of OpenID and Attribute Exchange.
See GoogleMixin below for example implementations.
"""
def authenticate_redirect(self, callback_uri=None,
- ax_attrs=["name","email","language","username"]):
+ ax_attrs=["name", "email", "language", "username"]):
"""Returns the authentication URL for this service.
After authentication, the service will redirect back to the given
@@ -91,7 +94,8 @@
args = dict((k, v[-1]) for k, v in self.request.arguments.iteritems())
args["openid.mode"] = u"check_authentication"
url = self._OPENID_ENDPOINT
- if http_client is None: http_client = httpclient.AsyncHTTPClient()
+ if http_client is None:
+ http_client = httpclient.AsyncHTTPClient()
http_client.fetch(url, self.async_callback(
self._on_authentication_verified, callback),
method="POST", body=urllib.urlencode(args))
@@ -158,8 +162,10 @@
self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
ax_ns = name[10:]
break
+
def get_ax_arg(uri):
- if not ax_ns: return u""
+ if not ax_ns:
+ return u""
prefix = "openid." + ax_ns + ".type."
ax_name = None
for name in self.request.arguments.iterkeys():
@@ -167,7 +173,8 @@
part = name[len(prefix):]
ax_name = "openid." + ax_ns + ".value." + part
break
- if not ax_name: return u""
+ if not ax_name:
+ return u""
return self.get_argument(ax_name, u"")
email = get_ax_arg("http://axschema.org/contact/email")
@@ -190,9 +197,12 @@
user["name"] = u" ".join(name_parts)
elif email:
user["name"] = email.split("@")[0]
- if email: user["email"] = email
- if locale: user["locale"] = locale
- if username: user["username"] = username
+ if email:
+ user["email"] = email
+ if locale:
+ user["locale"] = locale
+ if username:
+ user["username"] = username
callback(user)
@@ -235,7 +245,6 @@
self._on_request_token, self._OAUTH_AUTHORIZE_URL,
callback_uri))
-
def get_authenticated_user(self, callback, http_client=None):
"""Gets the OAuth authorized user and access token on callback.
@@ -269,7 +278,7 @@
http_client.fetch(self._oauth_access_token_url(token),
self.async_callback(self._on_access_token, callback))
- def _oauth_request_token_url(self, callback_uri= None, extra_params=None):
+ def _oauth_request_token_url(self, callback_uri=None, extra_params=None):
consumer_token = self._oauth_consumer_token()
url = self._OAUTH_REQUEST_TOKEN_URL
args = dict(
@@ -283,7 +292,8 @@
if callback_uri:
args["oauth_callback"] = urlparse.urljoin(
self.request.full_url(), callback_uri)
- if extra_params: args.update(extra_params)
+ if extra_params:
+ args.update(extra_params)
signature = _oauth10a_signature(consumer_token, "GET", url, args)
else:
signature = _oauth_signature(consumer_token, "GET", url, args)
@@ -316,7 +326,7 @@
oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"),
)
if "verifier" in request_token:
- args["oauth_verifier"]=request_token["verifier"]
+ args["oauth_verifier"] = request_token["verifier"]
if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
signature = _oauth10a_signature(consumer_token, "GET", url, args,
@@ -376,11 +386,12 @@
base_args["oauth_signature"] = signature
return base_args
+
class OAuth2Mixin(object):
"""Abstract implementation of OAuth v 2."""
def authorize_redirect(self, redirect_uri=None, client_id=None,
- client_secret=None, extra_params=None ):
+ client_secret=None, extra_params=None):
"""Redirects the user to obtain OAuth authorization for this service.
Some providers require that you register a Callback
@@ -393,11 +404,12 @@
"redirect_uri": redirect_uri,
"client_id": client_id
}
- if extra_params: args.update(extra_params)
+ if extra_params:
+ args.update(extra_params)
self.redirect(
url_concat(self._OAUTH_AUTHORIZE_URL, args))
- def _oauth_request_token_url(self, redirect_uri= None, client_id = None,
+ def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
client_secret=None, code=None,
extra_params=None):
url = self._OAUTH_ACCESS_TOKEN_URL
@@ -407,9 +419,11 @@
client_id=client_id,
client_secret=client_secret,
)
- if extra_params: args.update(extra_params)
+ if extra_params:
+ args.update(extra_params)
return url_concat(url, args)
+
class TwitterMixin(OAuthMixin):
"""Twitter OAuth authentication.
@@ -450,15 +464,14 @@
_OAUTH_AUTHENTICATE_URL = "http://api.twitter.com/oauth/authenticate"
_OAUTH_NO_CALLBACKS = False
-
- def authenticate_redirect(self, callback_uri = None):
+ def authenticate_redirect(self, callback_uri=None):
"""Just like authorize_redirect(), but auto-redirects if authorized.
This is generally the right interface to use if you are using
Twitter for single-sign on.
"""
http = httpclient.AsyncHTTPClient()
- http.fetch(self._oauth_request_token_url(callback_uri = callback_uri), self.async_callback(
+ http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), self.async_callback(
self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None))
def twitter_request(self, path, callback, access_token=None,
@@ -514,7 +527,8 @@
oauth = self._oauth_request_parameters(
url, access_token, all_args, method=method)
args.update(oauth)
- if args: url += "?" + urllib.urlencode(args)
+ if args:
+ url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_twitter_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
@@ -590,7 +604,6 @@
_OAUTH_NO_CALLBACKS = True
_OAUTH_VERSION = "1.0"
-
def friendfeed_request(self, path, callback, access_token=None,
post_args=None, **args):
"""Fetches the given relative API path, e.g., "/bret/friends"
@@ -636,7 +649,8 @@
oauth = self._oauth_request_parameters(
url, access_token, all_args, method=method)
args.update(oauth)
- if args: url += "?" + urllib.urlencode(args)
+ if args:
+ url += "?" + urllib.urlencode(args)
callback = self.async_callback(self._on_friendfeed_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
@@ -701,7 +715,7 @@
_OAUTH_ACCESS_TOKEN_URL = "https://www.google.com/accounts/OAuthGetAccessToken"
def authorize_redirect(self, oauth_scope, callback_uri=None,
- ax_attrs=["name","email","language","username"]):
+ ax_attrs=["name", "email", "language", "username"]):
"""Authenticates and authorizes for the given Google resource.
Some of the available resources are:
@@ -746,6 +760,7 @@
def _oauth_get_user(self, access_token, callback):
OpenIdMixin.get_authenticated_user(self, callback)
+
class FacebookMixin(object):
"""Facebook Connect authentication.
@@ -926,9 +941,11 @@
def _signature(self, args):
parts = ["%s=%s" % (n, args[n]) for n in sorted(args.keys())]
body = "".join(parts) + self.settings["facebook_secret"]
- if isinstance(body, unicode): body = body.encode("utf-8")
+ if isinstance(body, unicode):
+ body = body.encode("utf-8")
return hashlib.md5(body).hexdigest()
+
class FacebookGraphMixin(OAuth2Mixin):
"""Facebook authentication using the new Graph API and OAuth2."""
_OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?"
@@ -937,68 +954,68 @@
def get_authenticated_user(self, redirect_uri, client_id, client_secret,
code, callback, extra_fields=None):
- """Handles the login for the Facebook user, returning a user object.
+ """Handles the login for the Facebook user, returning a user object.
- Example usage::
+ Example usage::
- class FacebookGraphLoginHandler(LoginHandler, tornado.auth.FacebookGraphMixin):
- @tornado.web.asynchronous
- def get(self):
- if self.get_argument("code", False):
- self.get_authenticated_user(
- redirect_uri='/auth/facebookgraph/',
- client_id=self.settings["facebook_api_key"],
- client_secret=self.settings["facebook_secret"],
- code=self.get_argument("code"),
- callback=self.async_callback(
- self._on_login))
- return
- self.authorize_redirect(redirect_uri='/auth/facebookgraph/',
- client_id=self.settings["facebook_api_key"],
- extra_params={"scope": "read_stream,offline_access"})
+ class FacebookGraphLoginHandler(LoginHandler, tornado.auth.FacebookGraphMixin):
+ @tornado.web.asynchronous
+ def get(self):
+ if self.get_argument("code", False):
+ self.get_authenticated_user(
+ redirect_uri='/auth/facebookgraph/',
+ client_id=self.settings["facebook_api_key"],
+ client_secret=self.settings["facebook_secret"],
+ code=self.get_argument("code"),
+ callback=self.async_callback(
+ self._on_login))
+ return
+ self.authorize_redirect(redirect_uri='/auth/facebookgraph/',
+ client_id=self.settings["facebook_api_key"],
+ extra_params={"scope": "read_stream,offline_access"})
- def _on_login(self, user):
- logging.error(user)
- self.finish()
+ def _on_login(self, user):
+ logging.error(user)
+ self.finish()
- """
- http = httpclient.AsyncHTTPClient()
- args = {
- "redirect_uri": redirect_uri,
- "code": code,
- "client_id": client_id,
- "client_secret": client_secret,
- }
+ """
+ http = httpclient.AsyncHTTPClient()
+ args = {
+ "redirect_uri": redirect_uri,
+ "code": code,
+ "client_id": client_id,
+ "client_secret": client_secret,
+ }
- fields = set(['id', 'name', 'first_name', 'last_name',
- 'locale', 'picture', 'link'])
- if extra_fields: fields.update(extra_fields)
+ fields = set(['id', 'name', 'first_name', 'last_name',
+ 'locale', 'picture', 'link'])
+ if extra_fields:
+ fields.update(extra_fields)
- http.fetch(self._oauth_request_token_url(**args),
- self.async_callback(self._on_access_token, redirect_uri, client_id,
- client_secret, callback, fields))
+ http.fetch(self._oauth_request_token_url(**args),
+ self.async_callback(self._on_access_token, redirect_uri, client_id,
+ client_secret, callback, fields))
def _on_access_token(self, redirect_uri, client_id, client_secret,
callback, fields, response):
- if response.error:
- logging.warning('Facebook auth error: %s' % str(response))
- callback(None)
- return
+ if response.error:
+ logging.warning('Facebook auth error: %s' % str(response))
+ callback(None)
+ return
- args = escape.parse_qs_bytes(escape.native_str(response.body))
- session = {
- "access_token": args["access_token"][-1],
- "expires": args.get("expires")
- }
+ args = escape.parse_qs_bytes(escape.native_str(response.body))
+ session = {
+ "access_token": args["access_token"][-1],
+ "expires": args.get("expires")
+ }
- self.facebook_request(
- path="/me",
- callback=self.async_callback(
- self._on_get_user_info, callback, session, fields),
- access_token=session["access_token"],
- fields=",".join(fields)
- )
-
+ self.facebook_request(
+ path="/me",
+ callback=self.async_callback(
+ self._on_get_user_info, callback, session, fields),
+ access_token=session["access_token"],
+ fields=",".join(fields)
+ )
def _on_get_user_info(self, callback, session, fields, user):
if user is None:
@@ -1052,8 +1069,9 @@
if access_token:
all_args["access_token"] = access_token
all_args.update(args)
- all_args.update(post_args or {})
- if all_args: url += "?" + urllib.urlencode(all_args)
+
+ if all_args:
+ url += "?" + urllib.urlencode(all_args)
callback = self.async_callback(self._on_facebook_request, callback)
http = httpclient.AsyncHTTPClient()
if post_args is not None:
@@ -1070,6 +1088,7 @@
return
callback(escape.json_decode(response.body))
+
def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
"""Calculates the HMAC-SHA1 OAuth signature for the given request.
@@ -1084,7 +1103,7 @@
base_elems.append(normalized_url)
base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
for k, v in sorted(parameters.items())))
- base_string = "&".join(_oauth_escape(e) for e in base_elems)
+ base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [escape.utf8(consumer_token["secret"])]
key_elems.append(escape.utf8(token["secret"] if token else ""))
@@ -1093,6 +1112,7 @@
hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
+
def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
"""Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.
@@ -1108,7 +1128,7 @@
base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
for k, v in sorted(parameters.items())))
- base_string = "&".join(_oauth_escape(e) for e in base_elems)
+ base_string = "&".join(_oauth_escape(e) for e in base_elems)
key_elems = [escape.utf8(urllib.quote(consumer_token["secret"], safe='~'))]
key_elems.append(escape.utf8(urllib.quote(token["secret"], safe='~') if token else ""))
key = b("&").join(key_elems)
@@ -1116,6 +1136,7 @@
hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
return binascii.b2a_base64(hash.digest())[:-1]
+
def _oauth_escape(val):
if isinstance(val, unicode):
val = val.encode("utf-8")
@@ -1130,5 +1151,3 @@
special = (b("oauth_token"), b("oauth_token_secret"))
token.update((k, p[k][0]) for k in p if k not in special)
return token
-
-
diff --git a/tornado/autoreload.py b/tornado/autoreload.py
index 7e3a3d7..286782c 100644
--- a/tornado/autoreload.py
+++ b/tornado/autoreload.py
@@ -26,7 +26,40 @@
multi-process mode is used.
"""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
+
+import os
+import sys
+
+# sys.path handling
+# -----------------
+#
+# If a module is run with "python -m", the current directory (i.e. "")
+# is automatically prepended to sys.path, but not if it is run as
+# "path/to/file.py". The processing for "-m" rewrites the former to
+# the latter, so subsequent executions won't have the same path as the
+# original.
+#
+# Conversely, when run as path/to/file.py, the directory containing
+# file.py gets added to the path, which can cause confusion as imports
+# may become relative in spite of the future import.
+#
+# We address the former problem by setting the $PYTHONPATH environment
+# variable before re-execution so the new process will see the correct
+# path. We attempt to address the latter problem when tornado.autoreload
+# is run as __main__, although we can't fix the general case because
+# we cannot reliably reconstruct the original command line
+# (http://bugs.python.org/issue14208).
+
+if __name__ == "__main__":
+ # This sys.path manipulation must come before our imports (as much
+ # as possible - if we introduced a tornado.sys or tornado.os
+ # module we'd be in trouble), or else our imports would become
+ # relative again despite the future import.
+ #
+ # There is a separate __main__ block at the end of the file to call main().
+ if sys.path[0] == os.path.dirname(__file__):
+ del sys.path[0]
import functools
import logging
@@ -44,6 +77,7 @@
except ImportError:
signal = None
+
def start(io_loop=None, check_time=500):
"""Restarts the process automatically when a module is modified.
@@ -57,6 +91,7 @@
scheduler = ioloop.PeriodicCallback(callback, check_time, io_loop=io_loop)
scheduler.start()
+
def wait():
"""Wait for a watched file to change, then restart the process.
@@ -70,6 +105,7 @@
_watched_files = set()
+
def watch(filename):
"""Add a file to the watch list.
@@ -79,6 +115,7 @@
_reload_hooks = []
+
def add_reload_hook(fn):
"""Add a function to be called before reloading the process.
@@ -89,6 +126,7 @@
"""
_reload_hooks.append(fn)
+
def _close_all_fds(io_loop):
for fd in io_loop._handlers.keys():
try:
@@ -98,6 +136,7 @@
_reload_attempted = False
+
def _reload_on_update(modify_times):
if _reload_attempted:
# We already tried to reload and it didn't work, so don't try again.
@@ -112,15 +151,18 @@
# in the standard library), and occasionally this can cause strange
# failures in getattr. Just ignore anything that's not an ordinary
# module.
- if not isinstance(module, types.ModuleType): continue
+ if not isinstance(module, types.ModuleType):
+ continue
path = getattr(module, "__file__", None)
- if not path: continue
+ if not path:
+ continue
if path.endswith(".pyc") or path.endswith(".pyo"):
path = path[:-1]
_check_file(modify_times, path)
for path in _watched_files:
_check_file(modify_times, path)
+
def _check_file(modify_times, path):
try:
modified = os.stat(path).st_mtime
@@ -133,6 +175,7 @@
logging.info("%s modified; restarting server", path)
_reload()
+
def _reload():
global _reload_attempted
_reload_attempted = True
@@ -143,6 +186,15 @@
# ioloop.set_blocking_log_threshold so it doesn't fire
# after the exec.
signal.setitimer(signal.ITIMER_REAL, 0, 0)
+ # sys.path fixes: see comments at top of file. If sys.path[0] is an empty
+ # string, we were (probably) invoked with -m and the effective path
+ # is about to change on re-exec. Add the current directory to $PYTHONPATH
+ # to ensure that the new process sees the same path we did.
+ path_prefix = '.' + os.pathsep
+ if (sys.path[0] == '' and
+ not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
+ os.environ["PYTHONPATH"] = (path_prefix +
+ os.environ.get("PYTHONPATH", ""))
if sys.platform == 'win32':
# os.execv is broken on Windows and can't properly parse command line
# arguments and executable name if they contain whitespaces. subprocess
@@ -173,9 +225,11 @@
python -m tornado.autoreload -m module.to.run [args...]
python -m tornado.autoreload path/to/script.py [args...]
"""
+
+
def main():
"""Command-line wrapper to re-run a script whenever its source changes.
-
+
Scripts may be specified by filename or module name::
python -m tornado.autoreload -m tornado.test.runtests
@@ -226,25 +280,14 @@
if mode == 'module':
# runpy did a fake import of the module as __main__, but now it's
# no longer in sys.modules. Figure out where it is and watch it.
- watch(pkgutil.get_loader(module).get_filename())
+ loader = pkgutil.get_loader(module)
+ if loader is not None:
+ watch(loader.get_filename())
wait()
-
+
if __name__ == "__main__":
- # If this module is run with "python -m tornado.autoreload", the current
- # directory is automatically prepended to sys.path, but not if it is
- # run as "path/to/tornado/autoreload.py". The processing for "-m" rewrites
- # the former to the latter, so subsequent executions won't have the same
- # path as the original. Modify os.environ here to ensure that the
- # re-executed process will have the same path.
- # Conversely, when run as path/to/tornado/autoreload.py, the directory
- # containing autoreload.py gets added to the path, but we don't want
- # tornado modules importable at top level, so remove it.
- path_prefix = '.' + os.pathsep
- if (sys.path[0] == '' and
- not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
- os.environ["PYTHONPATH"] = path_prefix + os.environ.get("PYTHONPATH", "")
- elif sys.path[0] == os.path.dirname(__file__):
- del sys.path[0]
+ # See also the other __main__ block at the top of the file, which modifies
+ # sys.path before our imports
main()
diff --git a/tornado/curl_httpclient.py b/tornado/curl_httpclient.py
index 1a8312c..95958c1 100644
--- a/tornado/curl_httpclient.py
+++ b/tornado/curl_httpclient.py
@@ -16,7 +16,7 @@
"""Blocking and non-blocking HTTP client implementations using pycurl."""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import cStringIO
import collections
@@ -32,6 +32,7 @@
from tornado.escape import utf8
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main
+
class CurlAsyncHTTPClient(AsyncHTTPClient):
def initialize(self, io_loop=None, max_clients=10,
max_simultaneous_connections=None):
@@ -109,15 +110,17 @@
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = self.io_loop.add_timeout(
- time.time() + msecs/1000.0, self._handle_timeout)
+ time.time() + msecs / 1000.0, self._handle_timeout)
def _handle_events(self, fd, events):
"""Called by IOLoop when there is activity on one of our
file descriptors.
"""
action = 0
- if events & ioloop.IOLoop.READ: action |= pycurl.CSELECT_IN
- if events & ioloop.IOLoop.WRITE: action |= pycurl.CSELECT_OUT
+ if events & ioloop.IOLoop.READ:
+ action |= pycurl.CSELECT_IN
+ if events & ioloop.IOLoop.WRITE:
+ action |= pycurl.CSELECT_OUT
while True:
try:
ret, num_handles = self._socket_action(fd, action)
@@ -250,7 +253,6 @@
except Exception:
self.handle_callback_exception(info["callback"])
-
def handle_callback_exception(self, callback):
self.io_loop.handle_callback_exception(callback)
@@ -372,7 +374,7 @@
# Handle curl's cryptic options for every individual HTTP method
if request.method in ("POST", "PUT"):
- request_buffer = cStringIO.StringIO(utf8(request.body))
+ request_buffer = cStringIO.StringIO(utf8(request.body))
curl.setopt(pycurl.READFUNCTION, request_buffer.read)
if request.method == "POST":
def ioctl(cmd):
@@ -393,11 +395,11 @@
curl.unsetopt(pycurl.USERPWD)
logging.debug("%s %s", request.method, request.url)
- if request.client_key is not None:
- curl.setopt(pycurl.SSLKEY, request.client_key)
if request.client_cert is not None:
curl.setopt(pycurl.SSLCERT, request.client_cert)
+ if request.client_key is not None:
+ curl.setopt(pycurl.SSLKEY, request.client_key)
if threading.activeCount() > 1:
# libcurl/pycurl is not thread-safe by default. When multiple threads
@@ -423,6 +425,7 @@
return
headers.parse_line(header_line)
+
def _curl_debug(debug_type, debug_msg):
debug_types = ('I', '<', '>', '<', '>')
if debug_type == 0:
diff --git a/tornado/database.py b/tornado/database.py
index 9771713..982c5db 100644
--- a/tornado/database.py
+++ b/tornado/database.py
@@ -16,14 +16,24 @@
"""A lightweight wrapper around MySQLdb."""
+from __future__ import absolute_import, division, with_statement
+
import copy
-import MySQLdb.constants
-import MySQLdb.converters
-import MySQLdb.cursors
import itertools
import logging
import time
+try:
+ import MySQLdb.constants
+ import MySQLdb.converters
+ import MySQLdb.cursors
+except ImportError:
+ # If MySQLdb isn't available this module won't actually be useable,
+ # but we want it to at least be importable (mainly for readthedocs.org,
+ # which has limitations on third-party modules)
+ MySQLdb = None
+
+
class Connection(object):
"""A lightweight wrapper around MySQLdb DB-API connections.
@@ -41,7 +51,7 @@
UTF-8 on all connections to avoid time zone and encoding errors.
"""
def __init__(self, host, database, user=None, password=None,
- max_idle_time=7*3600):
+ max_idle_time=7 * 3600):
self.host = host
self.database = database
self.max_idle_time = max_idle_time
@@ -210,20 +220,19 @@
except KeyError:
raise AttributeError(name)
+if MySQLdb is not None:
+ # Fix the access conversions to properly recognize unicode/binary
+ FIELD_TYPE = MySQLdb.constants.FIELD_TYPE
+ FLAG = MySQLdb.constants.FLAG
+ CONVERSIONS = copy.copy(MySQLdb.converters.conversions)
-# Fix the access conversions to properly recognize unicode/binary
-FIELD_TYPE = MySQLdb.constants.FIELD_TYPE
-FLAG = MySQLdb.constants.FLAG
-CONVERSIONS = copy.copy(MySQLdb.converters.conversions)
+ field_types = [FIELD_TYPE.BLOB, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING]
+ if 'VARCHAR' in vars(FIELD_TYPE):
+ field_types.append(FIELD_TYPE.VARCHAR)
-field_types = [FIELD_TYPE.BLOB, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING]
-if 'VARCHAR' in vars(FIELD_TYPE):
- field_types.append(FIELD_TYPE.VARCHAR)
+ for field_type in field_types:
+ CONVERSIONS[field_type] = [(FLAG.BINARY, str)] + CONVERSIONS[field_type]
-for field_type in field_types:
- CONVERSIONS[field_type] = [(FLAG.BINARY, str)] + CONVERSIONS[field_type]
-
-
-# Alias some common MySQL exceptions
-IntegrityError = MySQLdb.IntegrityError
-OperationalError = MySQLdb.OperationalError
+ # Alias some common MySQL exceptions
+ IntegrityError = MySQLdb.IntegrityError
+ OperationalError = MySQLdb.OperationalError
diff --git a/tornado/escape.py b/tornado/escape.py
index 4010b1c..ed07c53 100644
--- a/tornado/escape.py
+++ b/tornado/escape.py
@@ -20,14 +20,18 @@
have crept in over time.
"""
+from __future__ import absolute_import, division, with_statement
+
import htmlentitydefs
import re
import sys
import urllib
# Python3 compatibility: On python2.5, introduce the bytes alias from 2.6
-try: bytes
-except Exception: bytes = str
+try:
+ bytes
+except Exception:
+ bytes = str
try:
from urlparse import parse_qs # Python 2.6+
@@ -62,6 +66,8 @@
_XHTML_ESCAPE_RE = re.compile('[&<>"]')
_XHTML_ESCAPE_DICT = {'&': '&', '<': '<', '>': '>', '"': '"'}
+
+
def xhtml_escape(value):
"""Escapes a string so it is valid within XML or XHTML."""
return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
@@ -143,13 +149,14 @@
result = parse_qs(qs, keep_blank_values, strict_parsing,
encoding='latin1', errors='strict')
encoded = {}
- for k,v in result.iteritems():
+ for k, v in result.iteritems():
encoded[k] = [i.encode('latin1') for i in v]
return encoded
-
_UTF8_TYPES = (bytes, type(None))
+
+
def utf8(value):
"""Converts a string argument to a byte string.
@@ -162,6 +169,8 @@
return value.encode("utf-8")
_TO_UNICODE_TYPES = (unicode, type(None))
+
+
def to_unicode(value):
"""Converts a string argument to a unicode string.
@@ -185,6 +194,8 @@
native_str = utf8
_BASESTRING_TYPES = (basestring, type(None))
+
+
def to_basestring(value):
"""Converts a string argument to a subclass of basestring.
@@ -199,13 +210,14 @@
assert isinstance(value, bytes)
return value.decode("utf-8")
+
def recursive_unicode(obj):
"""Walks a simple data structure, converting byte strings to unicode.
Supports lists, tuples, and dictionaries.
"""
if isinstance(obj, dict):
- return dict((recursive_unicode(k), recursive_unicode(v)) for (k,v) in obj.iteritems())
+ return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.iteritems())
elif isinstance(obj, list):
return list(recursive_unicode(i) for i in obj)
elif isinstance(obj, tuple):
@@ -215,7 +227,7 @@
else:
return obj
-# I originally used the regex from
+# I originally used the regex from
# http://daringfireball.net/2010/07/improved_regex_for_matching_urls
# but it gets all exponential on certain patterns (such as too many trailing
# dots), causing the regex matcher to never return.
@@ -234,8 +246,17 @@
shorten: Long urls will be shortened for display.
- extra_params: Extra text to include in the link tag,
- e.g. linkify(text, extra_params='rel="nofollow" class="external"')
+ extra_params: Extra text to include in the link tag, or a callable
+ taking the link as an argument and returning the extra text
+ e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
+ or::
+
+ def extra_params_cb(url):
+ if url.startswith("http://example.com"):
+ return 'class="internal"'
+ else:
+ return 'class="external" rel="nofollow"'
+ linkify(text, extra_params=extra_params_cb)
require_protocol: Only linkify urls which include a protocol. If this is
False, urls such as www.facebook.com will also be linkified.
@@ -244,7 +265,7 @@
e.g. linkify(text, permitted_protocols=["http", "ftp", "mailto"]).
It is very unsafe to include protocols such as "javascript".
"""
- if extra_params:
+ if extra_params and not callable(extra_params):
extra_params = " " + extra_params.strip()
def make_link(m):
@@ -260,7 +281,10 @@
if not proto:
href = "http://" + href # no proto specified, use http
- params = extra_params
+ if callable(extra_params):
+ params = " " + extra_params(href).strip()
+ else:
+ params = extra_params
# clip long urls. max_len is just an approximation
max_len = 30
diff --git a/tornado/gen.py b/tornado/gen.py
index 51be537..506697d 100644
--- a/tornado/gen.py
+++ b/tornado/gen.py
@@ -62,7 +62,7 @@
called with more than one argument or any keyword arguments, the result
is an `Arguments` object, which is a named tuple ``(args, kwargs)``.
"""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import functools
import operator
@@ -71,10 +71,22 @@
from tornado.stack_context import ExceptionStackContext
-class KeyReuseError(Exception): pass
-class UnknownKeyError(Exception): pass
-class LeakedCallbackError(Exception): pass
-class BadYieldError(Exception): pass
+
+class KeyReuseError(Exception):
+ pass
+
+
+class UnknownKeyError(Exception):
+ pass
+
+
+class LeakedCallbackError(Exception):
+ pass
+
+
+class BadYieldError(Exception):
+ pass
+
def engine(func):
"""Decorator for asynchronous generators.
@@ -92,6 +104,7 @@
@functools.wraps(func)
def wrapper(*args, **kwargs):
runner = None
+
def handle_exception(typ, value, tb):
# if the function throws an exception before its first "yield"
# (or is not a generator at all), the Runner won't exist yet.
@@ -100,21 +113,23 @@
if runner is not None:
return runner.handle_exception(typ, value, tb)
return False
- with ExceptionStackContext(handle_exception):
+ with ExceptionStackContext(handle_exception) as deactivate:
gen = func(*args, **kwargs)
if isinstance(gen, types.GeneratorType):
- runner = Runner(gen)
+ runner = Runner(gen, deactivate)
runner.run()
return
assert gen is None, gen
+ deactivate()
# no yield, so we're done
return wrapper
+
class YieldPoint(object):
"""Base class for objects that may be yielded from the generator."""
def start(self, runner):
"""Called by the runner after the generator has yielded.
-
+
No other methods will be called on this object before ``start``.
"""
raise NotImplementedError()
@@ -128,12 +143,13 @@
def get_result(self):
"""Returns the value to use as the result of the yield expression.
-
+
This method will only be called once, and only after `is_ready`
has returned true.
"""
raise NotImplementedError()
+
class Callback(YieldPoint):
"""Returns a callable object that will allow a matching `Wait` to proceed.
@@ -159,6 +175,7 @@
def get_result(self):
return self.runner.result_callback(self.key)
+
class Wait(YieldPoint):
"""Returns the argument passed to the result of a previous `Callback`."""
def __init__(self, key):
@@ -173,6 +190,7 @@
def get_result(self):
return self.runner.pop_result(self.key)
+
class WaitAll(YieldPoint):
"""Returns the results of multiple previous `Callbacks`.
@@ -189,10 +207,10 @@
def is_ready(self):
return all(self.runner.is_ready(key) for key in self.keys)
-
+
def get_result(self):
return [self.runner.pop_result(key) for key in self.keys]
-
+
class Task(YieldPoint):
"""Runs a single asynchronous operation.
@@ -203,9 +221,9 @@
A `Task` is equivalent to a `Callback`/`Wait` pair (with a unique
key generated automatically)::
-
+
result = yield gen.Task(func, args)
-
+
func(args, callback=(yield gen.Callback(key)))
result = yield gen.Wait(key)
"""
@@ -221,13 +239,14 @@
runner.register_callback(self.key)
self.kwargs["callback"] = runner.result_callback(self.key)
self.func(*self.args, **self.kwargs)
-
+
def is_ready(self):
return self.runner.is_ready(self.key)
def get_result(self):
return self.runner.pop_result(self.key)
+
class Multi(YieldPoint):
"""Runs multiple asynchronous operations in parallel.
@@ -239,7 +258,7 @@
def __init__(self, children):
assert all(isinstance(i, YieldPoint) for i in children)
self.children = children
-
+
def start(self, runner):
for i in self.children:
i.start(runner)
@@ -250,21 +269,26 @@
def get_result(self):
return [i.get_result() for i in self.children]
+
class _NullYieldPoint(YieldPoint):
def start(self, runner):
pass
+
def is_ready(self):
return True
+
def get_result(self):
return None
+
class Runner(object):
"""Internal implementation of `tornado.gen.engine`.
Maintains information about pending callbacks and their results.
"""
- def __init__(self, gen):
+ def __init__(self, gen, deactivate_stack_context):
self.gen = gen
+ self.deactivate_stack_context = deactivate_stack_context
self.yield_point = _NullYieldPoint()
self.pending_callbacks = set()
self.results = {}
@@ -329,6 +353,7 @@
raise LeakedCallbackError(
"finished without waiting for callbacks %r" %
self.pending_callbacks)
+ self.deactivate_stack_context()
return
except Exception:
self.finished = True
@@ -366,6 +391,8 @@
return False
# in python 2.6+ this could be a collections.namedtuple
+
+
class Arguments(tuple):
"""The result of a yield expression whose callback had more than one
argument (or keyword arguments).
diff --git a/tornado/httpclient.py b/tornado/httpclient.py
index 354d907..0fcc943 100644
--- a/tornado/httpclient.py
+++ b/tornado/httpclient.py
@@ -29,6 +29,8 @@
supported version is 7.18.2, and the recommended version is 7.21.1 or newer.
"""
+from __future__ import absolute_import, division, with_statement
+
import calendar
import email.utils
import httplib
@@ -40,6 +42,7 @@
from tornado.ioloop import IOLoop
from tornado.util import import_object, bytes_type
+
class HTTPClient(object):
"""A blocking HTTP client.
@@ -54,11 +57,11 @@
except httpclient.HTTPError, e:
print "Error:", e
"""
- def __init__(self, async_client_class=None):
+ def __init__(self, async_client_class=None, **kwargs):
self._io_loop = IOLoop()
if async_client_class is None:
async_client_class = AsyncHTTPClient
- self._async_client = async_client_class(self._io_loop)
+ self._async_client = async_client_class(self._io_loop, **kwargs)
self._response = None
self._closed = False
@@ -74,7 +77,7 @@
def fetch(self, request, **kwargs):
"""Executes a request, returning an `HTTPResponse`.
-
+
The request may be either a string URL or an `HTTPRequest` object.
If it is a string, we construct an `HTTPRequest` using any additional
kwargs: ``HTTPRequest(request, **kwargs)``
@@ -91,6 +94,7 @@
response.rethrow()
return response
+
class AsyncHTTPClient(object):
"""An non-blocking HTTP client.
@@ -120,6 +124,8 @@
_impl_class = None
_impl_kwargs = None
+ _DEFAULT_MAX_CLIENTS = 10
+
@classmethod
def _async_clients(cls):
assert cls is not AsyncHTTPClient, "should only be called on subclasses"
@@ -127,7 +133,7 @@
cls._async_client_dict = weakref.WeakKeyDictionary()
return cls._async_client_dict
- def __new__(cls, io_loop=None, max_clients=10, force_instance=False,
+ def __new__(cls, io_loop=None, max_clients=None, force_instance=False,
**kwargs):
io_loop = io_loop or IOLoop.instance()
if cls is AsyncHTTPClient:
@@ -145,7 +151,13 @@
if cls._impl_kwargs:
args.update(cls._impl_kwargs)
args.update(kwargs)
- instance.initialize(io_loop, max_clients, **args)
+ if max_clients is not None:
+ # max_clients is special because it may be passed
+ # positionally instead of by keyword
+ args["max_clients"] = max_clients
+ elif "max_clients" not in args:
+ args["max_clients"] = AsyncHTTPClient._DEFAULT_MAX_CLIENTS
+ instance.initialize(io_loop, **args)
if not force_instance:
impl._async_clients()[io_loop] = instance
return instance
@@ -200,6 +212,16 @@
AsyncHTTPClient._impl_class = impl
AsyncHTTPClient._impl_kwargs = kwargs
+ @staticmethod
+ def _save_configuration():
+ return (AsyncHTTPClient._impl_class, AsyncHTTPClient._impl_kwargs)
+
+ @staticmethod
+ def _restore_configuration(saved):
+ AsyncHTTPClient._impl_class = saved[0]
+ AsyncHTTPClient._impl_kwargs = saved[1]
+
+
class HTTPRequest(object):
"""HTTP client request object."""
def __init__(self, url, method="GET", headers=None, body=None,
@@ -235,23 +257,23 @@
:arg bool use_gzip: Request gzip encoding from the server
:arg string network_interface: Network interface to use for request
:arg callable streaming_callback: If set, `streaming_callback` will
- be run with each chunk of data as it is received, and
- `~HTTPResponse.body` and `~HTTPResponse.buffer` will be empty in
+ be run with each chunk of data as it is received, and
+ `~HTTPResponse.body` and `~HTTPResponse.buffer` will be empty in
the final response.
:arg callable header_callback: If set, `header_callback` will
- be run with each header line as it is received, and
+ be run with each header line as it is received, and
`~HTTPResponse.headers` will be empty in the final response.
:arg callable prepare_curl_callback: If set, will be called with
a `pycurl.Curl` object to allow the application to make additional
`setopt` calls.
- :arg string proxy_host: HTTP proxy hostname. To use proxies,
- `proxy_host` and `proxy_port` must be set; `proxy_username` and
- `proxy_pass` are optional. Proxies are currently only support
+ :arg string proxy_host: HTTP proxy hostname. To use proxies,
+ `proxy_host` and `proxy_port` must be set; `proxy_username` and
+ `proxy_pass` are optional. Proxies are currently only support
with `curl_httpclient`.
:arg int proxy_port: HTTP proxy port
:arg string proxy_username: HTTP proxy username
:arg string proxy_password: HTTP proxy password
- :arg bool allow_nonstandard_methods: Allow unknown values for `method`
+ :arg bool allow_nonstandard_methods: Allow unknown values for `method`
argument?
:arg bool validate_cert: For HTTPS requests, validate the server's
certificate?
@@ -260,7 +282,7 @@
any request uses a custom `ca_certs` file, they all must (they
don't have to all use the same `ca_certs`, but it's not possible
to mix requests with ca_certs and requests that use the defaults.
- :arg bool allow_ipv6: Use IPv6 when available? Default is false in
+ :arg bool allow_ipv6: Use IPv6 when available? Default is false in
`simple_httpclient` and true in `curl_httpclient`
:arg string client_key: Filename for client SSL key, if any
:arg string client_cert: Filename for client SSL certificate, if any
@@ -325,12 +347,15 @@
plus 'queue', which is the delay (if any) introduced by waiting for
a slot under AsyncHTTPClient's max_clients setting.
"""
- def __init__(self, request, code, headers={}, buffer=None,
+ def __init__(self, request, code, headers=None, buffer=None,
effective_url=None, error=None, request_time=None,
- time_info={}):
+ time_info=None):
self.request = request
self.code = code
- self.headers = headers
+ if headers is not None:
+ self.headers = headers
+ else:
+ self.headers = httputil.HTTPHeaders()
self.buffer = buffer
self._body = None
if effective_url is None:
@@ -345,7 +370,7 @@
else:
self.error = error
self.request_time = request_time
- self.time_info = time_info
+ self.time_info = time_info or {}
def _get_body(self):
if self.buffer is None:
diff --git a/tornado/httpserver.py b/tornado/httpserver.py
index e24c376..5667ba1 100644
--- a/tornado/httpserver.py
+++ b/tornado/httpserver.py
@@ -24,11 +24,12 @@
`tornado.web.RequestHandler.request`.
"""
+from __future__ import absolute_import, division, with_statement
+
import Cookie
import logging
import socket
import time
-import urlparse
from tornado.escape import utf8, native_str, parse_qs_bytes
from tornado import httputil
@@ -38,10 +39,11 @@
from tornado.util import b, bytes_type
try:
- import ssl # Python 2.6+
+ import ssl # Python 2.6+
except ImportError:
ssl = None
+
class HTTPServer(TCPServer):
r"""A non-blocking, single-threaded HTTP server.
@@ -103,7 +105,7 @@
In many cases, `tornado.web.Application.listen` can be used to avoid
the need to explicitly create the `HTTPServer`.
- 2. `~tornado.netutil.TCPServer.bind`/`~tornado.netutil.TCPServer.start`:
+ 2. `~tornado.netutil.TCPServer.bind`/`~tornado.netutil.TCPServer.start`:
simple multi-process::
server = HTTPServer(app)
@@ -143,10 +145,12 @@
HTTPConnection(stream, address, self.request_callback,
self.no_keep_alive, self.xheaders)
+
class _BadRequestException(Exception):
"""Exception class for malformed HTTP requests."""
pass
+
class HTTPConnection(object):
"""Handles a connection to an HTTP client, executing HTTP requests.
@@ -156,9 +160,6 @@
def __init__(self, stream, address, request_callback, no_keep_alive=False,
xheaders=False):
self.stream = stream
- if self.stream.socket.family not in (socket.AF_INET, socket.AF_INET6):
- # Unix (or other) socket; fake the remote address
- address = ('0.0.0.0', 0)
self.address = address
self.request_callback = request_callback
self.no_keep_alive = no_keep_alive
@@ -189,7 +190,7 @@
if self._write_callback is not None:
callback = self._write_callback
self._write_callback = None
- callback()
+ callback()
# _on_write_complete is enqueued on the IOLoop whenever the
# IOStream's write buffer becomes empty, but it's possible for
# another callback that runs on the IOLoop before it to
@@ -233,9 +234,20 @@
if not version.startswith("HTTP/"):
raise _BadRequestException("Malformed HTTP version in HTTP Request-Line")
headers = httputil.HTTPHeaders.parse(data[eol:])
+
+ # HTTPRequest wants an IP, not a full socket address
+ if getattr(self.stream.socket, 'family', socket.AF_INET) in (
+ socket.AF_INET, socket.AF_INET6):
+ # Jython 2.5.2 doesn't have the socket.family attribute,
+ # so just assume IP in that case.
+ remote_ip = self.address[0]
+ else:
+ # Unix (or other) socket; fake the remote address
+ remote_ip = '0.0.0.0'
+
self._request = HTTPRequest(
connection=self, method=method, uri=uri, version=version,
- headers=headers, remote_ip=self.address[0])
+ headers=headers, remote_ip=remote_ip)
content_length = headers.get("Content-Length")
if content_length:
@@ -257,7 +269,7 @@
def _on_request_body(self, data):
self._request.body = data
content_type = self._request.headers.get("Content-Type", "")
- if self._request.method in ("POST", "PUT"):
+ if self._request.method in ("POST", "PATCH", "PUT"):
if content_type.startswith("application/x-www-form-urlencoded"):
arguments = parse_qs_bytes(native_str(self._request.body))
for name, values in arguments.iteritems():
@@ -336,8 +348,8 @@
GET/POST arguments are available in the arguments property, which
maps arguments names to lists of values (to support multiple values
for individual names). Names are of type `str`, while arguments
- are byte strings. Note that this is different from
- `RequestHandler.get_argument`, which returns argument values as
+ are byte strings. Note that this is different from
+ `RequestHandler.get_argument`, which returns argument values as
unicode strings.
.. attribute:: files
@@ -375,7 +387,7 @@
self.remote_ip = remote_ip
if protocol:
self.protocol = protocol
- elif connection and isinstance(connection.stream,
+ elif connection and isinstance(connection.stream,
iostream.SSLIOStream):
self.protocol = "https"
else:
@@ -386,14 +398,13 @@
self._start_time = time.time()
self._finish_time = None
- scheme, netloc, path, query, fragment = urlparse.urlsplit(native_str(uri))
- self.path = path
- self.query = query
- arguments = parse_qs_bytes(query)
+ self.path, sep, self.query = uri.partition('?')
+ arguments = parse_qs_bytes(self.query)
self.arguments = {}
for name, values in arguments.iteritems():
values = [v for v in values if v]
- if values: self.arguments[name] = values
+ if values:
+ self.arguments[name] = values
def supports_http_1_1(self):
"""Returns True if this request supports HTTP/1.1 semantics"""
@@ -473,4 +484,3 @@
return False
raise
return True
-
diff --git a/tornado/httputil.py b/tornado/httputil.py
index 8aec4b4..6201dd1 100644
--- a/tornado/httputil.py
+++ b/tornado/httputil.py
@@ -16,12 +16,15 @@
"""HTTP utility code shared by clients and servers."""
+from __future__ import absolute_import, division, with_statement
+
import logging
import urllib
import re
from tornado.util import b, ObjectDict
+
class HTTPHeaders(dict):
"""A dictionary that maintains Http-Header-Case for all keys.
@@ -55,7 +58,14 @@
dict.__init__(self)
self._as_list = {}
self._last_key = None
- self.update(*args, **kwargs)
+ if (len(args) == 1 and len(kwargs) == 0 and
+ isinstance(args[0], HTTPHeaders)):
+ # Copy constructor
+ for k, v in args[0].get_all():
+ self.add(k, v)
+ else:
+ # Dict-style initialization
+ self.update(*args, **kwargs)
# new public methods
@@ -144,6 +154,10 @@
for k, v in dict(*args, **kwargs).iteritems():
self[k] = v
+ def copy(self):
+ # default implementation returns dict(self), not the subclass
+ return HTTPHeaders(self)
+
_NORMALIZED_HEADER_RE = re.compile(r'^[A-Z0-9][a-z0-9]*(-[A-Z0-9][a-z0-9]*)*$')
_normalized_headers = {}
@@ -172,7 +186,8 @@
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
'http://example.com/foo?a=b&c=d'
"""
- if not args: return url
+ if not args:
+ return url
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
return url + urllib.urlencode(args)
@@ -204,13 +219,14 @@
# in the wild.
if boundary.startswith(b('"')) and boundary.endswith(b('"')):
boundary = boundary[1:-1]
- if data.endswith(b("\r\n")):
- footer_length = len(boundary) + 6
- else:
- footer_length = len(boundary) + 4
- parts = data[:-footer_length].split(b("--") + boundary + b("\r\n"))
+ final_boundary_index = data.rfind(b("--") + boundary + b("--"))
+ if final_boundary_index == -1:
+ logging.warning("Invalid multipart/form-data: no final boundary")
+ return
+ parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
for part in parts:
- if not part: continue
+ if not part:
+ continue
eoh = part.find(b("\r\n\r\n"))
if eoh == -1:
logging.warning("multipart/form-data missing headers")
@@ -250,6 +266,7 @@
yield f.strip()
s = s[end:]
+
def _parse_header(line):
"""Parse a Content-type like header.
@@ -263,7 +280,7 @@
i = p.find('=')
if i >= 0:
name = p[:i].strip().lower()
- value = p[i+1:].strip()
+ value = p[i + 1:].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace('\\\\', '\\').replace('\\"', '"')
@@ -274,7 +291,3 @@
def doctests():
import doctest
return doctest.DocTestSuite()
-
-if __name__ == "__main__":
- import doctest
- doctest.testmod()
diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index edd2fec..3e14229 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -26,7 +26,7 @@
`IOLoop.add_timeout` is a non-blocking alternative to `time.sleep`.
"""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import datetime
import errno
@@ -104,6 +104,9 @@
WRITE = _EPOLLOUT
ERROR = _EPOLLERR | _EPOLLHUP
+ # Global lock for creating global IOLoop instance
+ _instance_lock = threading.Lock()
+
def __init__(self, impl=None):
self._impl = impl or _poll()
if hasattr(self._impl, 'fileno'):
@@ -142,7 +145,10 @@
self.io_loop = io_loop or IOLoop.instance()
"""
if not hasattr(IOLoop, "_instance"):
- IOLoop._instance = IOLoop()
+ with IOLoop._instance_lock:
+ if not hasattr(IOLoop, "_instance"):
+ # New instance after double check
+ IOLoop._instance = IOLoop()
return IOLoop._instance
@staticmethod
@@ -164,7 +170,20 @@
"""Closes the IOLoop, freeing any resources used.
If ``all_fds`` is true, all file descriptors registered on the
- IOLoop will be closed (not just the ones created by the IOLoop itself.
+ IOLoop will be closed (not just the ones created by the IOLoop itself).
+
+ Many applications will only use a single IOLoop that runs for the
+ entire lifetime of the process. In that case closing the IOLoop
+ is not necessary since everything will be cleaned up when the
+ process exits. `IOLoop.close` is provided mainly for scenarios
+ such as unit tests, which create and destroy a large number of
+ IOLoops.
+
+ An IOLoop must be completely stopped before it can be closed. This
+ means that `IOLoop.stop()` must be called *and* `IOLoop.start()` must
+ be allowed to return before attempting to call `IOLoop.close()`.
+ Therefore the call to `close` will usually appear just after
+ the call to `start` rather than near the call to `stop`.
"""
self.remove_handler(self._waker.fileno())
if all_fds:
@@ -335,6 +354,9 @@
ioloop.start() will return after async_method has run its callback,
whether that callback was invoked before or after ioloop.start.
+
+ Note that even after `stop` has been called, the IOLoop is not
+ completely stopped until `IOLoop.start` has also returned.
"""
self._running = False
self._stopped = True
@@ -431,7 +453,7 @@
@staticmethod
def timedelta_to_seconds(td):
"""Equivalent to td.total_seconds() (introduced in python 2.7)."""
- return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / float(10**6)
+ return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
# Comparison methods to sort by deadline, with object id as a tiebreaker
# to guarantee a consistent ordering. The heapq module uses __le__
@@ -474,7 +496,8 @@
self._timeout = None
def _run(self):
- if not self._running: return
+ if not self._running:
+ return
try:
self.callback()
except Exception:
@@ -591,8 +614,10 @@
pass
def register(self, fd, events):
- if events & IOLoop.READ: self.read_fds.add(fd)
- if events & IOLoop.WRITE: self.write_fds.add(fd)
+ if events & IOLoop.READ:
+ self.read_fds.add(fd)
+ if events & IOLoop.WRITE:
+ self.write_fds.add(fd)
if events & IOLoop.ERROR:
self.error_fds.add(fd)
# Closed connections are reported as errors by epoll and kqueue,
@@ -633,7 +658,7 @@
else:
try:
# Linux systems with our C module installed
- import epoll
+ from tornado import epoll
_poll = _EPoll
except Exception:
# All other systems
diff --git a/tornado/iostream.py b/tornado/iostream.py
index db7895f..6db0fdf 100644
--- a/tornado/iostream.py
+++ b/tornado/iostream.py
@@ -16,11 +16,12 @@
"""A utility class to write to and read from a non-blocking socket."""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import collections
import errno
import logging
+import os
import socket
import sys
import re
@@ -30,16 +31,17 @@
from tornado.util import b, bytes_type
try:
- import ssl # Python 2.6+
+ import ssl # Python 2.6+
except ImportError:
ssl = None
+
class IOStream(object):
r"""A utility class to write to and read from a non-blocking socket.
We support a non-blocking ``write()`` and a family of ``read_*()`` methods.
All of the methods take callbacks (since writing and reading are
- non-blocking and asynchronous).
+ non-blocking and asynchronous).
The socket parameter may either be connected or unconnected. For
server operations the socket is the result of calling socket.accept().
@@ -47,6 +49,9 @@
and may either be connected before passing it to the IOStream or
connected with IOStream.connect.
+ When a stream is closed due to an error, the IOStream's `error`
+ attribute contains the exception object.
+
A very simple (and broken) HTTP client using this class::
from tornado import ioloop
@@ -83,6 +88,7 @@
self.io_loop = io_loop or ioloop.IOLoop.instance()
self.max_buffer_size = max_buffer_size
self.read_chunk_size = read_chunk_size
+ self.error = None
self._read_buffer = collections.deque()
self._write_buffer = collections.deque()
self._read_buffer_size = 0
@@ -136,31 +142,15 @@
def read_until_regex(self, regex, callback):
"""Call callback when we read the given regex pattern."""
- assert not self._read_callback, "Already reading"
+ self._set_read_callback(callback)
self._read_regex = re.compile(regex)
- self._read_callback = stack_context.wrap(callback)
- while True:
- # See if we've already got the data from a previous read
- if self._read_from_buffer():
- return
- self._check_closed()
- if self._read_to_buffer() == 0:
- break
- self._add_io_state(self.io_loop.READ)
-
+ self._try_inline_read()
+
def read_until(self, delimiter, callback):
"""Call callback when we read the given delimiter."""
- assert not self._read_callback, "Already reading"
+ self._set_read_callback(callback)
self._read_delimiter = delimiter
- self._read_callback = stack_context.wrap(callback)
- while True:
- # See if we've already got the data from a previous read
- if self._read_from_buffer():
- return
- self._check_closed()
- if self._read_to_buffer() == 0:
- break
- self._add_io_state(self.io_loop.READ)
+ self._try_inline_read()
def read_bytes(self, num_bytes, callback, streaming_callback=None):
"""Call callback when we read the given number of bytes.
@@ -169,18 +159,11 @@
of data as they become available, and the argument to the final
``callback`` will be empty.
"""
- assert not self._read_callback, "Already reading"
+ self._set_read_callback(callback)
assert isinstance(num_bytes, (int, long))
self._read_bytes = num_bytes
- self._read_callback = stack_context.wrap(callback)
self._streaming_callback = stack_context.wrap(streaming_callback)
- while True:
- if self._read_from_buffer():
- return
- self._check_closed()
- if self._read_to_buffer() == 0:
- break
- self._add_io_state(self.io_loop.READ)
+ self._try_inline_read()
def read_until_close(self, callback, streaming_callback=None):
"""Reads all data from the socket until it is closed.
@@ -192,12 +175,12 @@
Subject to ``max_buffer_size`` limit from `IOStream` constructor if
a ``streaming_callback`` is not used.
"""
- assert not self._read_callback, "Already reading"
+ self._set_read_callback(callback)
if self.closed():
self._run_callback(callback, self._consume(self._read_buffer_size))
+ self._read_callback = None
return
self._read_until_close = True
- self._read_callback = stack_context.wrap(callback)
self._streaming_callback = stack_context.wrap(streaming_callback)
self._add_io_state(self.io_loop.READ)
@@ -211,10 +194,18 @@
"""
assert isinstance(data, bytes_type)
self._check_closed()
+ # We use bool(_write_buffer) as a proxy for write_buffer_size>0,
+ # so never put empty strings in the buffer.
if data:
- # We use bool(_write_buffer) as a proxy for write_buffer_size>0,
- # so never put empty strings in the buffer.
- self._write_buffer.append(data)
+ # Break up large contiguous strings before inserting them in the
+ # write buffer, so we don't have to recopy the entire thing
+ # as we slice off pieces to send to the socket.
+ WRITE_BUFFER_CHUNK_SIZE = 128 * 1024
+ if len(data) > WRITE_BUFFER_CHUNK_SIZE:
+ for i in range(0, len(data), WRITE_BUFFER_CHUNK_SIZE):
+ self._write_buffer.append(data[i:i + WRITE_BUFFER_CHUNK_SIZE])
+ else:
+ self._write_buffer.append(data)
self._write_callback = stack_context.wrap(callback)
self._handle_write()
if self._write_buffer:
@@ -228,6 +219,8 @@
def close(self):
"""Close this stream."""
if self.socket is not None:
+ if any(sys.exc_info()):
+ self.error = sys.exc_info()[1]
if self._read_until_close:
callback = self._read_callback
self._read_callback = None
@@ -239,12 +232,16 @@
self._state = None
self.socket.close()
self.socket = None
- if self._close_callback and self._pending_callbacks == 0:
- # if there are pending callbacks, don't run the close callback
- # until they're done (see _maybe_add_error_handler)
- cb = self._close_callback
- self._close_callback = None
- self._run_callback(cb)
+ self._maybe_run_close_callback()
+
+ def _maybe_run_close_callback(self):
+ if (self.socket is None and self._close_callback and
+ self._pending_callbacks == 0):
+ # if there are pending callbacks, don't run the close callback
+ # until they're done (see _maybe_add_error_handler)
+ cb = self._close_callback
+ self._close_callback = None
+ self._run_callback(cb)
def reading(self):
"""Returns true if we are currently reading from the stream."""
@@ -274,6 +271,9 @@
if not self.socket:
return
if events & self.io_loop.ERROR:
+ errno = self.socket.getsockopt(socket.SOL_SOCKET,
+ socket.SO_ERROR)
+ self.error = socket.error(errno, os.strerror(errno))
# We may have queued up a user callback in _handle_read or
# _handle_write, so don't close the IOStream until those
# callbacks have had a chance to run.
@@ -332,22 +332,65 @@
self.io_loop.add_callback(wrapper)
def _handle_read(self):
- while True:
+ try:
try:
- # Read from the socket until we get EWOULDBLOCK or equivalent.
- # SSL sockets do some internal buffering, and if the data is
- # sitting in the SSL object's buffer select() and friends
- # can't see it; the only way to find out if it's there is to
- # try to read it.
- result = self._read_to_buffer()
- except Exception:
- self.close()
- return
- if result == 0:
- break
- else:
- if self._read_from_buffer():
- return
+ # Pretend to have a pending callback so that an EOF in
+ # _read_to_buffer doesn't trigger an immediate close
+ # callback. At the end of this method we'll either
+ # establish a real pending callback via
+ # _read_from_buffer or run the close callback.
+ #
+ # We need two try statements here so that
+ # pending_callbacks is decremented before the `except`
+ # clause below (which calls `close` and does need to
+ # trigger the callback)
+ self._pending_callbacks += 1
+ while True:
+ # Read from the socket until we get EWOULDBLOCK or equivalent.
+ # SSL sockets do some internal buffering, and if the data is
+ # sitting in the SSL object's buffer select() and friends
+ # can't see it; the only way to find out if it's there is to
+ # try to read it.
+ if self._read_to_buffer() == 0:
+ break
+ finally:
+ self._pending_callbacks -= 1
+ except Exception:
+ logging.warning("error on read", exc_info=True)
+ self.close()
+ return
+ if self._read_from_buffer():
+ return
+ else:
+ self._maybe_run_close_callback()
+
+ def _set_read_callback(self, callback):
+ assert not self._read_callback, "Already reading"
+ self._read_callback = stack_context.wrap(callback)
+
+ def _try_inline_read(self):
+ """Attempt to complete the current read operation from buffered data.
+
+ If the read can be completed without blocking, schedules the
+ read callback on the next IOLoop iteration; otherwise starts
+ listening for reads on the socket.
+ """
+ # See if we've already got the data from a previous read
+ if self._read_from_buffer():
+ return
+ self._check_closed()
+ try:
+ # See comments in _handle_read about incrementing _pending_callbacks
+ self._pending_callbacks += 1
+ while True:
+ if self._read_to_buffer() == 0:
+ break
+ self._check_closed()
+ finally:
+ self._pending_callbacks -= 1
+ if self._read_from_buffer():
+ return
+ self._add_io_state(self.io_loop.READ)
def _read_from_socket(self):
"""Attempts to read from the socket.
@@ -397,20 +440,21 @@
Returns True if the read was completed.
"""
- if self._read_bytes is not None:
- if self._streaming_callback is not None and self._read_buffer_size:
- bytes_to_consume = min(self._read_bytes, self._read_buffer_size)
+ if self._streaming_callback is not None and self._read_buffer_size:
+ bytes_to_consume = self._read_buffer_size
+ if self._read_bytes is not None:
+ bytes_to_consume = min(self._read_bytes, bytes_to_consume)
self._read_bytes -= bytes_to_consume
- self._run_callback(self._streaming_callback,
- self._consume(bytes_to_consume))
- if self._read_buffer_size >= self._read_bytes:
- num_bytes = self._read_bytes
- callback = self._read_callback
- self._read_callback = None
- self._streaming_callback = None
- self._read_bytes = None
- self._run_callback(callback, self._consume(num_bytes))
- return True
+ self._run_callback(self._streaming_callback,
+ self._consume(bytes_to_consume))
+ if self._read_bytes is not None and self._read_buffer_size >= self._read_bytes:
+ num_bytes = self._read_bytes
+ callback = self._read_callback
+ self._read_callback = None
+ self._streaming_callback = None
+ self._read_bytes = None
+ self._run_callback(callback, self._consume(num_bytes))
+ return True
elif self._read_delimiter is not None:
# Multi-byte delimiters (e.g. '\r\n') may straddle two
# chunks in the read buffer, so we can't easily find them
@@ -420,56 +464,41 @@
# to be in the first few chunks. Merge the buffer gradually
# since large merges are relatively expensive and get undone in
# consume().
- loc = -1
if self._read_buffer:
- loc = self._read_buffer[0].find(self._read_delimiter)
- while loc == -1 and len(self._read_buffer) > 1:
- # Grow by doubling, but don't split the second chunk just
- # because the first one is small.
- new_len = max(len(self._read_buffer[0]) * 2,
- (len(self._read_buffer[0]) +
- len(self._read_buffer[1])))
- _merge_prefix(self._read_buffer, new_len)
- loc = self._read_buffer[0].find(self._read_delimiter)
- if loc != -1:
- callback = self._read_callback
- delimiter_len = len(self._read_delimiter)
- self._read_callback = None
- self._streaming_callback = None
- self._read_delimiter = None
- self._run_callback(callback,
- self._consume(loc + delimiter_len))
- return True
+ while True:
+ loc = self._read_buffer[0].find(self._read_delimiter)
+ if loc != -1:
+ callback = self._read_callback
+ delimiter_len = len(self._read_delimiter)
+ self._read_callback = None
+ self._streaming_callback = None
+ self._read_delimiter = None
+ self._run_callback(callback,
+ self._consume(loc + delimiter_len))
+ return True
+ if len(self._read_buffer) == 1:
+ break
+ _double_prefix(self._read_buffer)
elif self._read_regex is not None:
- m = None
if self._read_buffer:
- m = self._read_regex.search(self._read_buffer[0])
- while m is None and len(self._read_buffer) > 1:
- # Grow by doubling, but don't split the second chunk just
- # because the first one is small.
- new_len = max(len(self._read_buffer[0]) * 2,
- (len(self._read_buffer[0]) +
- len(self._read_buffer[1])))
- _merge_prefix(self._read_buffer, new_len)
- m = self._read_regex.search(self._read_buffer[0])
- _merge_prefix(self._read_buffer, sys.maxint)
- m = self._read_regex.search(self._read_buffer[0])
- if m:
- callback = self._read_callback
- self._read_callback = None
- self._streaming_callback = None
- self._read_regex = None
- self._run_callback(callback, self._consume(m.end()))
- return True
- elif self._read_until_close:
- if self._streaming_callback is not None and self._read_buffer_size:
- self._run_callback(self._streaming_callback,
- self._consume(self._read_buffer_size))
+ while True:
+ m = self._read_regex.search(self._read_buffer[0])
+ if m is not None:
+ callback = self._read_callback
+ self._read_callback = None
+ self._streaming_callback = None
+ self._read_regex = None
+ self._run_callback(callback, self._consume(m.end()))
+ return True
+ if len(self._read_buffer) == 1:
+ break
+ _double_prefix(self._read_buffer)
return False
def _handle_connect(self):
err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err != 0:
+ self.error = socket.error(err, os.strerror(err))
# IOLoop implementations may vary: some of them return
# an error state before the socket becomes writable, so
# in that case a connection failure would be handled by the
@@ -537,10 +566,7 @@
def _maybe_add_error_listener(self):
if self._state is None and self._pending_callbacks == 0:
if self.socket is None:
- cb = self._close_callback
- if cb is not None:
- self._close_callback = None
- self._run_callback(cb)
+ self._maybe_run_close_callback()
else:
self._add_io_state(ioloop.IOLoop.READ)
@@ -655,7 +681,6 @@
# until we've completed the SSL handshake (so certificates are
# available, etc).
-
def _read_from_socket(self):
if self._ssl_accepting:
# If the handshake hasn't finished yet, there can't be anything
@@ -686,6 +711,16 @@
return None
return chunk
+
+def _double_prefix(deque):
+ """Grow by doubling, but don't split the second chunk just because the
+ first one is small.
+ """
+ new_len = max(len(deque[0]) * 2,
+ (len(deque[0]) + len(deque[1])))
+ _merge_prefix(deque, new_len)
+
+
def _merge_prefix(deque, size):
"""Replace the first entries in a deque of strings with a single
string of up to size bytes.
@@ -723,6 +758,7 @@
if not deque:
deque.appendleft(b(""))
+
def doctests():
import doctest
return doctest.DocTestSuite()
diff --git a/tornado/locale.py b/tornado/locale.py
index 61cdb7e..415b344 100644
--- a/tornado/locale.py
+++ b/tornado/locale.py
@@ -39,6 +39,8 @@
the locale.translate method will simply return the original string.
"""
+from __future__ import absolute_import, division, with_statement
+
import csv
import datetime
import logging
@@ -50,6 +52,7 @@
_supported_locales = frozenset([_default_locale])
_use_gettext = False
+
def get(*locale_codes):
"""Returns the closest match for the given locale codes.
@@ -109,7 +112,8 @@
global _supported_locales
_translations = {}
for path in os.listdir(directory):
- if not path.endswith(".csv"): continue
+ if not path.endswith(".csv"):
+ continue
locale, extension = path.split(".")
if not re.match("[a-z]+(_[A-Z]+)?$", locale):
logging.error("Unrecognized locale %r (path: %s)", locale,
@@ -118,7 +122,8 @@
f = open(os.path.join(directory, path), "r")
_translations[locale] = {}
for i, row in enumerate(csv.reader(f)):
- if not row or len(row) < 2: continue
+ if not row or len(row) < 2:
+ continue
row = [c.decode("utf-8").strip() for c in row]
english, translation = row[:2]
if len(row) > 2:
@@ -134,6 +139,7 @@
_supported_locales = frozenset(_translations.keys() + [_default_locale])
logging.info("Supported locales: %s", sorted(_supported_locales))
+
def load_gettext_translations(directory, domain):
"""Loads translations from gettext's locale tree
@@ -158,10 +164,12 @@
global _use_gettext
_translations = {}
for lang in os.listdir(directory):
- if lang.startswith('.'): continue # skip .svn, etc
- if os.path.isfile(os.path.join(directory, lang)): continue
+ if lang.startswith('.'):
+ continue # skip .svn, etc
+ if os.path.isfile(os.path.join(directory, lang)):
+ continue
try:
- os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain+".mo"))
+ os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo"))
_translations[lang] = gettext.translation(domain, directory,
languages=[lang])
except Exception, e:
@@ -172,7 +180,7 @@
logging.info("Supported locales: %s", sorted(_supported_locales))
-def get_supported_locales(cls):
+def get_supported_locales():
"""Returns a list of all the supported locale codes."""
return _supported_locales
@@ -187,7 +195,8 @@
def get_closest(cls, *locale_codes):
"""Returns the closest match for the given locale code."""
for code in locale_codes:
- if not code: continue
+ if not code:
+ continue
code = code.replace("-", "_")
parts = code.split("_")
if len(parts) > 2:
@@ -289,16 +298,16 @@
if relative and days == 0:
if seconds < 50:
return _("1 second ago", "%(seconds)d seconds ago",
- seconds) % { "seconds": seconds }
+ seconds) % {"seconds": seconds}
if seconds < 50 * 60:
minutes = round(seconds / 60.0)
return _("1 minute ago", "%(minutes)d minutes ago",
- minutes) % { "minutes": minutes }
+ minutes) % {"minutes": minutes}
hours = round(seconds / (60.0 * 60))
return _("1 hour ago", "%(hours)d hours ago",
- hours) % { "hours": hours }
+ hours) % {"hours": hours}
if days == 0:
format = _("%(time)s")
@@ -364,8 +373,10 @@
of size 1.
"""
_ = self.translate
- if len(parts) == 0: return ""
- if len(parts) == 1: return parts[0]
+ if len(parts) == 0:
+ return ""
+ if len(parts) == 1:
+ return parts[0]
comma = u' \u0648 ' if self.code.startswith("fa") else u", "
return _("%(commas)s and %(last)s") % {
"commas": comma.join(parts[:-1]),
@@ -383,6 +394,7 @@
value = value[:-3]
return ",".join(reversed(parts))
+
class CSVLocale(Locale):
"""Locale implementation using tornado's CSV translation format."""
def translate(self, message, plural_message=None, count=None):
@@ -397,6 +409,7 @@
message_dict = self.translations.get("unknown", {})
return message_dict.get(message, message)
+
class GettextLocale(Locale):
"""Locale implementation using the gettext module."""
def translate(self, message, plural_message=None, count=None):
diff --git a/tornado/netutil.py b/tornado/netutil.py
index 1e1bcbf..d06a176 100644
--- a/tornado/netutil.py
+++ b/tornado/netutil.py
@@ -16,6 +16,8 @@
"""Miscellaneous network utility code."""
+from __future__ import absolute_import, division, with_statement
+
import errno
import logging
import os
@@ -28,10 +30,11 @@
from tornado.platform.auto import set_close_exec
try:
- import ssl # Python 2.6+
+ import ssl # Python 2.6+
except ImportError:
ssl = None
+
class TCPServer(object):
r"""A non-blocking, single-threaded TCP server.
@@ -231,7 +234,7 @@
or socket.AF_INET6 to restrict to ipv4 or ipv6 addresses, otherwise
both will be used if available.
- The ``backlog`` argument has the same meaning as for
+ The ``backlog`` argument has the same meaning as for
``socket.listen()``.
"""
sockets = []
@@ -275,7 +278,7 @@
If any other file with that name exists, an exception will be
raised.
- Returns a socket object (not a list of socket objects like
+ Returns a socket object (not a list of socket objects like
`bind_sockets`)
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -297,6 +300,7 @@
sock.listen(backlog)
return sock
+
def add_accept_handler(sock, callback, io_loop=None):
"""Adds an ``IOLoop`` event handler to accept new connections on ``sock``.
@@ -308,6 +312,7 @@
"""
if io_loop is None:
io_loop = IOLoop.instance()
+
def accept_handler(fd, events):
while True:
try:
diff --git a/tornado/options.py b/tornado/options.py
index f9f472f..537891e 100644
--- a/tornado/options.py
+++ b/tornado/options.py
@@ -48,12 +48,16 @@
for define() below.
"""
+from __future__ import absolute_import, division, with_statement
+
import datetime
import logging
import logging.handlers
import re
import sys
+import os
import time
+import textwrap
from tornado.escape import _unicode
@@ -64,136 +68,123 @@
curses = None
-def define(name, default=None, type=None, help=None, metavar=None,
- multiple=False, group=None):
- """Defines a new command line option.
-
- If type is given (one of str, float, int, datetime, or timedelta)
- or can be inferred from the default, we parse the command line
- arguments based on the given type. If multiple is True, we accept
- comma-separated values, and the option value is always a list.
-
- For multi-value integers, we also accept the syntax x:y, which
- turns into range(x, y) - very useful for long integer ranges.
-
- help and metavar are used to construct the automatically generated
- command line help string. The help message is formatted like::
-
- --name=METAVAR help string
-
- group is used to group the defined options in logical groups. By default,
- command line options are grouped by the defined file.
-
- Command line option names must be unique globally. They can be parsed
- from the command line with parse_command_line() or parsed from a
- config file with parse_config_file.
- """
- if name in options:
- raise Error("Option %r already defined in %s", name,
- options[name].file_name)
- frame = sys._getframe(0)
- options_file = frame.f_code.co_filename
- file_name = frame.f_back.f_code.co_filename
- if file_name == options_file: file_name = ""
- if type is None:
- if not multiple and default is not None:
- type = default.__class__
- else:
- type = str
- if group:
- group_name = group
- else:
- group_name = file_name
- options[name] = _Option(name, file_name=file_name, default=default,
- type=type, help=help, metavar=metavar,
- multiple=multiple, group_name=group_name)
-
-
-def parse_command_line(args=None):
- """Parses all options given on the command line.
-
- We return all command line arguments that are not options as a list.
- """
- if args is None: args = sys.argv
- remaining = []
- for i in xrange(1, len(args)):
- # All things after the last option are command line arguments
- if not args[i].startswith("-"):
- remaining = args[i:]
- break
- if args[i] == "--":
- remaining = args[i+1:]
- break
- arg = args[i].lstrip("-")
- name, equals, value = arg.partition("=")
- name = name.replace('-', '_')
- if not name in options:
- print_help()
- raise Error('Unrecognized command line option: %r' % name)
- option = options[name]
- if not equals:
- if option.type == bool:
- value = "true"
- else:
- raise Error('Option %r requires a value' % name)
- option.parse(value)
- if options.help:
- print_help()
- sys.exit(0)
-
- # Set up log level and pretty console logging by default
- if options.logging != 'none':
- logging.getLogger().setLevel(getattr(logging, options.logging.upper()))
- enable_pretty_logging()
-
- return remaining
-
-
-def parse_config_file(path):
- """Parses and loads the Python config file at the given path."""
- config = {}
- execfile(path, config, config)
- for name in config:
- if name in options:
- options[name].set(config[name])
-
-
-def print_help(file=sys.stdout):
- """Prints all the command line options to stdout."""
- print >> file, "Usage: %s [OPTIONS]" % sys.argv[0]
- print >> file, ""
- print >> file, "Options:"
- by_group = {}
- for option in options.itervalues():
- by_group.setdefault(option.group_name, []).append(option)
-
- for filename, o in sorted(by_group.items()):
- if filename: print >> file, filename
- o.sort(key=lambda option: option.name)
- for option in o:
- prefix = option.name
- if option.metavar:
- prefix += "=" + option.metavar
- print >> file, " --%-30s %s" % (prefix, option.help or "")
- print >> file
+class Error(Exception):
+ """Exception raised by errors in the options module."""
+ pass
class _Options(dict):
- """Our global program options, an dictionary with object-like access."""
- @classmethod
- def instance(cls):
- if not hasattr(cls, "_instance"):
- cls._instance = cls()
- return cls._instance
+ """A collection of options, a dictionary with object-like access.
+ Normally accessed via static functions in the `tornado.options` module,
+ which reference a global instance.
+ """
def __getattr__(self, name):
if isinstance(self.get(name), _Option):
return self[name].value()
raise AttributeError("Unrecognized option %r" % name)
+ def __setattr__(self, name, value):
+ if isinstance(self.get(name), _Option):
+ return self[name].set(value)
+ raise AttributeError("Unrecognized option %r" % name)
+
+ def define(self, name, default=None, type=None, help=None, metavar=None,
+ multiple=False, group=None):
+ if name in self:
+ raise Error("Option %r already defined in %s", name,
+ self[name].file_name)
+ frame = sys._getframe(0)
+ options_file = frame.f_code.co_filename
+ file_name = frame.f_back.f_code.co_filename
+ if file_name == options_file:
+ file_name = ""
+ if type is None:
+ if not multiple and default is not None:
+ type = default.__class__
+ else:
+ type = str
+ if group:
+ group_name = group
+ else:
+ group_name = file_name
+ self[name] = _Option(name, file_name=file_name, default=default,
+ type=type, help=help, metavar=metavar,
+ multiple=multiple, group_name=group_name)
+
+ def parse_command_line(self, args=None):
+ if args is None:
+ args = sys.argv
+ remaining = []
+ for i in xrange(1, len(args)):
+ # All things after the last option are command line arguments
+ if not args[i].startswith("-"):
+ remaining = args[i:]
+ break
+ if args[i] == "--":
+ remaining = args[i + 1:]
+ break
+ arg = args[i].lstrip("-")
+ name, equals, value = arg.partition("=")
+ name = name.replace('-', '_')
+ if not name in self:
+ print_help()
+ raise Error('Unrecognized command line option: %r' % name)
+ option = self[name]
+ if not equals:
+ if option.type == bool:
+ value = "true"
+ else:
+ raise Error('Option %r requires a value' % name)
+ option.parse(value)
+ if self.help:
+ print_help()
+ sys.exit(0)
+
+ # Set up log level and pretty console logging by default
+ if self.logging != 'none':
+ logging.getLogger().setLevel(getattr(logging, self.logging.upper()))
+ enable_pretty_logging()
+
+ return remaining
+
+ def parse_config_file(self, path):
+ config = {}
+ execfile(path, config, config)
+ for name in config:
+ if name in self:
+ self[name].set(config[name])
+
+ def print_help(self, file=sys.stdout):
+ """Prints all the command line options to stdout."""
+ print >> file, "Usage: %s [OPTIONS]" % sys.argv[0]
+ print >> file, "\nOptions:\n"
+ by_group = {}
+ for option in self.itervalues():
+ by_group.setdefault(option.group_name, []).append(option)
+
+ for filename, o in sorted(by_group.items()):
+ if filename:
+ print >> file, "\n%s options:\n" % os.path.normpath(filename)
+ o.sort(key=lambda option: option.name)
+ for option in o:
+ prefix = option.name
+ if option.metavar:
+ prefix += "=" + option.metavar
+ description = option.help or ""
+ if option.default is not None and option.default != '':
+ description += " (default %s)" % option.default
+ lines = textwrap.wrap(description, 79 - 35)
+ if len(prefix) > 30 or len(lines) == 0:
+ lines.insert(0, '')
+ print >> file, " --%-30s %s" % (prefix, lines[0])
+ for line in lines[1:]:
+ print >> file, "%-34s %s" % (' ', line)
+ print >> file
+
class _Option(object):
- def __init__(self, name, default=None, type=str, help=None, metavar=None,
+ def __init__(self, name, default=None, type=basestring, help=None, metavar=None,
multiple=False, file_name=None, group_name=None):
if default is None and multiple:
default = []
@@ -215,18 +206,17 @@
datetime.datetime: self._parse_datetime,
datetime.timedelta: self._parse_timedelta,
bool: self._parse_bool,
- str: self._parse_string,
+ basestring: self._parse_string,
}.get(self.type, self.type)
if self.multiple:
- if self._value is None:
- self._value = []
+ self._value = []
for part in value.split(","):
if self.type in (int, long):
# allow ranges of the form X:Y (inclusive at both ends)
lo, _, hi = part.partition(":")
lo = _parse(lo)
hi = _parse(hi) if hi else lo
- self._value.extend(range(lo, hi+1))
+ self._value.extend(range(lo, hi + 1))
else:
self._value.append(_parse(part))
else:
@@ -244,8 +234,8 @@
(self.name, self.type.__name__))
else:
if value != None and not isinstance(value, self.type):
- raise Error("Option %r is required to be a %s" %
- (self.name, self.type.__name__))
+ raise Error("Option %r is required to be a %s (%s given)" %
+ (self.name, self.type.__name__, type(value)))
self._value = value
# Supported date/time formats in our options
@@ -313,14 +303,64 @@
return _unicode(value)
-class Error(Exception):
- """Exception raised by errors in the options module."""
- pass
+options = _Options()
+"""Global options dictionary.
+
+Supports both attribute-style and dict-style access.
+"""
-def enable_pretty_logging():
+def define(name, default=None, type=None, help=None, metavar=None,
+ multiple=False, group=None):
+ """Defines a new command line option.
+
+ If type is given (one of str, float, int, datetime, or timedelta)
+ or can be inferred from the default, we parse the command line
+ arguments based on the given type. If multiple is True, we accept
+ comma-separated values, and the option value is always a list.
+
+ For multi-value integers, we also accept the syntax x:y, which
+ turns into range(x, y) - very useful for long integer ranges.
+
+ help and metavar are used to construct the automatically generated
+ command line help string. The help message is formatted like::
+
+ --name=METAVAR help string
+
+ group is used to group the defined options in logical groups. By default,
+ command line options are grouped by the defined file.
+
+ Command line option names must be unique globally. They can be parsed
+ from the command line with parse_command_line() or parsed from a
+ config file with parse_config_file.
+ """
+ return options.define(name, default=default, type=type, help=help,
+ metavar=metavar, multiple=multiple, group=group)
+
+
+def parse_command_line(args=None):
+ """Parses all options given on the command line (defaults to sys.argv).
+
+ Note that args[0] is ignored since it is the program name in sys.argv.
+
+ We return a list of all arguments that are not parsed as options.
+ """
+ return options.parse_command_line(args)
+
+
+def parse_config_file(path):
+ """Parses and loads the Python config file at the given path."""
+ return options.parse_config_file(path)
+
+
+def print_help(file=sys.stdout):
+ """Prints all the command line options to stdout."""
+ return options.print_help(file)
+
+
+def enable_pretty_logging(options=options):
"""Turns on formatted logging output as configured.
-
+
This is called automatically by `parse_command_line`.
"""
root_logger = logging.getLogger()
@@ -348,26 +388,30 @@
root_logger.addHandler(channel)
-
class _LogFormatter(logging.Formatter):
def __init__(self, color, *args, **kwargs):
logging.Formatter.__init__(self, *args, **kwargs)
self._color = color
if color:
- # The curses module has some str/bytes confusion in python3.
- # Most methods return bytes, but only accept strings.
- # The explict calls to unicode() below are harmless in python2,
- # but will do the right conversion in python3.
- fg_color = unicode(curses.tigetstr("setaf") or
- curses.tigetstr("setf") or "", "ascii")
+ # The curses module has some str/bytes confusion in
+ # python3. Until version 3.2.3, most methods return
+ # bytes, but only accept strings. In addition, we want to
+ # output these strings with the logging module, which
+ # works with unicode strings. The explicit calls to
+ # unicode() below are harmless in python2 but will do the
+ # right conversion in python 3.
+ fg_color = (curses.tigetstr("setaf") or
+ curses.tigetstr("setf") or "")
+ if (3, 0) < sys.version_info < (3, 2, 3):
+ fg_color = unicode(fg_color, "ascii")
self._colors = {
- logging.DEBUG: unicode(curses.tparm(fg_color, 4), # Blue
+ logging.DEBUG: unicode(curses.tparm(fg_color, 4), # Blue
"ascii"),
- logging.INFO: unicode(curses.tparm(fg_color, 2), # Green
+ logging.INFO: unicode(curses.tparm(fg_color, 2), # Green
"ascii"),
- logging.WARNING: unicode(curses.tparm(fg_color, 3), # Yellow
+ logging.WARNING: unicode(curses.tparm(fg_color, 3), # Yellow
"ascii"),
- logging.ERROR: unicode(curses.tparm(fg_color, 1), # Red
+ logging.ERROR: unicode(curses.tparm(fg_color, 1), # Red
"ascii"),
}
self._normal = unicode(curses.tigetstr("sgr0"), "ascii")
@@ -393,9 +437,6 @@
return formatted.replace("\n", "\n ")
-options = _Options.instance()
-
-
# Default options
define("help", type=bool, help="show this help information")
define("logging", default="info",
diff --git a/tornado/platform/auto.py b/tornado/platform/auto.py
index e76d731..7bfec11 100644
--- a/tornado/platform/auto.py
+++ b/tornado/platform/auto.py
@@ -23,9 +23,12 @@
from tornado.platform.auto import set_close_exec
"""
+from __future__ import absolute_import, division, with_statement
+
import os
if os.name == 'nt':
- from tornado.platform.windows import set_close_exec, Waker
+ from tornado.platform.common import Waker
+ from tornado.platform.windows import set_close_exec
else:
from tornado.platform.posix import set_close_exec, Waker
diff --git a/tornado/platform/common.py b/tornado/platform/common.py
new file mode 100644
index 0000000..176ce2e
--- /dev/null
+++ b/tornado/platform/common.py
@@ -0,0 +1,89 @@
+"""Lowest-common-denominator implementations of platform functionality."""
+from __future__ import absolute_import, division, with_statement
+
+import errno
+import socket
+
+from tornado.platform import interface
+from tornado.util import b
+
+
+class Waker(interface.Waker):
+ """Create an OS independent asynchronous pipe.
+
+ For use on platforms that don't have os.pipe() (or where pipes cannot
+ be passed to select()), but do have sockets. This includes Windows
+ and Jython.
+ """
+ def __init__(self):
+ # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py
+
+ self.writer = socket.socket()
+ # Disable buffering -- pulling the trigger sends 1 byte,
+ # and we want that sent immediately, to wake up ASAP.
+ self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+
+ count = 0
+ while 1:
+ count += 1
+ # Bind to a local port; for efficiency, let the OS pick
+ # a free port for us.
+ # Unfortunately, stress tests showed that we may not
+ # be able to connect to that port ("Address already in
+ # use") despite that the OS picked it. This appears
+ # to be a race bug in the Windows socket implementation.
+ # So we loop until a connect() succeeds (almost always
+ # on the first try). See the long thread at
+ # http://mail.zope.org/pipermail/zope/2005-July/160433.html
+ # for hideous details.
+ a = socket.socket()
+ a.bind(("127.0.0.1", 0))
+ a.listen(1)
+ connect_address = a.getsockname() # assigned (host, port) pair
+ try:
+ self.writer.connect(connect_address)
+ break # success
+ except socket.error, detail:
+ if (not hasattr(errno, 'WSAEADDRINUSE') or
+ detail[0] != errno.WSAEADDRINUSE):
+ # "Address already in use" is the only error
+ # I've seen on two WinXP Pro SP2 boxes, under
+ # Pythons 2.3.5 and 2.4.1.
+ raise
+ # (10048, 'Address already in use')
+ # assert count <= 2 # never triggered in Tim's tests
+ if count >= 10: # I've never seen it go above 2
+ a.close()
+ self.writer.close()
+ raise socket.error("Cannot bind trigger!")
+ # Close `a` and try again. Note: I originally put a short
+ # sleep() here, but it didn't appear to help or hurt.
+ a.close()
+
+ self.reader, addr = a.accept()
+ self.reader.setblocking(0)
+ self.writer.setblocking(0)
+ a.close()
+ self.reader_fd = self.reader.fileno()
+
+ def fileno(self):
+ return self.reader.fileno()
+
+ def wake(self):
+ try:
+ self.writer.send(b("x"))
+ except (IOError, socket.error):
+ pass
+
+ def consume(self):
+ try:
+ while True:
+ result = self.reader.recv(1024)
+ if not result:
+ break
+ except (IOError, socket.error):
+ pass
+
+ def close(self):
+ self.reader.close()
+ self.writer.close()
diff --git a/tornado/platform/interface.py b/tornado/platform/interface.py
index 20f0f71..21e72cd 100644
--- a/tornado/platform/interface.py
+++ b/tornado/platform/interface.py
@@ -21,10 +21,14 @@
implementation from `tornado.platform.auto`.
"""
+from __future__ import absolute_import, division, with_statement
+
+
def set_close_exec(fd):
"""Sets the close-on-exec bit (``FD_CLOEXEC``)for a file descriptor."""
raise NotImplementedError()
+
class Waker(object):
"""A socket-like object that can wake another thread from ``select()``.
@@ -36,7 +40,7 @@
"""
def fileno(self):
"""Returns a file descriptor for this waker.
-
+
Must be suitable for use with ``select()`` or equivalent on the
local platform.
"""
@@ -53,5 +57,3 @@
def close(self):
"""Closes the waker's file descriptor(s)."""
raise NotImplementedError()
-
-
diff --git a/tornado/platform/posix.py b/tornado/platform/posix.py
index aa09b31..8d674c0 100644
--- a/tornado/platform/posix.py
+++ b/tornado/platform/posix.py
@@ -16,20 +16,25 @@
"""Posix implementations of platform-specific functionality."""
+from __future__ import absolute_import, division, with_statement
+
import fcntl
import os
from tornado.platform import interface
from tornado.util import b
+
def set_close_exec(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
+
def _set_nonblocking(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
+
+
class Waker(interface.Waker):
def __init__(self):
r, w = os.pipe()
@@ -53,7 +58,8 @@
try:
while True:
result = self.reader.read()
- if not result: break;
+ if not result:
+ break
except IOError:
pass
diff --git a/tornado/platform/twisted.py b/tornado/platform/twisted.py
index 5d406d3..044c333 100644
--- a/tornado/platform/twisted.py
+++ b/tornado/platform/twisted.py
@@ -41,10 +41,10 @@
before closing the `IOLoop`.
-This module has been tested with Twisted versions 11.0.0 and 11.1.0.
+This module has been tested with Twisted versions 11.0.0, 11.1.0, and 12.0.0
"""
-from __future__ import with_statement, absolute_import
+from __future__ import absolute_import, division, with_statement
import functools
import logging
@@ -66,6 +66,11 @@
class TornadoDelayedCall(object):
"""DelayedCall object for Tornado."""
+ # Note that zope.interface.implements is deprecated in
+ # zope.interface 4.0, because it cannot work in python 3. The
+ # replacement is a class decorator, which cannot work on python
+ # 2.5. So when twisted supports python 3, we'll need to drop 2.5
+ # support on this module to make it work.
implements(IDelayedCall)
def __init__(self, reactor, seconds, f, *args, **kw):
@@ -107,6 +112,7 @@
def active(self):
return self._active
+
class TornadoReactor(PosixReactorBase):
"""Twisted reactor built on the Tornado IOLoop.
@@ -125,7 +131,7 @@
self._io_loop = io_loop
self._readers = {} # map of reader objects to fd
self._writers = {} # map of writer objects to fd
- self._fds = {} # a map of fd to a (reader, writer) tuple
+ self._fds = {} # a map of fd to a (reader, writer) tuple
self._delayedCalls = {}
PosixReactorBase.__init__(self)
@@ -295,6 +301,7 @@
if self._stopped:
self.fireSystemEvent("shutdown")
+
class _TestReactor(TornadoReactor):
"""Subclass of TornadoReactor for use in unittests.
@@ -319,7 +326,6 @@
port, protocol, interface=interface, maxPacketSize=maxPacketSize)
-
def install(io_loop=None):
"""Install this package as the default Twisted reactor."""
if not io_loop:
diff --git a/tornado/platform/windows.py b/tornado/platform/windows.py
index 1735f1b..80c8a6e 100644
--- a/tornado/platform/windows.py
+++ b/tornado/platform/windows.py
@@ -1,13 +1,10 @@
# NOTE: win32 support is currently experimental, and not recommended
# for production use.
+
+from __future__ import absolute_import, division, with_statement
import ctypes
import ctypes.wintypes
-import socket
-import errno
-
-from tornado.platform import interface
-from tornado.util import b
# See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx
SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation
@@ -21,77 +18,3 @@
success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0)
if not success:
raise ctypes.GetLastError()
-
-
-class Waker(interface.Waker):
- """Create an OS independent asynchronous pipe"""
- def __init__(self):
- # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py
-
- self.writer = socket.socket()
- # Disable buffering -- pulling the trigger sends 1 byte,
- # and we want that sent immediately, to wake up ASAP.
- self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
- count = 0
- while 1:
- count += 1
- # Bind to a local port; for efficiency, let the OS pick
- # a free port for us.
- # Unfortunately, stress tests showed that we may not
- # be able to connect to that port ("Address already in
- # use") despite that the OS picked it. This appears
- # to be a race bug in the Windows socket implementation.
- # So we loop until a connect() succeeds (almost always
- # on the first try). See the long thread at
- # http://mail.zope.org/pipermail/zope/2005-July/160433.html
- # for hideous details.
- a = socket.socket()
- a.bind(("127.0.0.1", 0))
- connect_address = a.getsockname() # assigned (host, port) pair
- a.listen(1)
- try:
- self.writer.connect(connect_address)
- break # success
- except socket.error, detail:
- if detail[0] != errno.WSAEADDRINUSE:
- # "Address already in use" is the only error
- # I've seen on two WinXP Pro SP2 boxes, under
- # Pythons 2.3.5 and 2.4.1.
- raise
- # (10048, 'Address already in use')
- # assert count <= 2 # never triggered in Tim's tests
- if count >= 10: # I've never seen it go above 2
- a.close()
- self.writer.close()
- raise socket.error("Cannot bind trigger!")
- # Close `a` and try again. Note: I originally put a short
- # sleep() here, but it didn't appear to help or hurt.
- a.close()
-
- self.reader, addr = a.accept()
- self.reader.setblocking(0)
- self.writer.setblocking(0)
- a.close()
- self.reader_fd = self.reader.fileno()
-
- def fileno(self):
- return self.reader.fileno()
-
- def wake(self):
- try:
- self.writer.send(b("x"))
- except IOError:
- pass
-
- def consume(self):
- try:
- while True:
- result = self.reader.recv(1024)
- if not result: break
- except IOError:
- pass
-
- def close(self):
- self.reader.close()
- self.writer.close()
diff --git a/tornado/process.py b/tornado/process.py
index 06f6aa9..28a61bc 100644
--- a/tornado/process.py
+++ b/tornado/process.py
@@ -16,6 +16,8 @@
"""Utilities for working with multiple processes."""
+from __future__ import absolute_import, division, with_statement
+
import errno
import logging
import os
@@ -27,10 +29,11 @@
from tornado import ioloop
try:
- import multiprocessing # Python 2.6+
+ import multiprocessing # Python 2.6+
except ImportError:
multiprocessing = None
+
def cpu_count():
"""Returns the number of processors on this machine."""
if multiprocessing is not None:
@@ -45,6 +48,7 @@
logging.error("Could not detect number of processors; assuming 1")
return 1
+
def _reseed_random():
if 'random' not in sys.modules:
return
@@ -61,6 +65,7 @@
_task_id = None
+
def fork_processes(num_processes, max_restarts=100):
"""Starts multiple worker processes.
@@ -95,6 +100,7 @@
"IOLoop.instance() before calling start_processes()")
logging.info("Starting %d processes", num_processes)
children = {}
+
def start_child(i):
pid = os.fork()
if pid == 0:
@@ -108,7 +114,8 @@
return None
for i in range(num_processes):
id = start_child(i)
- if id is not None: return id
+ if id is not None:
+ return id
num_restarts = 0
while children:
try:
@@ -133,13 +140,15 @@
if num_restarts > max_restarts:
raise RuntimeError("Too many child restarts, giving up")
new_id = start_child(id)
- if new_id is not None: return new_id
+ if new_id is not None:
+ return new_id
# All child processes exited cleanly, so exit the master process
# instead of just returning to right after the call to
# fork_processes (which will probably just start up another IOLoop
# unless the caller checks the return value).
sys.exit(0)
+
def task_id():
"""Returns the current task id, if any.
diff --git a/tornado/simple_httpclient.py b/tornado/simple_httpclient.py
index 376d410..80b3fb0 100644
--- a/tornado/simple_httpclient.py
+++ b/tornado/simple_httpclient.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
from tornado.escape import utf8, _unicode, native_str
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main
@@ -28,12 +28,13 @@
from cStringIO import StringIO as BytesIO # python 2
try:
- import ssl # python 2.6+
+ import ssl # python 2.6+
except ImportError:
ssl = None
_DEFAULT_CA_CERTS = os.path.dirname(__file__) + '/ca-certificates.crt'
+
class SimpleAsyncHTTPClient(AsyncHTTPClient):
"""Non-blocking HTTP client with no external dependencies.
@@ -93,8 +94,10 @@
def fetch(self, request, callback, **kwargs):
if not isinstance(request, HTTPRequest):
request = HTTPRequest(url=request, **kwargs)
- if not isinstance(request.headers, HTTPHeaders):
- request.headers = HTTPHeaders(request.headers)
+ # We're going to modify this (to add Host, Accept-Encoding, etc),
+ # so make sure we don't modify the caller's object. This is also
+ # where normal dicts get converted to HTTPHeaders objects.
+ request.headers = HTTPHeaders(request.headers)
callback = stack_context.wrap(callback)
self.queue.append((request, callback))
self._process_queue()
@@ -119,9 +122,8 @@
self._process_queue()
-
class _HTTPConnection(object):
- _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE"])
+ _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"])
def __init__(self, io_loop, client, request, release_callback,
final_callback, max_buffer_size):
@@ -160,6 +162,7 @@
if re.match(r'^\[.*\]$', host):
# raw ipv6 addresses in urls are enclosed in brackets
host = host[1:-1]
+ parsed_hostname = host # save final parsed host for _on_connect
if self.client.hostname_mapping is not None:
host = self.client.hostname_mapping.get(host, host)
@@ -198,7 +201,7 @@
# compatibility with servers configured for TLSv1 only,
# but nearly all servers support SSLv3:
# http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html
- if sys.version_info >= (2,7):
+ if sys.version_info >= (2, 7):
ssl_options["ciphers"] = "DEFAULT:!SSLv2"
else:
# This is really only necessary for pre-1.0 versions
@@ -221,7 +224,8 @@
self._on_timeout)
self.stream.set_close_callback(self._on_close)
self.stream.connect(sockaddr,
- functools.partial(self._on_connect, parsed))
+ functools.partial(self._on_connect, parsed,
+ parsed_hostname))
def _on_timeout(self):
self._timeout = None
@@ -230,7 +234,7 @@
error=HTTPError(599, "Timeout")))
self.stream.close()
- def _on_connect(self, parsed):
+ def _on_connect(self, parsed, parsed_hostname):
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
@@ -241,7 +245,11 @@
if (self.request.validate_cert and
isinstance(self.stream, SSLIOStream)):
match_hostname(self.stream.socket.getpeercert(),
- parsed.hostname)
+ # ipv6 addresses are broken (in
+ # parsed.hostname) until 2.7, here is
+ # correctly parsed value calculated in
+ # __init__
+ parsed_hostname)
if (self.request.method not in self._SUPPORTED_METHODS and
not self.request.allow_nonstandard_methods):
raise KeyError("unknown method %s" % self.request.method)
@@ -250,8 +258,13 @@
'proxy_username', 'proxy_password'):
if getattr(self.request, key, None):
raise NotImplementedError('%s not supported' % key)
+ if "Connection" not in self.request.headers:
+ self.request.headers["Connection"] = "close"
if "Host" not in self.request.headers:
- self.request.headers["Host"] = parsed.netloc
+ if '@' in parsed.netloc:
+ self.request.headers["Host"] = parsed.netloc.rpartition('@')[-1]
+ else:
+ self.request.headers["Host"] = parsed.netloc
username, password = None, None
if parsed.username is not None:
username, password = parsed.username, parsed.password
@@ -265,7 +278,7 @@
if self.request.user_agent:
self.request.headers["User-Agent"] = self.request.user_agent
if not self.request.allow_nonstandard_methods:
- if self.request.method in ("POST", "PUT"):
+ if self.request.method in ("POST", "PATCH", "PUT"):
assert self.request.body is not None
else:
assert self.request.body is None
@@ -313,6 +326,8 @@
self._run_callback(HTTPResponse(self.request, 599, error=e,
request_time=time.time() - self.start_time,
))
+ if hasattr(self, "stream"):
+ self.stream.close()
def _on_close(self):
self._run_callback(HTTPResponse(
@@ -363,7 +378,7 @@
self.headers.get("Content-Encoding") == "gzip"):
# Magic parameter makes zlib module understand gzip header
# http://stackoverflow.com/questions/1838699/how-can-i-decompress-a-gzip-stream-with-zlib
- self._decompressor = zlib.decompressobj(16+zlib.MAX_WBITS)
+ self._decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
if self.headers.get("Transfer-Encoding") == "chunked":
self.chunks = []
self.stream.read_until(b("\r\n"), self._on_chunk_length)
@@ -413,7 +428,7 @@
self.request.streaming_callback(data)
buffer = BytesIO()
else:
- buffer = BytesIO(data) # TODO: don't require one big string?
+ buffer = BytesIO(data) # TODO: don't require one big string?
response = HTTPResponse(original_request,
self.code, headers=self.headers,
request_time=time.time() - self.start_time,
@@ -452,6 +467,7 @@
class CertificateError(ValueError):
pass
+
def _dnsname_to_pat(dn):
pats = []
for frag in dn.split(r'.'):
@@ -465,6 +481,7 @@
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
diff --git a/tornado/stack_context.py b/tornado/stack_context.py
index 1ba3730..1843762 100644
--- a/tornado/stack_context.py
+++ b/tornado/stack_context.py
@@ -66,19 +66,24 @@
block that references your `StackContext`.
'''
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import contextlib
import functools
import itertools
+import operator
import sys
import threading
+from tornado.util import raise_exc_info
+
+
class _State(threading.local):
def __init__(self):
self.contexts = ()
_state = _State()
+
class StackContext(object):
'''Establishes the given context as a StackContext that will be transferred.
@@ -91,24 +96,33 @@
StackContext takes the function itself rather than its result::
with StackContext(my_context):
+
+ The result of ``with StackContext() as cb:`` is a deactivation
+ callback. Run this callback when the StackContext is no longer
+ needed to ensure that it is not propagated any further (note that
+ deactivating a context does not affect any instances of that
+ context that are currently pending). This is an advanced feature
+ and not necessary in most applications.
'''
- def __init__(self, context_factory):
+ def __init__(self, context_factory, _active_cell=None):
self.context_factory = context_factory
+ self.active_cell = _active_cell or [True]
# Note that some of this code is duplicated in ExceptionStackContext
# below. ExceptionStackContext is more common and doesn't need
# the full generality of this class.
def __enter__(self):
self.old_contexts = _state.contexts
- # _state.contexts is a tuple of (class, arg) pairs
+ # _state.contexts is a tuple of (class, arg, active_cell) tuples
_state.contexts = (self.old_contexts +
- ((StackContext, self.context_factory),))
+ ((StackContext, self.context_factory, self.active_cell),))
try:
self.context = self.context_factory()
self.context.__enter__()
except Exception:
_state.contexts = self.old_contexts
raise
+ return lambda: operator.setitem(self.active_cell, 0, False)
def __exit__(self, type, value, traceback):
try:
@@ -116,6 +130,7 @@
finally:
_state.contexts = self.old_contexts
+
class ExceptionStackContext(object):
'''Specialization of StackContext for exception handling.
@@ -129,13 +144,16 @@
If the exception handler returns true, the exception will be
consumed and will not be propagated to other exception handlers.
'''
- def __init__(self, exception_handler):
+ def __init__(self, exception_handler, _active_cell=None):
self.exception_handler = exception_handler
+ self.active_cell = _active_cell or [True]
def __enter__(self):
self.old_contexts = _state.contexts
_state.contexts = (self.old_contexts +
- ((ExceptionStackContext, self.exception_handler),))
+ ((ExceptionStackContext, self.exception_handler,
+ self.active_cell),))
+ return lambda: operator.setitem(self.active_cell, 0, False)
def __exit__(self, type, value, traceback):
try:
@@ -144,6 +162,7 @@
finally:
_state.contexts = self.old_contexts
+
class NullContext(object):
'''Resets the StackContext.
@@ -158,9 +177,11 @@
def __exit__(self, type, value, traceback):
_state.contexts = self.old_contexts
+
class _StackContextWrapper(functools.partial):
pass
+
def wrap(fn):
'''Returns a callable object that will restore the current StackContext
when executed.
@@ -173,12 +194,15 @@
return fn
# functools.wraps doesn't appear to work on functools.partial objects
#@functools.wraps(fn)
+
def wrapped(callback, contexts, *args, **kwargs):
if contexts is _state.contexts or not contexts:
callback(*args, **kwargs)
return
if not _state.contexts:
- new_contexts = [cls(arg) for (cls, arg) in contexts]
+ new_contexts = [cls(arg, active_cell)
+ for (cls, arg, active_cell) in contexts
+ if active_cell[0]]
# If we're moving down the stack, _state.contexts is a prefix
# of contexts. For each element of contexts not in that prefix,
# create a new StackContext object.
@@ -190,10 +214,13 @@
for a, b in itertools.izip(_state.contexts, contexts))):
# contexts have been removed or changed, so start over
new_contexts = ([NullContext()] +
- [cls(arg) for (cls,arg) in contexts])
+ [cls(arg, active_cell)
+ for (cls, arg, active_cell) in contexts
+ if active_cell[0]])
else:
- new_contexts = [cls(arg)
- for (cls, arg) in contexts[len(_state.contexts):]]
+ new_contexts = [cls(arg, active_cell)
+ for (cls, arg, active_cell) in contexts[len(_state.contexts):]
+ if active_cell[0]]
if len(new_contexts) > 1:
with _nested(*new_contexts):
callback(*args, **kwargs)
@@ -207,6 +234,7 @@
else:
return _StackContextWrapper(fn)
+
@contextlib.contextmanager
def _nested(*managers):
"""Support multiple context managers in a single with-statement.
@@ -240,5 +268,4 @@
# Don't rely on sys.exc_info() still containing
# the right information. Another exception may
# have been raised and caught by an exit method
- raise exc[0], exc[1], exc[2]
-
+ raise_exc_info(exc)
diff --git a/tornado/template.py b/tornado/template.py
index 139667d..7072760 100644
--- a/tornado/template.py
+++ b/tornado/template.py
@@ -135,7 +135,7 @@
``{% for *var* in *expr* %}...{% end %}``
Same as the python ``for`` statement.
-
+
``{% from *x* import *y* %}``
Same as the python ``import`` statement.
@@ -165,14 +165,14 @@
``{% set *x* = *y* %}``
Sets a local variable.
-``{% try %}...{% except %}...{% finally %}...{% end %}``
+``{% try %}...{% except %}...{% finally %}...{% else %}...{% end %}``
Same as the python ``try`` statement.
``{% while *condition* %}... {% end %}``
Same as the python ``while`` statement.
"""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import cStringIO
import datetime
@@ -189,6 +189,7 @@
_DEFAULT_AUTOESCAPE = "xhtml_escape"
_UNSET = object()
+
class Template(object):
"""A compiled template.
@@ -217,7 +218,7 @@
# the module name used in __name__ below.
self.compiled = compile(
escape.to_unicode(self.code),
- "%s.generated.py" % self.name.replace('.','_'),
+ "%s.generated.py" % self.name.replace('.', '_'),
"exec")
except Exception:
formatted_code = _format_code(self.code).rstrip()
@@ -326,6 +327,7 @@
def _create_template(self, name):
raise NotImplementedError()
+
class Loader(BaseLoader):
"""A template loader that loads from a single root directory.
@@ -350,7 +352,7 @@
def _create_template(self, name):
path = os.path.join(self.root, name)
- f = open(path, "r")
+ f = open(path, "rb")
template = Template(f.read(), name=name, loader=self)
f.close()
return template
@@ -404,7 +406,6 @@
return (self.body,)
-
class _ChunkList(_Node):
def __init__(self, chunks):
self.chunks = chunks
@@ -531,11 +532,13 @@
writer.current_template.autoescape, self.line)
writer.write_line("_append(_tmp)", self.line)
+
class _Module(_Expression):
def __init__(self, expression, line):
super(_Module, self).__init__("_modules." + expression, line,
raw=True)
+
class _Text(_Node):
def __init__(self, value, line):
self.value = value
@@ -608,7 +611,7 @@
ancestors = ["%s:%d" % (tmpl.name, lineno)
for (tmpl, lineno) in self.include_stack]
line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
- print >> self.file, " "*indent + line + line_comment
+ print >> self.file, " " * indent + line + line_comment
class _TemplateReader(object):
@@ -651,9 +654,12 @@
if type(key) is slice:
size = len(self)
start, stop, step = key.indices(size)
- if start is None: start = self.pos
- else: start += self.pos
- if stop is not None: stop += self.pos
+ if start is None:
+ start = self.pos
+ else:
+ start += self.pos
+ if stop is not None:
+ stop += self.pos
return self.text[slice(start, stop, step)]
elif key < 0:
return self.text[key]
@@ -751,7 +757,7 @@
# Intermediate ("else", "elif", etc) blocks
intermediate_blocks = {
- "else": set(["if", "for", "while"]),
+ "else": set(["if", "for", "while", "try"]),
"elif": set(["if"]),
"except": set(["try"]),
"finally": set(["try"]),
@@ -796,7 +802,8 @@
block = _Statement(suffix, line)
elif operator == "autoescape":
fn = suffix.strip()
- if fn == "None": fn = None
+ if fn == "None":
+ fn = None
template.autoescape = fn
continue
elif operator == "raw":
diff --git a/tornado/test/auth_test.py b/tornado/test/auth_test.py
index 2047904..748c526 100644
--- a/tornado/test/auth_test.py
+++ b/tornado/test/auth_test.py
@@ -3,12 +3,15 @@
# and ensure that it doesn't blow up (e.g. with unicode/bytes issues in
# python 3)
+
+from __future__ import absolute_import, division, with_statement
from tornado.auth import OpenIdMixin, OAuthMixin, OAuth2Mixin
from tornado.escape import json_decode
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.util import b
from tornado.web import RequestHandler, Application, asynchronous
+
class OpenIdClientLoginHandler(RequestHandler, OpenIdMixin):
def initialize(self, test):
self._OPENID_ENDPOINT = test.get_url('/openid/server/authenticate')
@@ -25,11 +28,13 @@
assert user is not None
self.finish(user)
+
class OpenIdServerAuthenticateHandler(RequestHandler):
def post(self):
assert self.get_argument('openid.mode') == 'check_authentication'
self.write('is_valid:true')
+
class OAuth1ClientLoginHandler(RequestHandler, OAuthMixin):
def initialize(self, test, version):
self._OAUTH_VERSION = version
@@ -56,6 +61,7 @@
assert access_token == dict(key=b('uiop'), secret=b('5678')), access_token
callback(dict(email='foo@example.com'))
+
class OAuth1ClientRequestParametersHandler(RequestHandler, OAuthMixin):
def initialize(self, version):
self._OAUTH_VERSION = version
@@ -68,17 +74,21 @@
'http://www.example.com/api/asdf',
dict(key='uiop', secret='5678'),
parameters=dict(foo='bar'))
- import urllib; urllib.urlencode(params)
+ import urllib
+ urllib.urlencode(params)
self.write(params)
+
class OAuth1ServerRequestTokenHandler(RequestHandler):
def get(self):
self.write('oauth_token=zxcv&oauth_token_secret=1234')
+
class OAuth1ServerAccessTokenHandler(RequestHandler):
def get(self):
self.write('oauth_token=uiop&oauth_token_secret=5678')
+
class OAuth2ClientLoginHandler(RequestHandler, OAuth2Mixin):
def initialize(self, test):
self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth2/server/authorize')
@@ -137,7 +147,7 @@
def test_oauth10_get_user(self):
response = self.fetch(
'/oauth10/client/login?oauth_token=zxcv',
- headers={'Cookie':'_oauth_request_token=enhjdg==|MTIzNA=='})
+ headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
response.rethrow()
parsed = json_decode(response.body)
self.assertEqual(parsed['email'], 'foo@example.com')
@@ -165,7 +175,7 @@
def test_oauth10a_get_user(self):
response = self.fetch(
'/oauth10a/client/login?oauth_token=zxcv',
- headers={'Cookie':'_oauth_request_token=enhjdg==|MTIzNA=='})
+ headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
response.rethrow()
parsed = json_decode(response.body)
self.assertEqual(parsed['email'], 'foo@example.com')
diff --git a/tornado/test/curl_httpclient_test.py b/tornado/test/curl_httpclient_test.py
index afa56f8..a6da39f 100644
--- a/tornado/test/curl_httpclient_test.py
+++ b/tornado/test/curl_httpclient_test.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import, division, with_statement
from tornado.test.httpclient_test import HTTPClientCommonTestCase
try:
@@ -8,6 +9,7 @@
if pycurl is not None:
from tornado.curl_httpclient import CurlAsyncHTTPClient
+
class CurlHTTPClientCommonTestCase(HTTPClientCommonTestCase):
def get_http_client(self):
client = CurlAsyncHTTPClient(io_loop=self.io_loop)
diff --git a/tornado/test/escape_test.py b/tornado/test/escape_test.py
index 42ba50b..defca1b 100644
--- a/tornado/test/escape_test.py
+++ b/tornado/test/escape_test.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
import tornado.escape
import unittest
@@ -64,11 +66,11 @@
("http://www.example.com/wpstyle/?p=364.", {},
u'<a href="http://www.example.com/wpstyle/?p=364">http://www.example.com/wpstyle/?p=364</a>.'),
- ("rdar://1234",
+ ("rdar://1234",
{"permitted_protocols": ["http", "rdar"]},
u'<a href="rdar://1234">rdar://1234</a>'),
- ("rdar:/1234",
+ ("rdar:/1234",
{"permitted_protocols": ["rdar"]},
u'<a href="rdar:/1234">rdar:/1234</a>'),
@@ -97,7 +99,7 @@
("Just a www.example.com link.", {},
u'Just a <a href="http://www.example.com">www.example.com</a> link.'),
- ("Just a www.example.com link.",
+ ("Just a www.example.com link.",
{"require_protocol": True},
u'Just a www.example.com link.'),
@@ -119,6 +121,14 @@
("www.external-link.com",
{"extra_params": 'rel="nofollow" class="external"'},
u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>'),
+
+ ("www.external-link.com and www.internal-link.com/blogs extra",
+ {"extra_params": lambda(href):'class="internal"' if href.startswith("http://www.internal-link.com") else 'rel="nofollow" class="external"'},
+ u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a> and <a href="http://www.internal-link.com/blogs" class="internal">www.internal-link.com/blogs</a> extra'),
+
+ ("www.external-link.com",
+ {"extra_params": lambda(href):' rel="nofollow" class="external" '},
+ u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>'),
]
diff --git a/tornado/test/gen_test.py b/tornado/test/gen_test.py
index 935b409..d9ac9da 100644
--- a/tornado/test/gen_test.py
+++ b/tornado/test/gen_test.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import, division, with_statement
import functools
from tornado.escape import url_escape
from tornado.httpclient import AsyncHTTPClient
@@ -7,6 +8,7 @@
from tornado import gen
+
class GenTest(AsyncTestCase):
def run_gen(self, f):
f()
@@ -46,7 +48,7 @@
def test_exception_phase1(self):
@gen.engine
def f():
- 1/0
+ 1 / 0
self.assertRaises(ZeroDivisionError, self.run_gen, f)
def test_exception_phase2(self):
@@ -54,12 +56,12 @@
def f():
self.io_loop.add_callback((yield gen.Callback("k1")))
yield gen.Wait("k1")
- 1/0
+ 1 / 0
self.assertRaises(ZeroDivisionError, self.run_gen, f)
def test_exception_in_task_phase1(self):
def fail_task(callback):
- 1/0
+ 1 / 0
@gen.engine
def f():
@@ -73,7 +75,7 @@
def test_exception_in_task_phase2(self):
# This is the case that requires the use of stack_context in gen.engine
def fail_task(callback):
- self.io_loop.add_callback(lambda: 1/0)
+ self.io_loop.add_callback(lambda: 1 / 0)
@gen.engine
def f():
@@ -166,7 +168,7 @@
def f():
try:
yield gen.Wait("k1")
- raise "did not get expected exception"
+ raise Exception("did not get expected exception")
except gen.UnknownKeyError:
pass
self.stop()
@@ -177,7 +179,7 @@
def f():
try:
yield gen.Wait("k1")
- raise "did not get expected exception"
+ raise Exception("did not get expected exception")
except gen.UnknownKeyError:
pass
(yield gen.Callback("k2"))("v2")
@@ -191,7 +193,7 @@
self.orphaned_callback = yield gen.Callback(1)
try:
self.run_gen(f)
- raise "did not get expected exception"
+ raise Exception("did not get expected exception")
except gen.LeakedCallbackError:
pass
self.orphaned_callback()
@@ -247,6 +249,26 @@
self.stop()
self.run_gen(f)
+ def test_stack_context_leak(self):
+ # regression test: repeated invocations of a gen-based
+ # function should not result in accumulated stack_contexts
+ from tornado import stack_context
+
+ @gen.engine
+ def inner(callback):
+ yield gen.Task(self.io_loop.add_callback)
+ callback()
+
+ @gen.engine
+ def outer():
+ for i in xrange(10):
+ yield gen.Task(inner)
+ stack_increase = len(stack_context._state.contexts) - initial_stack_depth
+ self.assertTrue(stack_increase <= 2)
+ self.stop()
+ initial_stack_depth = len(stack_context._state.contexts)
+ self.run_gen(outer)
+
class GenSequenceHandler(RequestHandler):
@asynchronous
@@ -264,6 +286,7 @@
yield gen.Wait("k1")
self.finish("3")
+
class GenTaskHandler(RequestHandler):
@asynchronous
@gen.engine
@@ -274,6 +297,7 @@
response.rethrow()
self.finish(b("got response: ") + response.body)
+
class GenExceptionHandler(RequestHandler):
@asynchronous
@gen.engine
@@ -283,20 +307,23 @@
yield gen.Task(io_loop.add_callback)
raise Exception("oops")
+
class GenYieldExceptionHandler(RequestHandler):
@asynchronous
@gen.engine
def get(self):
io_loop = self.request.connection.stream.io_loop
# Test the interaction of the two stack_contexts.
+
def fail_task(callback):
- io_loop.add_callback(lambda: 1/0)
+ io_loop.add_callback(lambda: 1 / 0)
try:
yield gen.Task(fail_task)
raise Exception("did not get expected exception")
except ZeroDivisionError:
self.finish('ok')
+
class GenWebTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([
diff --git a/tornado/test/httpclient_test.py b/tornado/test/httpclient_test.py
index 8388338..9ec9679 100644
--- a/tornado/test/httpclient_test.py
+++ b/tornado/test/httpclient_test.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import base64
import binascii
@@ -15,27 +15,32 @@
from tornado.util import b, bytes_type
from tornado.web import Application, RequestHandler, url
+
class HelloWorldHandler(RequestHandler):
def get(self):
name = self.get_argument("name", "world")
self.set_header("Content-Type", "text/plain")
self.finish("Hello %s!" % name)
+
class PostHandler(RequestHandler):
def post(self):
self.finish("Post arg1: %s, arg2: %s" % (
self.get_argument("arg1"), self.get_argument("arg2")))
+
class ChunkHandler(RequestHandler):
def get(self):
self.write("asdf")
self.flush()
self.write("qwer")
+
class AuthHandler(RequestHandler):
def get(self):
self.finish(self.request.headers["Authorization"])
+
class CountdownHandler(RequestHandler):
def get(self, count):
count = int(count)
@@ -44,6 +49,7 @@
else:
self.write("Zero")
+
class EchoPostHandler(RequestHandler):
def post(self):
self.write(self.request.body)
@@ -51,6 +57,8 @@
# These tests end up getting run redundantly: once here with the default
# HTTPClient implementation, and then again in each implementation's own
# test suite.
+
+
class HTTPClientCommonTestCase(AsyncHTTPTestCase, LogTrapTestCase):
def get_http_client(self):
"""Returns AsyncHTTPClient instance. May be overridden in subclass."""
@@ -124,6 +132,7 @@
0
""").replace(b("\n"), b("\r\n")), callback=stream.close)
+
def accept_callback(conn, address):
# fake an HTTP server using chunked encoding where the final chunks
# and connection close all happen at once
@@ -135,7 +144,6 @@
resp = self.wait()
resp.rethrow()
self.assertEqual(resp.body, b("12"))
-
def test_basic_auth(self):
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
diff --git a/tornado/test/httpserver_test.py b/tornado/test/httpserver_test.py
index 1a53a34..3b49a50 100644
--- a/tornado/test/httpserver_test.py
+++ b/tornado/test/httpserver_test.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
from tornado import httpclient, simple_httpclient, netutil
from tornado.escape import json_decode, utf8, _unicode, recursive_unicode, native_str
from tornado.httpserver import HTTPServer
@@ -20,6 +22,7 @@
except ImportError:
ssl = None
+
class HandlerBaseTestCase(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', self.__class__.Handler)])
@@ -29,6 +32,7 @@
response.rethrow()
return json_decode(response.body)
+
class HelloWorldRequestHandler(RequestHandler):
def initialize(self, protocol="http"):
self.expected_protocol = protocol
@@ -40,6 +44,7 @@
def post(self):
self.finish("Got %d bytes in POST" % len(self.request.body))
+
class BaseSSLTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_ssl_version(self):
raise NotImplementedError()
@@ -53,7 +58,7 @@
force_instance=True)
def get_app(self):
- return Application([('/', HelloWorldRequestHandler,
+ return Application([('/', HelloWorldRequestHandler,
dict(protocol="https"))])
def get_httpserver_options(self):
@@ -72,6 +77,7 @@
**kwargs)
return self.wait()
+
class SSLTestMixin(object):
def test_ssl(self):
response = self.fetch('/')
@@ -80,7 +86,7 @@
def test_large_post(self):
response = self.fetch('/',
method='POST',
- body='A'*5000)
+ body='A' * 5000)
self.assertEqual(response.body, b("Got 5000 bytes in POST"))
def test_non_ssl_request(self):
@@ -97,16 +103,26 @@
# For example, SSLv3 and TLSv1 throw an exception if you try to read
# from the socket before the handshake is complete, but the default
# of SSLv23 allows it.
+
+
class SSLv23Test(BaseSSLTest, SSLTestMixin):
- def get_ssl_version(self): return ssl.PROTOCOL_SSLv23
+ def get_ssl_version(self):
+ return ssl.PROTOCOL_SSLv23
+
+
class SSLv3Test(BaseSSLTest, SSLTestMixin):
- def get_ssl_version(self): return ssl.PROTOCOL_SSLv3
+ def get_ssl_version(self):
+ return ssl.PROTOCOL_SSLv3
+
+
class TLSv1Test(BaseSSLTest, SSLTestMixin):
- def get_ssl_version(self): return ssl.PROTOCOL_TLSv1
+ def get_ssl_version(self):
+ return ssl.PROTOCOL_TLSv1
if hasattr(ssl, 'PROTOCOL_SSLv2'):
class SSLv2Test(BaseSSLTest):
- def get_ssl_version(self): return ssl.PROTOCOL_SSLv2
+ def get_ssl_version(self):
+ return ssl.PROTOCOL_SSLv2
def test_sslv2_fail(self):
# This is really more of a client test, but run it here since
@@ -134,7 +150,7 @@
del SSLv23Test
del SSLv3Test
del TLSv1Test
-elif getattr(ssl, 'OPENSSL_VERSION_INFO', (0,0)) < (1,0):
+elif getattr(ssl, 'OPENSSL_VERSION_INFO', (0, 0)) < (1, 0):
# In pre-1.0 versions of openssl, SSLv23 clients always send SSLv2
# ClientHello messages, which are rejected by SSLv3 and TLSv1
# servers. Note that while the OPENSSL_VERSION_INFO was formally
@@ -143,6 +159,7 @@
del SSLv3Test
del TLSv1Test
+
class MultipartTestHandler(RequestHandler):
def post(self):
self.finish({"header": self.request.headers["X-Header-Encoding-Test"],
@@ -151,16 +168,19 @@
"filebody": _unicode(self.request.files["files"][0]["body"]),
})
+
class RawRequestHTTPConnection(simple_httpclient._HTTPConnection):
def set_request(self, request):
self.__next_request = request
- def _on_connect(self, parsed):
+ def _on_connect(self, parsed, parsed_hostname):
self.stream.write(self.__next_request)
self.__next_request = None
self.stream.read_until(b("\r\n\r\n"), self._on_headers)
# This test is also called from wsgi_test
+
+
class HTTPConnectionTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_handlers(self):
return [("/multipart", MultipartTestHandler),
@@ -174,7 +194,7 @@
conn = RawRequestHTTPConnection(self.io_loop, client,
httpclient.HTTPRequest(self.get_url("/")),
None, self.stop,
- 1024*1024)
+ 1024 * 1024)
conn.set_request(
b("\r\n").join(headers +
[utf8("Content-Length: %d\r\n" % len(body))]) +
@@ -217,9 +237,10 @@
stream.connect(("localhost", self.get_http_port()), callback=self.stop)
self.wait()
stream.write(b("\r\n").join([b("POST /hello HTTP/1.1"),
- b("Content-Length: 1024"),
- b("Expect: 100-continue"),
- b("\r\n")]), callback=self.stop)
+ b("Content-Length: 1024"),
+ b("Expect: 100-continue"),
+ b("Connection: close"),
+ b("\r\n")]), callback=self.stop)
self.wait()
stream.read_until(b("\r\n\r\n"), self.stop)
data = self.wait()
@@ -234,11 +255,14 @@
stream.read_bytes(int(headers["Content-Length"]), self.stop)
body = self.wait()
self.assertEqual(body, b("Got 1024 bytes in POST"))
+ stream.close()
+
class EchoHandler(RequestHandler):
def get(self):
self.write(recursive_unicode(self.request.arguments))
+
class TypeCheckHandler(RequestHandler):
def prepare(self):
self.errors = {}
@@ -275,13 +299,15 @@
def check_type(self, name, obj, expected_type):
actual_type = type(obj)
if expected_type != actual_type:
- self.errors[name] = "expected %s, got %s" % (expected_type,
+ self.errors[name] = "expected %s, got %s" % (expected_type,
actual_type)
+
class HTTPServerTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([("/echo", EchoHandler),
("/typecheck", TypeCheckHandler),
+ ("//doubleslash", EchoHandler),
])
def test_query_string_encoding(self):
@@ -299,6 +325,15 @@
data = json_decode(response.body)
self.assertEqual(data, {})
+ def test_double_slash(self):
+ # urlparse.urlsplit (which tornado.httpserver used to use
+ # incorrectly) would parse paths beginning with "//" as
+ # protocol-relative urls.
+ response = self.fetch("//doubleslash")
+ self.assertEqual(200, response.code)
+ self.assertEqual(json_decode(response.body), {})
+
+
class XHeaderTest(HandlerBaseTestCase):
class Handler(RequestHandler):
def get(self):
@@ -368,6 +403,8 @@
stream.read_bytes(int(headers["Content-Length"]), self.stop)
body = self.wait()
self.assertEqual(body, b("Hello world"))
+ stream.close()
+ server.stop()
if not hasattr(socket, 'AF_UNIX') or sys.platform == 'cygwin':
del UnixSocketTest
diff --git a/tornado/test/httputil_test.py b/tornado/test/httputil_test.py
index 0566b6e..736ed6d 100644
--- a/tornado/test/httputil_test.py
+++ b/tornado/test/httputil_test.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders
from tornado.escape import utf8
from tornado.testing import LogTrapTestCase
@@ -13,42 +15,42 @@
def test_url_concat_no_query_params(self):
url = url_concat(
"https://localhost/path",
- [('y','y'), ('z','z')],
+ [('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?y=y&z=z")
def test_url_concat_encode_args(self):
url = url_concat(
"https://localhost/path",
- [('y','/y'), ('z','z')],
+ [('y', '/y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?y=%2Fy&z=z")
def test_url_concat_trailing_q(self):
url = url_concat(
"https://localhost/path?",
- [('y','y'), ('z','z')],
+ [('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?y=y&z=z")
def test_url_concat_q_with_no_trailing_amp(self):
url = url_concat(
"https://localhost/path?x",
- [('y','y'), ('z','z')],
+ [('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?x&y=y&z=z")
def test_url_concat_trailing_amp(self):
url = url_concat(
"https://localhost/path?x&",
- [('y','y'), ('z','z')],
+ [('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?x&y=y&z=z")
def test_url_concat_mult_params(self):
url = url_concat(
"https://localhost/path?a=1&b=2",
- [('y','y'), ('z','z')],
+ [('y', 'y'), ('z', 'z')],
)
self.assertEqual(url, "https://localhost/path?a=1&b=2&y=y&z=z")
@@ -59,6 +61,7 @@
)
self.assertEqual(url, "https://localhost/path?r=1&t=2")
+
class MultipartFormDataTest(LogTrapTestCase):
def test_file_upload(self):
data = b("""\
@@ -73,7 +76,7 @@
file = files["files"][0]
self.assertEqual(file["filename"], "ab.txt")
self.assertEqual(file["body"], b("Foo"))
-
+
def test_unquoted_names(self):
# quotes are optional unless special characters are present
data = b("""\
@@ -88,7 +91,7 @@
file = files["files"][0]
self.assertEqual(file["filename"], "ab.txt")
self.assertEqual(file["body"], b("Foo"))
-
+
def test_special_filenames(self):
filenames = ['a;b.txt',
'a"b.txt',
@@ -114,6 +117,85 @@
self.assertEqual(file["filename"], filename)
self.assertEqual(file["body"], b("Foo"))
+ def test_boundary_starts_and_ends_with_quotes(self):
+ data = b('''\
+--1234
+Content-Disposition: form-data; name="files"; filename="ab.txt"
+
+Foo
+--1234--''').replace(b("\n"), b("\r\n"))
+ args = {}
+ files = {}
+ parse_multipart_form_data(b('"1234"'), data, args, files)
+ file = files["files"][0]
+ self.assertEqual(file["filename"], "ab.txt")
+ self.assertEqual(file["body"], b("Foo"))
+
+ def test_missing_headers(self):
+ data = b('''\
+--1234
+
+Foo
+--1234--''').replace(b("\n"), b("\r\n"))
+ args = {}
+ files = {}
+ parse_multipart_form_data(b("1234"), data, args, files)
+ self.assertEqual(files, {})
+
+ def test_invalid_content_disposition(self):
+ data = b('''\
+--1234
+Content-Disposition: invalid; name="files"; filename="ab.txt"
+
+Foo
+--1234--''').replace(b("\n"), b("\r\n"))
+ args = {}
+ files = {}
+ parse_multipart_form_data(b("1234"), data, args, files)
+ self.assertEqual(files, {})
+
+ def test_line_does_not_end_with_correct_line_break(self):
+ data = b('''\
+--1234
+Content-Disposition: form-data; name="files"; filename="ab.txt"
+
+Foo--1234--''').replace(b("\n"), b("\r\n"))
+ args = {}
+ files = {}
+ parse_multipart_form_data(b("1234"), data, args, files)
+ self.assertEqual(files, {})
+
+ def test_content_disposition_header_without_name_parameter(self):
+ data = b("""\
+--1234
+Content-Disposition: form-data; filename="ab.txt"
+
+Foo
+--1234--""").replace(b("\n"), b("\r\n"))
+ args = {}
+ files = {}
+ parse_multipart_form_data(b("1234"), data, args, files)
+ self.assertEqual(files, {})
+
+ def test_data_after_final_boundary(self):
+ # The spec requires that data after the final boundary be ignored.
+ # http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
+ # In practice, some libraries include an extra CRLF after the boundary.
+ data = b("""\
+--1234
+Content-Disposition: form-data; name="files"; filename="ab.txt"
+
+Foo
+--1234--
+""").replace(b("\n"), b("\r\n"))
+ args = {}
+ files = {}
+ parse_multipart_form_data(b("1234"), data, args, files)
+ file = files["files"][0]
+ self.assertEqual(file["filename"], "ab.txt")
+ self.assertEqual(file["body"], b("Foo"))
+
+
class HTTPHeadersTest(unittest.TestCase):
def test_multi_line(self):
# Lines beginning with whitespace are appended to the previous line
diff --git a/tornado/test/import_test.py b/tornado/test/import_test.py
index 7da1a1e..584f070 100644
--- a/tornado/test/import_test.py
+++ b/tornado/test/import_test.py
@@ -1,5 +1,7 @@
+from __future__ import absolute_import, division, with_statement
import unittest
+
class ImportTest(unittest.TestCase):
def test_import_everything(self):
# Some of our modules are not otherwise tested. Import them
diff --git a/tornado/test/ioloop_test.py b/tornado/test/ioloop_test.py
index 74bb602..df2cd77 100644
--- a/tornado/test/ioloop_test.py
+++ b/tornado/test/ioloop_test.py
@@ -1,11 +1,14 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
import datetime
import unittest
import time
from tornado.testing import AsyncTestCase, LogTrapTestCase
+
class TestIOLoop(AsyncTestCase, LogTrapTestCase):
def test_add_callback_wakeup(self):
# Make sure that add_callback from inside a running IOLoop
diff --git a/tornado/test/iostream_test.py b/tornado/test/iostream_test.py
index 43b2e17..5aa1d9b 100644
--- a/tornado/test/iostream_test.py
+++ b/tornado/test/iostream_test.py
@@ -1,16 +1,21 @@
+from __future__ import absolute_import, division, with_statement
from tornado import netutil
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase, get_unused_port
from tornado.util import b
from tornado.web import RequestHandler, Application
+import errno
import socket
+import sys
import time
+
class HelloHandler(RequestHandler):
def get(self):
self.write("Hello")
+
class TestIOStream(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', HelloHandler)])
@@ -20,9 +25,11 @@
[listener] = netutil.bind_sockets(port, '127.0.0.1',
family=socket.AF_INET)
streams = [None, None]
+
def accept_callback(connection, address):
streams[0] = IOStream(connection, io_loop=self.io_loop, **kwargs)
self.stop()
+
def connect_callback():
streams[1] = client_stream
self.stop()
@@ -58,6 +65,8 @@
data = self.wait()
self.assertEqual(data, b("200"))
+ s.close()
+
def test_write_zero_bytes(self):
# Attempting to write zero bytes should run the callback without
# going into an infinite loop.
@@ -66,7 +75,9 @@
self.wait()
# As a side effect, the stream is now listening for connection
# close (if it wasn't already), but is not listening for writes
- self.assertEqual(server._state, IOLoop.READ|IOLoop.ERROR)
+ self.assertEqual(server._state, IOLoop.READ | IOLoop.ERROR)
+ server.close()
+ client.close()
def test_connection_refused(self):
# When a connection is refused, the connect callback should not
@@ -75,12 +86,25 @@
port = get_unused_port()
stream = IOStream(socket.socket(), self.io_loop)
self.connect_called = False
+
def connect_callback():
self.connect_called = True
stream.set_close_callback(self.stop)
stream.connect(("localhost", port), connect_callback)
self.wait()
self.assertFalse(self.connect_called)
+ self.assertTrue(isinstance(stream.error, socket.error), stream.error)
+ if sys.platform != 'cygwin':
+ # cygwin's errnos don't match those used on native windows python
+ self.assertEqual(stream.error.args[0], errno.ECONNREFUSED)
+
+ def test_gaierror(self):
+ # Test that IOStream sets its exc_info on getaddrinfo error
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
+ stream = IOStream(s, io_loop=self.io_loop)
+ stream.set_close_callback(self.stop)
+ stream.connect(('adomainthatdoesntexist.asdf', 54321))
+ self.assertTrue(isinstance(stream.error, socket.gaierror), stream.error)
def test_connection_closed(self):
# When a server sends a response and then closes the connection,
@@ -97,7 +121,7 @@
s.connect(("localhost", self.get_http_port()))
stream = IOStream(s, io_loop=self.io_loop)
stream.write(b("GET / HTTP/1.0\r\n\r\n"))
-
+
stream.read_until_close(self.stop)
data = self.wait()
self.assertTrue(data.startswith(b("HTTP/1.0 200")))
@@ -108,9 +132,11 @@
try:
chunks = []
final_called = []
+
def streaming_callback(data):
chunks.append(data)
self.stop()
+
def final_callback(data):
assert not data
final_called.append(True)
@@ -135,6 +161,7 @@
server, client = self.make_iostream_pair()
try:
chunks = []
+
def callback(data):
chunks.append(data)
self.stop()
@@ -161,10 +188,12 @@
client.set_close_callback(self.stop)
server.write(b("12"))
chunks = []
+
def callback1(data):
chunks.append(data)
client.read_bytes(1, callback2)
server.close()
+
def callback2(data):
chunks.append(data)
client.read_bytes(1, callback1)
@@ -201,3 +230,20 @@
finally:
server.close()
client.close()
+
+ def test_large_read_until(self):
+ # Performance test: read_until used to have a quadratic component
+ # so a read_until of 4MB would take 8 seconds; now it takes 0.25
+ # seconds.
+ server, client = self.make_iostream_pair()
+ try:
+ NUM_KB = 4096
+ for i in xrange(NUM_KB):
+ client.write(b("A") * 1024)
+ client.write(b("\r\n"))
+ server.read_until(b("\r\n"), self.stop)
+ data = self.wait()
+ self.assertEqual(len(data), NUM_KB * 1024 + 2)
+ finally:
+ server.close()
+ client.close()
diff --git a/tornado/test/options_test.py b/tornado/test/options_test.py
new file mode 100644
index 0000000..80b1929
--- /dev/null
+++ b/tornado/test/options_test.py
@@ -0,0 +1,19 @@
+from __future__ import absolute_import, division, with_statement
+import unittest
+
+from tornado.options import _Options
+
+
+class OptionsTest(unittest.TestCase):
+ def setUp(self):
+ self.options = _Options()
+ define = self.options.define
+ # these are currently required
+ define("logging", default="none")
+ define("help", default=False)
+
+ define("port", default=80)
+
+ def test_parse_command_line(self):
+ self.options.parse_command_line(["main.py", "--port=443"])
+ self.assertEqual(self.options.port, 443)
diff --git a/tornado/test/process_test.py b/tornado/test/process_test.py
index de9ae52..04bcf16 100644
--- a/tornado/test/process_test.py
+++ b/tornado/test/process_test.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
import logging
import os
import signal
@@ -16,6 +18,8 @@
# Not using AsyncHTTPTestCase because we need control over the IOLoop.
# Logging is tricky here so you may want to replace LogTrapTestCase
# with unittest.TestCase when debugging.
+
+
class ProcessTest(LogTrapTestCase):
def get_app(self):
class ProcessHandler(RequestHandler):
@@ -41,16 +45,19 @@
logging.error("aborting child process from tearDown")
logging.shutdown()
os._exit(1)
+ # In the surviving process, clear the alarm we set earlier
+ signal.alarm(0)
super(ProcessTest, self).tearDown()
def test_multi_process(self):
self.assertFalse(IOLoop.initialized())
port = get_unused_port()
+
def get_url(path):
return "http://127.0.0.1:%d%s" % (port, path)
sockets = bind_sockets(port, "127.0.0.1")
# ensure that none of these processes live too long
- signal.alarm(5) # master process
+ signal.alarm(5)
try:
id = fork_processes(3, max_restarts=3)
except SystemExit, e:
@@ -58,21 +65,19 @@
# finished with status 0
self.assertEqual(e.code, 0)
self.assertTrue(task_id() is None)
- for sock in sockets: sock.close()
- signal.alarm(0)
+ for sock in sockets:
+ sock.close()
return
- signal.alarm(5) # child process
try:
if id in (0, 1):
- signal.alarm(5)
self.assertEqual(id, task_id())
server = HTTPServer(self.get_app())
server.add_sockets(sockets)
IOLoop.instance().start()
elif id == 2:
- signal.alarm(5)
self.assertEqual(id, task_id())
- for sock in sockets: sock.close()
+ for sock in sockets:
+ sock.close()
# Always use SimpleAsyncHTTPClient here; the curl
# version appears to get confused sometimes if the
# connection gets closed before it's had a chance to
@@ -116,7 +121,7 @@
except Exception:
logging.error("exception in child process %d", id, exc_info=True)
raise
-
+
if os.name != 'posix' or sys.platform == 'cygwin':
# All sorts of unixisms here
diff --git a/tornado/test/run_pyversion_tests.py b/tornado/test/run_pyversion_tests.py
index 8c4e967..1680645 100755
--- a/tornado/test/run_pyversion_tests.py
+++ b/tornado/test/run_pyversion_tests.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python
"""Runs the tornado test suite with all supported python interpreters."""
+from __future__ import absolute_import, division, with_statement
+
import os
import subprocess
import sys
@@ -13,12 +15,14 @@
"pypy",
]
+
def exists_on_path(filename):
for dir in os.environ["PATH"].split(":"):
if os.path.exists(os.path.join(dir, filename)):
return True
return False
+
def main():
for interpreter in INTERPRETERS:
print "=================== %s =======================" % interpreter
diff --git a/tornado/test/runtests.py b/tornado/test/runtests.py
index 188aee8..4235aba 100755
--- a/tornado/test/runtests.py
+++ b/tornado/test/runtests.py
@@ -1,4 +1,6 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
import unittest
TEST_MODULES = [
@@ -15,19 +17,43 @@
'tornado.test.import_test',
'tornado.test.ioloop_test',
'tornado.test.iostream_test',
+ 'tornado.test.options_test',
'tornado.test.process_test',
'tornado.test.simple_httpclient_test',
'tornado.test.stack_context_test',
'tornado.test.template_test',
'tornado.test.testing_test',
'tornado.test.twisted_test',
+ 'tornado.test.util_test',
'tornado.test.web_test',
'tornado.test.wsgi_test',
]
+
def all():
return unittest.defaultTestLoader.loadTestsFromNames(TEST_MODULES)
if __name__ == '__main__':
+ # The -W command-line option does not work in a virtualenv with
+ # python 3 (as of virtualenv 1.7), so configure warnings
+ # programmatically instead.
+ import warnings
+ # Be strict about most warnings. This also turns on warnings that are
+ # ignored by default, including DeprecationWarnings and
+ # python 3.2's ResourceWarnings.
+ warnings.filterwarnings("error")
+ # Tornado generally shouldn't use anything deprecated, but some of
+ # our dependencies do (last match wins).
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ warnings.filterwarnings("error", category=DeprecationWarning,
+ module=r"tornado\..*")
+ # tornado.platform.twisted uses a deprecated function from
+ # zope.interface in order to maintain compatibility with
+ # python 2.5
+ warnings.filterwarnings("ignore", category=DeprecationWarning,
+ module=r"tornado\.platform\.twisted")
+ warnings.filterwarnings("ignore", category=DeprecationWarning,
+ module=r"tornado\.test\.twisted_test")
+
import tornado.testing
tornado.testing.main()
diff --git a/tornado/test/simple_httpclient_test.py b/tornado/test/simple_httpclient_test.py
index ebb265b..db7562f 100644
--- a/tornado/test/simple_httpclient_test.py
+++ b/tornado/test/simple_httpclient_test.py
@@ -1,17 +1,22 @@
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import collections
+from contextlib import closing
import gzip
import logging
+import re
import socket
+from tornado.httpclient import AsyncHTTPClient
+from tornado.httputil import HTTPHeaders
from tornado.ioloop import IOLoop
from tornado.simple_httpclient import SimpleAsyncHTTPClient, _DEFAULT_CA_CERTS
from tornado.test.httpclient_test import HTTPClientCommonTestCase, ChunkHandler, CountdownHandler, HelloWorldHandler
-from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
+from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, LogTrapTestCase
from tornado.util import b
from tornado.web import RequestHandler, Application, asynchronous, url
+
class SimpleHTTPClientCommonTestCase(HTTPClientCommonTestCase):
def get_http_client(self):
client = SimpleAsyncHTTPClient(io_loop=self.io_loop,
@@ -23,6 +28,7 @@
# try to run it again.
del HTTPClientCommonTestCase
+
class TriggerHandler(RequestHandler):
def initialize(self, queue, wake_callback):
self.queue = queue
@@ -32,40 +38,58 @@
def get(self):
logging.info("queuing trigger")
self.queue.append(self.finish)
- self.wake_callback()
+ if self.get_argument("wake", "true") == "true":
+ self.wake_callback()
+
class HangHandler(RequestHandler):
@asynchronous
def get(self):
pass
+
class ContentLengthHandler(RequestHandler):
def get(self):
self.set_header("Content-Length", self.get_argument("value"))
self.write("ok")
+
class HeadHandler(RequestHandler):
def head(self):
self.set_header("Content-Length", "7")
+
+class OptionsHandler(RequestHandler):
+ def options(self):
+ self.set_header("Access-Control-Allow-Origin", "*")
+ self.write("ok")
+
+
class NoContentHandler(RequestHandler):
def get(self):
if self.get_argument("error", None):
self.set_header("Content-Length", "7")
self.set_status(204)
+
class SeeOther303PostHandler(RequestHandler):
def post(self):
assert self.request.body == b("blah")
self.set_header("Location", "/303_get")
self.set_status(303)
+
class SeeOther303GetHandler(RequestHandler):
def get(self):
assert not self.request.body
self.write("ok")
+class HostEchoHandler(RequestHandler):
+ def get(self):
+ self.write(self.request.headers["Host"])
+
+
class SimpleHTTPClientTestCase(AsyncHTTPTestCase, LogTrapTestCase):
def setUp(self):
super(SimpleHTTPClientTestCase, self).setUp()
@@ -83,9 +107,11 @@
url("/hello", HelloWorldHandler),
url("/content_length", ContentLengthHandler),
url("/head", HeadHandler),
+ url("/options", OptionsHandler),
url("/no_content", NoContentHandler),
url("/303_post", SeeOther303PostHandler),
url("/303_get", SeeOther303GetHandler),
+ url("/host_echo", HostEchoHandler),
], gzip=True)
def test_singleton(self):
@@ -164,19 +190,28 @@
self.assertTrue(response.effective_url.endswith("/countdown/2"))
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
+ def test_header_reuse(self):
+ # Apps may reuse a headers object if they are only passing in constant
+ # headers like user-agent. The header object should not be modified.
+ headers = HTTPHeaders({'User-Agent': 'Foo'})
+ self.fetch("/hello", headers=headers)
+ self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])
+
def test_303_redirect(self):
- response = self.fetch("/303_post", method="POST", body="blah")
- self.assertEqual(200, response.code)
- self.assertTrue(response.request.url.endswith("/303_post"))
- self.assertTrue(response.effective_url.endswith("/303_get"))
- #request is the original request, is a POST still
- self.assertEqual("POST", response.request.method)
+ response = self.fetch("/303_post", method="POST", body="blah")
+ self.assertEqual(200, response.code)
+ self.assertTrue(response.request.url.endswith("/303_post"))
+ self.assertTrue(response.effective_url.endswith("/303_get"))
+ #request is the original request, is a POST still
+ self.assertEqual("POST", response.request.method)
def test_request_timeout(self):
- response = self.fetch('/hang', request_timeout=0.1)
+ response = self.fetch('/trigger?wake=false', request_timeout=0.1)
self.assertEqual(response.code, 599)
self.assertTrue(0.099 < response.request_time < 0.11, response.request_time)
self.assertEqual(str(response.error), "HTTP 599: Timeout")
+ # trigger the hanging request to let it clean up after itself
+ self.triggers.popleft()()
def test_ipv6(self):
if not socket.has_ipv6:
@@ -218,6 +253,13 @@
self.assertEqual(response.headers["content-length"], "7")
self.assertFalse(response.body)
+ def test_options_request(self):
+ response = self.fetch("/options", method="OPTIONS")
+ self.assertEqual(response.code, 200)
+ self.assertEqual(response.headers["content-length"], "2")
+ self.assertEqual(response.headers["access-control-allow-origin"], "*")
+ self.assertEqual(response.body, b("ok"))
+
def test_no_content(self):
response = self.fetch("/no_content")
self.assertEqual(response.code, 204)
@@ -228,3 +270,50 @@
# 204 status with non-zero content length is malformed
response = self.fetch("/no_content?error=1")
self.assertEqual(response.code, 599)
+
+ def test_host_header(self):
+ host_re = re.compile(b("^localhost:[0-9]+$"))
+ response = self.fetch("/host_echo")
+ self.assertTrue(host_re.match(response.body))
+
+ url = self.get_url("/host_echo").replace("http://", "http://me:secret@")
+ self.http_client.fetch(url, self.stop)
+ response = self.wait()
+ self.assertTrue(host_re.match(response.body), response.body)
+
+
+class CreateAsyncHTTPClientTestCase(AsyncTestCase, LogTrapTestCase):
+ def setUp(self):
+ super(CreateAsyncHTTPClientTestCase, self).setUp()
+ self.saved = AsyncHTTPClient._save_configuration()
+
+ def tearDown(self):
+ AsyncHTTPClient._restore_configuration(self.saved)
+ super(CreateAsyncHTTPClientTestCase, self).tearDown()
+
+ def test_max_clients(self):
+ # The max_clients argument is tricky because it was originally
+ # allowed to be passed positionally; newer arguments are keyword-only.
+ AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
+ with closing(AsyncHTTPClient(
+ self.io_loop, force_instance=True)) as client:
+ self.assertEqual(client.max_clients, 10)
+ with closing(AsyncHTTPClient(
+ self.io_loop, 11, force_instance=True)) as client:
+ self.assertEqual(client.max_clients, 11)
+ with closing(AsyncHTTPClient(
+ self.io_loop, max_clients=11, force_instance=True)) as client:
+ self.assertEqual(client.max_clients, 11)
+
+ # Now configure max_clients statically and try overriding it
+ # with each way max_clients can be passed
+ AsyncHTTPClient.configure(SimpleAsyncHTTPClient, max_clients=12)
+ with closing(AsyncHTTPClient(
+ self.io_loop, force_instance=True)) as client:
+ self.assertEqual(client.max_clients, 12)
+ with closing(AsyncHTTPClient(
+ self.io_loop, max_clients=13, force_instance=True)) as client:
+ self.assertEqual(client.max_clients, 13)
+ with closing(AsyncHTTPClient(
+ self.io_loop, max_clients=14, force_instance=True)) as client:
+ self.assertEqual(client.max_clients, 14)
diff --git a/tornado/test/stack_context_test.py b/tornado/test/stack_context_test.py
index b79f1e3..dc71759 100644
--- a/tornado/test/stack_context_test.py
+++ b/tornado/test/stack_context_test.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
from tornado.stack_context import StackContext, wrap
from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, LogTrapTestCase
@@ -10,6 +10,7 @@
import logging
import unittest
+
class TestRequestHandler(RequestHandler):
def __init__(self, app, request, io_loop):
super(TestRequestHandler, self).__init__(app, request)
@@ -38,6 +39,7 @@
else:
return 'unexpected failure'
+
class HTTPStackContextTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', TestRequestHandler,
@@ -53,6 +55,7 @@
self.response = response
self.stop()
+
class StackContextTest(AsyncTestCase, LogTrapTestCase):
def setUp(self):
super(StackContextTest, self).setUp()
@@ -73,10 +76,12 @@
with StackContext(functools.partial(self.context, 'library')):
self.io_loop.add_callback(
functools.partial(library_inner_callback, callback))
+
def library_inner_callback(callback):
self.assertEqual(self.active_contexts[-2:],
['application', 'library'])
callback()
+
def final_callback():
# implementation detail: the full context stack at this point
# is ['application', 'library', 'application']. The 'library'
@@ -88,5 +93,37 @@
library_function(final_callback)
self.wait()
+ def test_deactivate(self):
+ deactivate_callbacks = []
+
+ def f1():
+ with StackContext(functools.partial(self.context, 'c1')) as c1:
+ deactivate_callbacks.append(c1)
+ self.io_loop.add_callback(f2)
+
+ def f2():
+ with StackContext(functools.partial(self.context, 'c2')) as c2:
+ deactivate_callbacks.append(c2)
+ self.io_loop.add_callback(f3)
+
+ def f3():
+ with StackContext(functools.partial(self.context, 'c3')) as c3:
+ deactivate_callbacks.append(c3)
+ self.io_loop.add_callback(f4)
+
+ def f4():
+ self.assertEqual(self.active_contexts, ['c1', 'c2', 'c3'])
+ deactivate_callbacks[1]()
+ # deactivating a context doesn't remove it immediately,
+ # but it will be missing from the next iteration
+ self.assertEqual(self.active_contexts, ['c1', 'c2', 'c3'])
+ self.io_loop.add_callback(f5)
+
+ def f5():
+ self.assertEqual(self.active_contexts, ['c1', 'c3'])
+ self.stop()
+ self.io_loop.add_callback(f1)
+ self.wait()
+
if __name__ == '__main__':
unittest.main()
diff --git a/tornado/test/template_test.py b/tornado/test/template_test.py
index c2a0533..d70e487 100644
--- a/tornado/test/template_test.py
+++ b/tornado/test/template_test.py
@@ -1,12 +1,14 @@
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
+import os
import traceback
-from tornado.escape import utf8, native_str
-from tornado.template import Template, DictLoader, ParseError
+from tornado.escape import utf8, native_str, to_unicode
+from tornado.template import Template, DictLoader, ParseError, Loader
from tornado.testing import LogTrapTestCase
from tornado.util import b, bytes_type, ObjectDict
+
class TemplateTest(LogTrapTestCase):
def test_simple(self):
template = Template("Hello {{ name }}!")
@@ -85,11 +87,12 @@
self.assertEqual(template.generate(), utf8(u"\u00e9"))
def test_custom_namespace(self):
- loader = DictLoader({"test.html": "{{ inc(5) }}"}, namespace={"inc": lambda x: x+1})
+ loader = DictLoader({"test.html": "{{ inc(5) }}"}, namespace={"inc": lambda x: x + 1})
self.assertEqual(loader.load("test.html").generate(), b("6"))
def test_apply(self):
- def upper(s): return s.upper()
+ def upper(s):
+ return s.upper()
template = Template(utf8("{% apply upper %}foo{% end %}"))
self.assertEqual(template.generate(upper=upper), b("FOO"))
@@ -98,10 +101,21 @@
self.assertEqual(template.generate(x=5), b("yes"))
self.assertEqual(template.generate(x=3), b("no"))
+ def test_try(self):
+ template = Template(utf8("""{% try %}
+try{% set y = 1/x %}
+{% except %}-except
+{% else %}-else
+{% finally %}-finally
+{% end %}"""))
+ self.assertEqual(template.generate(x=1), b("\ntry\n-else\n-finally\n"))
+ self.assertEqual(template.generate(x=0), b("\ntry-except\n-finally\n"))
+
def test_comment_directive(self):
template = Template(utf8("{% comment blah blah %}foo"))
self.assertEqual(template.generate(), b("foo"))
+
class StackTraceTest(LogTrapTestCase):
def test_error_line_number_expression(self):
loader = DictLoader({"test.html": """one
@@ -157,7 +171,6 @@
exc_stack = traceback.format_exc()
self.assertTrue("# base.html:1" in exc_stack)
-
def test_error_line_number_extends_sub_error(self):
loader = DictLoader({
"base.html": "{% block 'block' %}{% end %}",
@@ -228,7 +241,7 @@
expr: {{ name }}
raw: {% raw name %}""",
}
-
+
def test_default_off(self):
loader = DictLoader(self.templates, autoescape=None)
name = "Bobby <table>s"
@@ -243,7 +256,7 @@
b("escaped: Bobby &lt;table&gt;s\n"
"unescaped: Bobby <table>s\n"
"default: Bobby <table>s\n"))
-
+
def test_default_on(self):
loader = DictLoader(self.templates, autoescape="xhtml_escape")
name = "Bobby <table>s"
@@ -253,7 +266,7 @@
b("Bobby <table>s"))
self.assertEqual(loader.load("default.html").generate(name=name),
b("Bobby &lt;table&gt;s"))
-
+
self.assertEqual(loader.load("include.html").generate(name=name),
b("escaped: Bobby &lt;table&gt;s\n"
"unescaped: Bobby <table>s\n"
@@ -269,7 +282,9 @@
def test_extended_block(self):
loader = DictLoader(self.templates)
- def render(name): return loader.load(name).generate(name="<script>")
+
+ def render(name):
+ return loader.load(name).generate(name="<script>")
self.assertEqual(render("escaped_extends_unescaped.html"),
b("base: <script>"))
self.assertEqual(render("escaped_overrides_unescaped.html"),
@@ -282,19 +297,23 @@
def test_raw_expression(self):
loader = DictLoader(self.templates)
- def render(name): return loader.load(name).generate(name='<>&"')
+
+ def render(name):
+ return loader.load(name).generate(name='<>&"')
self.assertEqual(render("raw_expression.html"),
b("expr: &lt;&gt;&amp;&quot;\n"
"raw: <>&\""))
def test_custom_escape(self):
- loader = DictLoader({"foo.py":
+ loader = DictLoader({"foo.py":
"{% autoescape py_escape %}s = {{ name }}\n"})
+
def py_escape(s):
self.assertEqual(type(s), bytes_type)
return repr(native_str(s))
+
def render(template, name):
- return loader.load(template).generate(py_escape=py_escape,
+ return loader.load(template).generate(py_escape=py_escape,
name=name)
self.assertEqual(render("foo.py", "<html>"),
b("s = '<html>'\n"))
@@ -302,3 +321,13 @@
b("""s = "';sys.exit()"\n"""))
self.assertEqual(render("foo.py", ["not a string"]),
b("""s = "['not a string']"\n"""))
+
+
+class TemplateLoaderTest(LogTrapTestCase):
+ def setUp(self):
+ self.loader = Loader(os.path.join(os.path.dirname(__file__), "templates"))
+
+ def test_utf8_in_file(self):
+ tmpl = self.loader.load("utf8.html")
+ result = tmpl.generate()
+ self.assertEqual(to_unicode(result).strip(), u"H\u00e9llo")
diff --git a/tornado/test/templates/utf8.html b/tornado/test/templates/utf8.html
new file mode 100644
index 0000000..c5253df
--- /dev/null
+++ b/tornado/test/templates/utf8.html
@@ -0,0 +1 @@
+Héllo
diff --git a/tornado/test/testing_test.py b/tornado/test/testing_test.py
index bdca031..1de20df 100644
--- a/tornado/test/testing_test.py
+++ b/tornado/test/testing_test.py
@@ -1,16 +1,31 @@
#!/usr/bin/env python
+
+from __future__ import absolute_import, division, with_statement
import unittest
+import time
from tornado.testing import AsyncTestCase, LogTrapTestCase
+
class AsyncTestCaseTest(AsyncTestCase, LogTrapTestCase):
def test_exception_in_callback(self):
- self.io_loop.add_callback(lambda: 1/0)
+ self.io_loop.add_callback(lambda: 1 / 0)
try:
self.wait()
self.fail("did not get expected exception")
except ZeroDivisionError:
pass
+ def test_subsequent_wait_calls(self):
+ """
+ This test makes sure that a second call to wait()
+ clears the first timeout.
+ """
+ self.io_loop.add_timeout(time.time() + 0.01, self.stop)
+ self.wait(timeout=0.02)
+ self.io_loop.add_timeout(time.time() + 0.03, self.stop)
+ self.wait(timeout=0.1)
+
+
class SetUpTearDownTest(unittest.TestCase):
def test_set_up_tear_down(self):
"""
diff --git a/tornado/test/twisted_test.py b/tornado/test/twisted_test.py
index ba53c78..3fe7ee9 100644
--- a/tornado/test/twisted_test.py
+++ b/tornado/test/twisted_test.py
@@ -17,7 +17,10 @@
Unittest for the twisted-style reactor.
"""
+from __future__ import absolute_import, division, with_statement
+
import os
+import signal
import thread
import threading
import unittest
@@ -38,7 +41,9 @@
fcntl = None
twisted = None
IReadDescriptor = IWriteDescriptor = None
- def implements(f): pass
+
+ def implements(f):
+ pass
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop
@@ -47,13 +52,30 @@
from tornado.util import import_object
from tornado.web import RequestHandler, Application
+
+def save_signal_handlers():
+ saved = {}
+ for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
+ saved[sig] = signal.getsignal(sig)
+ assert "twisted" not in repr(saved), repr(saved)
+ return saved
+
+
+def restore_signal_handlers(saved):
+ for sig, handler in saved.iteritems():
+ signal.signal(sig, handler)
+
+
class ReactorTestCase(unittest.TestCase):
def setUp(self):
+ self._saved_signals = save_signal_handlers()
self._io_loop = IOLoop()
self._reactor = TornadoReactor(self._io_loop)
def tearDown(self):
self._io_loop.close(all_fds=True)
+ restore_signal_handlers(self._saved_signals)
+
class ReactorWhenRunningTest(ReactorTestCase):
def test_whenRunning(self):
@@ -72,6 +94,7 @@
def anotherWhenRunningCallback(self):
self._anotherWhenRunningCalled = True
+
class ReactorCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._laterCalled = False
@@ -89,6 +112,7 @@
self._called = self._reactor.seconds()
self._reactor.stop()
+
class ReactorTwoCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._later1Called = False
@@ -116,6 +140,7 @@
self._called2 = self._reactor.seconds()
self._reactor.stop()
+
class ReactorCallFromThreadTest(ReactorTestCase):
def setUp(self):
super(ReactorCallFromThreadTest, self).setUp()
@@ -143,6 +168,7 @@
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
+
class ReactorCallInThread(ReactorTestCase):
def setUp(self):
super(ReactorCallInThread, self).setUp()
@@ -159,6 +185,7 @@
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
+
class Reader:
implements(IReadDescriptor)
@@ -166,7 +193,8 @@
self._fd = fd
self._callback = callback
- def logPrefix(self): return "Reader"
+ def logPrefix(self):
+ return "Reader"
def close(self):
self._fd.close()
@@ -180,6 +208,7 @@
def doRead(self):
self._callback(self._fd)
+
class Writer:
implements(IWriteDescriptor)
@@ -187,7 +216,8 @@
self._fd = fd
self._callback = callback
- def logPrefix(self): return "Writer"
+ def logPrefix(self):
+ return "Writer"
def close(self):
self._fd.close()
@@ -201,6 +231,7 @@
def doWrite(self):
self._callback(self._fd)
+
class ReactorReaderWriterTest(ReactorTestCase):
def _set_nonblocking(self, fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
@@ -227,9 +258,11 @@
reads it, check the value and ends the test.
"""
self.shouldWrite = True
+
def checkReadInput(fd):
self.assertEquals(fd.read(), 'x')
self._reactor.stop()
+
def writeOnce(fd):
if self.shouldWrite:
self.shouldWrite = False
@@ -283,18 +316,23 @@
# Test various combinations of twisted and tornado http servers,
# http clients, and event loop interfaces.
+
+
class CompatibilityTests(unittest.TestCase):
def setUp(self):
+ self.saved_signals = save_signal_handlers()
self.io_loop = IOLoop()
self.reactor = TornadoReactor(self.io_loop)
def tearDown(self):
self.reactor.disconnectAll()
self.io_loop.close(all_fds=True)
+ restore_signal_handlers(self.saved_signals)
def start_twisted_server(self):
class HelloResource(Resource):
isLeaf = True
+
def render_GET(self, request):
return "Hello from twisted!"
site = Site(HelloResource())
@@ -323,6 +361,7 @@
def tornado_fetch(self, url, runner):
responses = []
client = AsyncHTTPClient(self.io_loop)
+
def callback(response):
responses.append(response)
self.stop_loop()
@@ -337,18 +376,23 @@
chunks = []
client = Agent(self.reactor)
d = client.request('GET', url)
+
class Accumulator(Protocol):
def __init__(self, finished):
self.finished = finished
+
def dataReceived(self, data):
chunks.append(data)
+
def connectionLost(self, reason):
self.finished.callback(None)
+
def callback(response):
finished = Deferred()
response.deliverBody(Accumulator(finished))
return finished
d.addCallback(callback)
+
def shutdown(ignored):
self.stop_loop()
d.addBoth(shutdown)
@@ -412,11 +456,20 @@
# Doesn't clean up its temp files
'test_shebang',
],
- 'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
- 'test_systemCallUninterruptedByChildExit',
+ # Process tests appear to work on OSX 10.7, but not 10.6
+ #'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
+ # 'test_systemCallUninterruptedByChildExit',
+ # ],
+ 'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [
+ 'test_badContext', # ssl-related; see also SSLClientTestsMixin
],
- 'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [],
- 'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [],
+ 'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [
+ # These use link-local addresses and cause firewall prompts on mac
+ 'test_buildProtocolIPv6AddressScopeID',
+ 'test_portGetHostOnIPv6ScopeID',
+ 'test_serverGetHostOnIPv6ScopeID',
+ 'test_serverGetPeerOnIPv6ScopeID',
+ ],
'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [],
'twisted.internet.test.test_tcp.WriteSequenceTests': [],
'twisted.internet.test.test_tcp.AbortConnectionTestCase': [],
@@ -446,9 +499,15 @@
# The test_func may be defined in a mixin, so clobber
# it instead of delattr()
setattr(test_class, test_func, lambda self: None)
+
def make_test_subclass(test_class):
class TornadoTest(test_class):
_reactors = ["tornado.platform.twisted._TestReactor"]
+
+ def buildReactor(self):
+ self.__saved_signals = save_signal_handlers()
+ return test_class.buildReactor(self)
+
def unbuildReactor(self, reactor):
test_class.unbuildReactor(self, reactor)
# Clean up file descriptors (especially epoll/kqueue
@@ -457,6 +516,8 @@
# since twisted expects to be able to unregister
# connections in a post-shutdown hook.
reactor._io_loop.close(all_fds=True)
+ restore_signal_handlers(self.__saved_signals)
+
TornadoTest.__name__ = test_class.__name__
return TornadoTest
test_subclass = make_test_subclass(test_class)
diff --git a/tornado/test/util_test.py b/tornado/test/util_test.py
new file mode 100644
index 0000000..8707b0d
--- /dev/null
+++ b/tornado/test/util_test.py
@@ -0,0 +1,26 @@
+from __future__ import absolute_import, division, with_statement
+import sys
+import unittest
+
+from tornado.util import raise_exc_info
+
+
+class RaiseExcInfoTest(unittest.TestCase):
+ def test_two_arg_exception(self):
+ # This test would fail on python 3 if raise_exc_info were simply
+ # a three-argument raise statement, because TwoArgException
+ # doesn't have a "copy constructor"
+ class TwoArgException(Exception):
+ def __init__(self, a, b):
+ super(TwoArgException, self).__init__()
+ self.a, self.b = a, b
+
+ try:
+ raise TwoArgException(1, 2)
+ except TwoArgException:
+ exc_info = sys.exc_info()
+ try:
+ raise_exc_info(exc_info)
+ self.fail("didn't get expected exception")
+ except TwoArgException, e:
+ self.assertTrue(e is exc_info[1])
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index 9f4c860..23d8d07 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -1,9 +1,11 @@
+from __future__ import absolute_import, division, with_statement
+from tornado import gen
from tornado.escape import json_decode, utf8, to_unicode, recursive_unicode, native_str
from tornado.iostream import IOStream
from tornado.template import DictLoader
from tornado.testing import LogTrapTestCase, AsyncHTTPTestCase
from tornado.util import b, bytes_type, ObjectDict
-from tornado.web import RequestHandler, authenticated, Application, asynchronous, url, HTTPError, StaticFileHandler, _create_signature
+from tornado.web import RequestHandler, authenticated, Application, asynchronous, url, HTTPError, StaticFileHandler, _create_signature, create_signed_value
import binascii
import logging
@@ -12,6 +14,17 @@
import socket
import sys
+
+class SimpleHandlerTestCase(AsyncHTTPTestCase):
+ """Simplified base class for tests that work with a single handler class.
+
+ To use, define a nested class named ``Handler``.
+ """
+ def get_app(self):
+ return Application([('/', self.Handler)],
+ log_function=lambda x: None)
+
+
class CookieTestRequestHandler(RequestHandler):
# stub out enough methods to make the secure_cookie functions work
def __init__(self):
@@ -25,6 +38,7 @@
def set_cookie(self, name, value, expires_days=None):
self._cookies[name] = value
+
class SecureCookieTest(LogTrapTestCase):
def test_round_trip(self):
handler = CookieTestRequestHandler()
@@ -56,6 +70,14 @@
# it gets rejected
assert handler.get_secure_cookie('foo') is None
+ def test_arbitrary_bytes(self):
+ # Secure cookies accept arbitrary data (which is base64 encoded).
+ # Note that normal cookies accept only a subset of ascii.
+ handler = CookieTestRequestHandler()
+ handler.set_secure_cookie('foo', b('\xe9'))
+ self.assertEqual(handler.get_secure_cookie('foo'), b('\xe9'))
+
+
class CookieTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
class SetCookieHandler(RequestHandler):
@@ -83,20 +105,29 @@
self.set_cookie("semicolon", "a;b")
self.set_cookie("quote", 'a"b')
+ class SetCookieOverwriteHandler(RequestHandler):
+ def get(self):
+ self.set_cookie("a", "b", domain="example.com")
+ self.set_cookie("c", "d", domain="example.com")
+ # A second call with the same name clobbers the first.
+ # Attributes from the first call are not carried over.
+ self.set_cookie("a", "e")
return Application([
("/set", SetCookieHandler),
("/get", GetCookieHandler),
("/set_domain", SetCookieDomainHandler),
("/special_char", SetCookieSpecialCharHandler),
+ ("/set_overwrite", SetCookieOverwriteHandler),
])
def test_set_cookie(self):
response = self.fetch("/set")
- self.assertEqual(response.headers.get_list("Set-Cookie"),
- ["str=asdf; Path=/",
+ self.assertEqual(sorted(response.headers.get_list("Set-Cookie")),
+ ["bytes=zxcv; Path=/",
+ "str=asdf; Path=/",
"unicode=qwer; Path=/",
- "bytes=zxcv; Path=/"])
+ ])
def test_get_cookie(self):
response = self.fetch("/get", headers={"Cookie": "foo=bar"})
@@ -115,14 +146,14 @@
def test_cookie_special_char(self):
response = self.fetch("/special_char")
- headers = response.headers.get_list("Set-Cookie")
+ headers = sorted(response.headers.get_list("Set-Cookie"))
self.assertEqual(len(headers), 3)
self.assertEqual(headers[0], 'equals="a=b"; Path=/')
+ self.assertEqual(headers[1], 'quote="a\\"b"; Path=/')
# python 2.7 octal-escapes the semicolon; older versions leave it alone
- self.assertTrue(headers[1] in ('semicolon="a;b"; Path=/',
+ self.assertTrue(headers[2] in ('semicolon="a;b"; Path=/',
'semicolon="a\\073b"; Path=/'),
- headers[1])
- self.assertEqual(headers[2], 'quote="a\\"b"; Path=/')
+ headers[2])
data = [('foo=a=b', 'a=b'),
('foo="a=b"', 'a=b'),
@@ -136,6 +167,13 @@
response = self.fetch("/get", headers={"Cookie": header})
self.assertEqual(response.body, utf8(expected))
+ def test_set_cookie_overwrite(self):
+ response = self.fetch("/set_overwrite")
+ headers = response.headers.get_list("Set-Cookie")
+ self.assertEqual(sorted(headers),
+ ["a=e; Path=/", "c=d; Domain=example.com; Path=/"])
+
+
class AuthRedirectRequestHandler(RequestHandler):
def initialize(self, login_url):
self.login_url = login_url
@@ -148,6 +186,7 @@
# we'll never actually get here because the test doesn't follow redirects
self.send_error(500)
+
class AuthRedirectTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/relative', AuthRedirectRequestHandler,
@@ -183,6 +222,7 @@
def on_connection_close(self):
self.test.on_connection_close()
+
class ConnectionCloseTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
return Application([('/', ConnectionCloseHandler, dict(test=self))])
@@ -202,8 +242,9 @@
logging.info('connection closed')
self.stop()
+
class EchoHandler(RequestHandler):
- def get(self, path):
+ def get(self, *path_args):
# Type checks: web.py interfaces convert argument values to
# unicode strings (by default, but see also decode_argument).
# In httpserver.py (i.e. self.request.arguments), they're left
@@ -214,27 +255,50 @@
assert type(value) == bytes_type, repr(value)
for value in self.get_arguments(key):
assert type(value) == unicode, repr(value)
- assert type(path) == unicode, repr(path)
- self.write(dict(path=path,
+ for arg in path_args:
+ assert type(arg) == unicode, repr(arg)
+ self.write(dict(path=self.request.path,
+ path_args=path_args,
args=recursive_unicode(self.request.arguments)))
+
class RequestEncodingTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
- return Application([("/(.*)", EchoHandler)])
+ return Application([
+ ("/group/(.*)", EchoHandler),
+ ("/slashes/([^/]*)/([^/]*)", EchoHandler),
+ ])
- def test_question_mark(self):
+ def fetch_json(self, path):
+ return json_decode(self.fetch(path).body)
+
+ def test_group_question_mark(self):
# Ensure that url-encoded question marks are handled properly
- self.assertEqual(json_decode(self.fetch('/%3F').body),
- dict(path='?', args={}))
- self.assertEqual(json_decode(self.fetch('/%3F?%3F=%3F').body),
- dict(path='?', args={'?': ['?']}))
+ self.assertEqual(self.fetch_json('/group/%3F'),
+ dict(path='/group/%3F', path_args=['?'], args={}))
+ self.assertEqual(self.fetch_json('/group/%3F?%3F=%3F'),
+ dict(path='/group/%3F', path_args=['?'], args={'?': ['?']}))
- def test_path_encoding(self):
+ def test_group_encoding(self):
# Path components and query arguments should be decoded the same way
- self.assertEqual(json_decode(self.fetch('/%C3%A9?arg=%C3%A9').body),
- {u"path":u"\u00e9",
+ self.assertEqual(self.fetch_json('/group/%C3%A9?arg=%C3%A9'),
+ {u"path": u"/group/%C3%A9",
+ u"path_args": [u"\u00e9"],
u"args": {u"arg": [u"\u00e9"]}})
+ def test_slashes(self):
+ # Slashes may be escaped to appear as a single "directory" in the path,
+ # but they are then unescaped when passed to the get() method.
+ self.assertEqual(self.fetch_json('/slashes/foo/bar'),
+ dict(path="/slashes/foo/bar",
+ path_args=["foo", "bar"],
+ args={}))
+ self.assertEqual(self.fetch_json('/slashes/a%2Fb/c%2Fd'),
+ dict(path="/slashes/a%2Fb/c%2Fd",
+ path_args=["a/b", "c/d"],
+ args={}))
+
+
class TypeCheckHandler(RequestHandler):
def prepare(self):
self.errors = {}
@@ -247,6 +311,12 @@
self.check_type('cookie_key', self.cookies.keys()[0], str)
self.check_type('cookie_value', self.cookies.values()[0].value, str)
+ # Secure cookies return bytes because they can contain arbitrary
+ # data, but regular cookies are native strings.
+ assert self.cookies.keys() == ['asdf']
+ self.check_type('get_secure_cookie', self.get_secure_cookie('asdf'), bytes_type)
+ self.check_type('get_cookie', self.get_cookie('asdf'), str)
+
self.check_type('xsrf_token', self.xsrf_token, bytes_type)
self.check_type('xsrf_form_html', self.xsrf_form_html(), str)
@@ -270,6 +340,7 @@
self.errors[name] = "expected %s, got %s" % (expected_type,
actual_type)
+
class DecodeArgHandler(RequestHandler):
def decode_argument(self, value, name=None):
assert type(value) == bytes_type, repr(value)
@@ -290,18 +361,22 @@
'query': describe(self.get_argument("foo")),
})
+
class LinkifyHandler(RequestHandler):
def get(self):
self.render("linkify.html", message="http://example.com")
+
class UIModuleResourceHandler(RequestHandler):
def get(self):
- self.render("page.html", entries=[1,2])
+ self.render("page.html", entries=[1, 2])
+
class OptionalPathHandler(RequestHandler):
def get(self, path):
self.write({"path": path})
+
class FlowControlHandler(RequestHandler):
# These writes are too small to demonstrate real flow control,
# but at least it shows that the callbacks get run.
@@ -318,6 +393,7 @@
self.write("3")
self.finish()
+
class MultiHeaderHandler(RequestHandler):
def get(self):
self.set_header("x-overwrite", "1")
@@ -325,6 +401,7 @@
self.add_header("x-multi", 3)
self.add_header("x-multi", "4")
+
class RedirectHandler(RequestHandler):
def get(self):
if self.get_argument('permanent', None) is not None:
@@ -335,7 +412,33 @@
raise Exception("didn't get permanent or status arguments")
+class EmptyFlushCallbackHandler(RequestHandler):
+ @gen.engine
+ @asynchronous
+ def get(self):
+ # Ensure that the flush callback is run whether or not there
+ # was any output.
+ yield gen.Task(self.flush) # "empty" flush, but writes headers
+ yield gen.Task(self.flush) # empty flush
+ self.write("o")
+ yield gen.Task(self.flush) # flushes the "o"
+ yield gen.Task(self.flush) # empty flush
+ self.finish("k")
+
+
+class HeaderInjectionHandler(RequestHandler):
+ def get(self):
+ try:
+ self.set_header("X-Foo", "foo\r\nX-Bar: baz")
+ raise Exception("Didn't get expected exception")
+ except ValueError, e:
+ assert "Unsafe header value" in str(e)
+ self.finish(b("ok"))
+
+
class WebTest(AsyncHTTPTestCase, LogTrapTestCase):
+ COOKIE_SECRET = "WebTest.COOKIE_SECRET"
+
def get_app(self):
loader = DictLoader({
"linkify.html": "{% module linkify(message) %}",
@@ -351,7 +454,7 @@
})
urls = [
url("/typecheck/(.*)", TypeCheckHandler, name='typecheck'),
- url("/decode_arg/(.*)", DecodeArgHandler),
+ url("/decode_arg/(.*)", DecodeArgHandler, name='decode_arg'),
url("/decode_arg_kw/(?P<arg>.*)", DecodeArgHandler),
url("/linkify", LinkifyHandler),
url("/uimodule_resources", UIModuleResourceHandler),
@@ -359,10 +462,14 @@
url("/flow_control", FlowControlHandler),
url("/multi_header", MultiHeaderHandler),
url("/redirect", RedirectHandler),
+ url("/empty_flush", EmptyFlushCallbackHandler),
+ url("/header_injection", HeaderInjectionHandler),
]
- return Application(urls,
- template_loader=loader,
- autoescape="xhtml_escape")
+ self.app = Application(urls,
+ template_loader=loader,
+ autoescape="xhtml_escape",
+ cookie_secret=self.COOKIE_SECRET)
+ return self.app
def fetch_json(self, *args, **kwargs):
response = self.fetch(*args, **kwargs)
@@ -370,13 +477,15 @@
return json_decode(response.body)
def test_types(self):
+ cookie_value = to_unicode(create_signed_value(self.COOKIE_SECRET,
+ "asdf", "qwer"))
response = self.fetch("/typecheck/asdf?foo=bar",
- headers={"Cookie": "cook=ie"})
+ headers={"Cookie": "asdf=" + cookie_value})
data = json_decode(response.body)
self.assertEqual(data, {})
response = self.fetch("/typecheck/asdf?foo=bar", method="POST",
- headers={"Cookie": "cook=ie"},
+ headers={"Cookie": "asdf=" + cookie_value},
body="foo=bar")
def test_decode_argument(self):
@@ -400,6 +509,16 @@
u'query': [u'bytes', u'c3a9'],
})
+ def test_reverse_url(self):
+ self.assertEqual(self.app.reverse_url('decode_arg', 'foo'),
+ '/decode_arg/foo')
+ self.assertEqual(self.app.reverse_url('decode_arg', 42),
+ '/decode_arg/42')
+ self.assertEqual(self.app.reverse_url('decode_arg', b('\xe9')),
+ '/decode_arg/%E9')
+ self.assertEqual(self.app.reverse_url('decode_arg', u'\u00e9'),
+ '/decode_arg/%C3%A9')
+
def test_uimodule_unescaped(self):
response = self.fetch("/linkify")
self.assertEqual(response.body,
@@ -452,6 +571,14 @@
response = self.fetch("/redirect?status=307", follow_redirects=False)
self.assertEqual(response.code, 307)
+ def test_empty_flush(self):
+ response = self.fetch("/empty_flush")
+ self.assertEqual(response.body, b("ok"))
+
+ def test_header_injection(self):
+ response = self.fetch("/header_injection")
+ self.assertEqual(response.body, b("ok"))
+
class ErrorResponseTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
@@ -459,14 +586,14 @@
def get(self):
if self.get_argument("status", None):
raise HTTPError(int(self.get_argument("status")))
- 1/0
+ 1 / 0
class WriteErrorHandler(RequestHandler):
def get(self):
if self.get_argument("status", None):
self.send_error(int(self.get_argument("status")))
else:
- 1/0
+ 1 / 0
def write_error(self, status_code, **kwargs):
self.set_header("Content-Type", "text/plain")
@@ -480,7 +607,7 @@
if self.get_argument("status", None):
self.send_error(int(self.get_argument("status")))
else:
- 1/0
+ 1 / 0
def get_error_html(self, status_code, **kwargs):
self.set_header("Content-Type", "text/plain")
@@ -491,12 +618,11 @@
class FailedWriteErrorHandler(RequestHandler):
def get(self):
- 1/0
+ 1 / 0
def write_error(self, status_code, **kwargs):
raise Exception("exception in write_error")
-
return Application([
url("/default", DefaultHandler),
url("/write_error", WriteErrorHandler),
@@ -536,6 +662,7 @@
self.assertEqual(response.code, 500)
self.assertEqual(b(""), response.body)
+
class StaticFileTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
class StaticUrlHandler(RequestHandler):
@@ -544,6 +671,7 @@
class AbsoluteStaticUrlHandler(RequestHandler):
include_host = True
+
def get(self, path):
self.write(self.static_url(path))
@@ -598,6 +726,15 @@
response = self.fetch(path % int(include_host))
self.assertEqual(response.body, utf8(str(True)))
+ def test_static_304(self):
+ response1 = self.fetch("/static/robots.txt")
+ response2 = self.fetch("/static/robots.txt", headers={
+ 'If-Modified-Since': response1.headers['Last-Modified']})
+ self.assertEqual(response2.code, 304)
+ self.assertTrue('Content-Length' not in response2.headers)
+ self.assertTrue('Last-Modified' not in response2.headers)
+
+
class CustomStaticFileTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
class MyStaticFileHandler(StaticFileHandler):
@@ -608,7 +745,7 @@
@classmethod
def make_static_url(cls, settings, path):
- version_hash = cls.get_version(settings, path)
+ cls.get_version(settings, path)
extension_index = path.rindex('.')
before_version = path[:extension_index]
after_version = path[(extension_index + 1):]
@@ -636,3 +773,54 @@
def test_static_url(self):
response = self.fetch("/static_url/foo.txt")
self.assertEqual(response.body, b("/static/foo.42.txt"))
+
+
+class NamedURLSpecGroupsTest(AsyncHTTPTestCase, LogTrapTestCase):
+ def get_app(self):
+ class EchoHandler(RequestHandler):
+ def get(self, path):
+ self.write(path)
+
+ return Application([("/str/(?P<path>.*)", EchoHandler),
+ (u"/unicode/(?P<path>.*)", EchoHandler)])
+
+ def test_named_urlspec_groups(self):
+ response = self.fetch("/str/foo")
+ self.assertEqual(response.body, b("foo"))
+
+ response = self.fetch("/unicode/bar")
+ self.assertEqual(response.body, b("bar"))
+
+
+class ClearHeaderTest(SimpleHandlerTestCase):
+ class Handler(RequestHandler):
+ def get(self):
+ self.set_header("h1", "foo")
+ self.set_header("h2", "bar")
+ self.clear_header("h1")
+ self.clear_header("nonexistent")
+
+ def test_clear_header(self):
+ response = self.fetch("/")
+ self.assertTrue("h1" not in response.headers)
+ self.assertEqual(response.headers["h2"], "bar")
+
+
+class Header304Test(SimpleHandlerTestCase):
+ class Handler(RequestHandler):
+ def get(self):
+ self.set_header("Content-Language", "en_US")
+ self.write("hello")
+
+ def test_304_headers(self):
+ response1 = self.fetch('/')
+ self.assertEqual(response1.headers["Content-Length"], "5")
+ self.assertEqual(response1.headers["Content-Language"], "en_US")
+
+ response2 = self.fetch('/', headers={
+ 'If-None-Match': response1.headers["Etag"]})
+ self.assertEqual(response2.code, 304)
+ self.assertTrue("Content-Length" not in response2.headers)
+ self.assertTrue("Content-Language" not in response2.headers)
+ # Not an entity header, but should not be added to 304s by chunking
+ self.assertTrue("Transfer-Encoding" not in response2.headers)
diff --git a/tornado/test/wsgi_test.py b/tornado/test/wsgi_test.py
index 9c3ff7f..54c3740 100644
--- a/tornado/test/wsgi_test.py
+++ b/tornado/test/wsgi_test.py
@@ -1,10 +1,14 @@
+from __future__ import absolute_import, division, with_statement
from wsgiref.validate import validator
+from tornado.escape import json_decode
+from tornado.test.httpserver_test import TypeCheckHandler
from tornado.testing import AsyncHTTPTestCase, LogTrapTestCase
from tornado.util import b
from tornado.web import RequestHandler
from tornado.wsgi import WSGIApplication, WSGIContainer
+
class WSGIContainerTest(AsyncHTTPTestCase, LogTrapTestCase):
def wsgi_app(self, environ, start_response):
status = "200 OK"
@@ -19,6 +23,7 @@
response = self.fetch("/")
self.assertEqual(response.body, b("Hello world!"))
+
class WSGIApplicationTest(AsyncHTTPTestCase, LogTrapTestCase):
def get_app(self):
class HelloHandler(RequestHandler):
@@ -36,6 +41,7 @@
return WSGIContainer(validator(WSGIApplication([
("/", HelloHandler),
("/path/(.*)", PathQuotingHandler),
+ ("/typecheck", TypeCheckHandler),
])))
def test_simple(self):
@@ -46,11 +52,22 @@
response = self.fetch("/path/foo%20bar%C3%A9")
self.assertEqual(response.body, u"foo bar\u00e9".encode("utf-8"))
+ def test_types(self):
+ headers = {"Cookie": "foo=bar"}
+ response = self.fetch("/typecheck?foo=bar", headers=headers)
+ data = json_decode(response.body)
+ self.assertEqual(data, {})
+
+ response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
+ data = json_decode(response.body)
+ self.assertEqual(data, {})
+
# This is kind of hacky, but run some of the HTTPServer tests through
# WSGIContainer and WSGIApplication to make sure everything survives
# repeated disassembly and reassembly.
from tornado.test.httpserver_test import HTTPConnectionTest
+
class WSGIConnectionTest(HTTPConnectionTest):
def get_app(self):
return WSGIContainer(validator(WSGIApplication(self.get_handlers())))
diff --git a/tornado/testing.py b/tornado/testing.py
index b2b983d..fccdb86 100644
--- a/tornado/testing.py
+++ b/tornado/testing.py
@@ -18,7 +18,7 @@
information.
"""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
from cStringIO import StringIO
try:
@@ -32,6 +32,7 @@
HTTPServer = None
IOLoop = None
from tornado.stack_context import StackContext, NullContext
+from tornado.util import raise_exc_info
import contextlib
import logging
import signal
@@ -40,6 +41,8 @@
import unittest
_next_port = 10000
+
+
def get_unused_port():
"""Returns a (hopefully) unused port number."""
global _next_port
@@ -47,6 +50,7 @@
_next_port = _next_port + 1
return port
+
class AsyncTestCase(unittest.TestCase):
"""TestCase subclass for testing IOLoop-based asynchronous code.
@@ -104,6 +108,7 @@
self.__running = False
self.__failure = None
self.__stop_args = None
+ self.__timeout = None
def setUp(self):
super(AsyncTestCase, self).setUp()
@@ -134,9 +139,18 @@
self.__failure = sys.exc_info()
self.stop()
+ def __rethrow(self):
+ if self.__failure is not None:
+ failure = self.__failure
+ self.__failure = None
+ raise_exc_info(failure)
+
def run(self, result=None):
with StackContext(self._stack_context):
super(AsyncTestCase, self).run(result)
+ # In case an exception escaped super.run or the StackContext caught
+ # an exception when there wasn't a wait() to re-raise it, do so here.
+ self.__rethrow()
def stop(self, _arg=None, **kwargs):
'''Stops the ioloop, causing one pending (or future) call to wait()
@@ -165,12 +179,14 @@
def timeout_func():
try:
raise self.failureException(
- 'Async operation timed out after %d seconds' %
+ 'Async operation timed out after %s seconds' %
timeout)
except Exception:
self.__failure = sys.exc_info()
self.stop()
- self.io_loop.add_timeout(time.time() + timeout, timeout_func)
+ if self.__timeout is not None:
+ self.io_loop.remove_timeout(self.__timeout)
+ self.__timeout = self.io_loop.add_timeout(time.time() + timeout, timeout_func)
while True:
self.__running = True
with NullContext():
@@ -183,13 +199,7 @@
break
assert self.__stopped
self.__stopped = False
- if self.__failure is not None:
- # 2to3 isn't smart enough to convert three-argument raise
- # statements correctly in some cases.
- if isinstance(self.__failure[1], self.__failure[0]):
- raise self.__failure[1], None, self.__failure[2]
- else:
- raise self.__failure[0], self.__failure[1], self.__failure[2]
+ self.__rethrow()
result = self.__stop_args
self.__stop_args = None
return result
@@ -269,6 +279,7 @@
self.http_client.close()
super(AsyncHTTPTestCase, self).tearDown()
+
class LogTrapTestCase(unittest.TestCase):
"""A test case that captures and discards all logging output
if the test passes.
@@ -308,6 +319,7 @@
finally:
handler.stream = old_stream
+
def main():
"""A simple test runner.
diff --git a/tornado/util.py b/tornado/util.py
index 6752401..e19ca90 100644
--- a/tornado/util.py
+++ b/tornado/util.py
@@ -1,5 +1,8 @@
"""Miscellaneous utility functions."""
+from __future__ import absolute_import, division, with_statement
+
+
class ObjectDict(dict):
"""Makes a dictionary behave like an object."""
def __getattr__(self, name):
@@ -42,6 +45,25 @@
return s
bytes_type = str
+
+def raise_exc_info(exc_info):
+ """Re-raise an exception (with original traceback) from an exc_info tuple.
+
+ The argument is a ``(type, value, traceback)`` tuple as returned by
+ `sys.exc_info`.
+ """
+ # 2to3 isn't smart enough to convert three-argument raise
+ # statements correctly in some cases.
+ if isinstance(exc_info[1], exc_info[0]):
+ raise exc_info[1], None, exc_info[2]
+ # After 2to3: raise exc_info[1].with_traceback(exc_info[2])
+ else:
+ # I think this branch is only taken for string exceptions,
+ # which were removed in Python 2.6.
+ raise exc_info[0], exc_info[1], exc_info[2]
+ # After 2to3: raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
+
+
def doctests():
import doctest
return doctest.DocTestSuite()
diff --git a/tornado/web.py b/tornado/web.py
index c31eb67..a7c9637 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -49,7 +49,7 @@
back to the main thread before finishing the request.
"""
-from __future__ import with_statement
+from __future__ import absolute_import, division, with_statement
import Cookie
import base64
@@ -83,13 +83,14 @@
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
-from tornado.util import b, bytes_type, import_object, ObjectDict
+from tornado.util import b, bytes_type, import_object, ObjectDict, raise_exc_info
try:
from io import BytesIO # python 3
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
+
class RequestHandler(object):
"""Subclass this class and define get() or post() to make a handler.
@@ -97,7 +98,8 @@
should override the class variable SUPPORTED_METHODS in your
RequestHandler class.
"""
- SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PUT", "OPTIONS")
+ SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
+ "OPTIONS")
_template_loaders = {} # {path: template.BaseLoader}
_template_loader_lock = threading.Lock()
@@ -121,7 +123,7 @@
self.ui["modules"] = self.ui["_modules"]
self.clear()
# Check since connection is not available in WSGI
- if hasattr(self.request, "connection"):
+ if getattr(self.request, "connection", None):
self.request.connection.stream.set_close_callback(
self.on_connection_close)
self.initialize(**kwargs)
@@ -164,6 +166,9 @@
def delete(self, *args, **kwargs):
raise HTTPError(405)
+ def patch(self, *args, **kwargs):
+ raise HTTPError(405)
+
def put(self, *args, **kwargs):
raise HTTPError(405)
@@ -208,7 +213,7 @@
"""Resets all headers and content for this response."""
# The performance cost of tornado.httputil.HTTPHeaders is significant
# (slowing down a benchmark with a trivial handler by more than 10%),
- # and its case-normalization is not generally necessary for
+ # and its case-normalization is not generally necessary for
# headers we generate on the server side, so use a plain dict
# and list instead.
self._headers = {
@@ -259,6 +264,15 @@
"""
self._list_headers.append((name, self._convert_header_value(value)))
+ def clear_header(self, name):
+ """Clears an outgoing header, undoing a previous `set_header` call.
+
+ Note that this method does not apply to multi-valued headers
+ set by `add_header`.
+ """
+ if name in self._headers:
+ del self._headers[name]
+
def _convert_header_value(self, value):
if isinstance(value, bytes_type):
pass
@@ -275,12 +289,12 @@
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
- if len(value) > 4000 or re.match(b(r"[\x00-\x1f]"), value):
+ if len(value) > 4000 or re.search(b(r"[\x00-\x1f]"), value):
raise ValueError("Unsafe header value %r", value)
return value
-
_ARG_DEFAULT = []
+
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
@@ -358,25 +372,27 @@
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
- if not hasattr(self, "_new_cookies"):
- self._new_cookies = []
- new_cookie = Cookie.SimpleCookie()
- self._new_cookies.append(new_cookie)
- new_cookie[name] = value
+ if not hasattr(self, "_new_cookie"):
+ self._new_cookie = Cookie.SimpleCookie()
+ if name in self._new_cookie:
+ del self._new_cookie[name]
+ self._new_cookie[name] = value
+ morsel = self._new_cookie[name]
if domain:
- new_cookie[name]["domain"] = domain
+ morsel["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
timestamp = calendar.timegm(expires.utctimetuple())
- new_cookie[name]["expires"] = email.utils.formatdate(
+ morsel["expires"] = email.utils.formatdate(
timestamp, localtime=False, usegmt=True)
if path:
- new_cookie[name]["path"] = path
+ morsel["path"] = path
for k, v in kwargs.iteritems():
- if k == 'max_age': k = 'max-age'
- new_cookie[name][k] = v
+ if k == 'max_age':
+ k = 'max-age'
+ morsel[k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
@@ -401,6 +417,9 @@
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
+
+ Secure cookies may contain arbitrary byte values, not just unicode
+ strings (unlike regular cookies)
"""
self.set_cookie(name, self.create_signed_value(name, value),
expires_days=expires_days, **kwargs)
@@ -417,9 +436,14 @@
name, value)
def get_secure_cookie(self, name, value=None, max_age_days=31):
- """Returns the given signed cookie if it validates, or None."""
+ """Returns the given signed cookie if it validates, or None.
+
+ The decoded cookie value is returned as a byte string (unlike
+ `get_cookie`).
+ """
self.require_setting("cookie_secret", "secure cookies")
- if value is None: value = self.get_cookie(name)
+ if value is None:
+ value = self.get_cookie(name)
return decode_signed_value(self.application.settings["cookie_secret"],
name, value, max_age_days=max_age_days)
@@ -482,7 +506,8 @@
html_bodies = []
for module in getattr(self, "_active_modules", {}).itervalues():
embed_part = module.embedded_javascript()
- if embed_part: js_embed.append(utf8(embed_part))
+ if embed_part:
+ js_embed.append(utf8(embed_part))
file_part = module.javascript_files()
if file_part:
if isinstance(file_part, (unicode, bytes_type)):
@@ -490,7 +515,8 @@
else:
js_files.extend(file_part)
embed_part = module.embedded_css()
- if embed_part: css_embed.append(utf8(embed_part))
+ if embed_part:
+ css_embed.append(utf8(embed_part))
file_part = module.css_files()
if file_part:
if isinstance(file_part, (unicode, bytes_type)):
@@ -498,9 +524,12 @@
else:
css_files.extend(file_part)
head_part = module.html_head()
- if head_part: html_heads.append(utf8(head_part))
+ if head_part:
+ html_heads.append(utf8(head_part))
body_part = module.html_body()
- if body_part: html_bodies.append(utf8(body_part))
+ if body_part:
+ html_bodies.append(utf8(body_part))
+
def is_absolute(path):
return any(path.startswith(x) for x in ["/", "http:", "https:"])
if js_files:
@@ -579,7 +608,7 @@
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
- reverse_url=self.application.reverse_url
+ reverse_url=self.reverse_url
)
args.update(self.ui)
args.update(kwargs)
@@ -596,10 +625,9 @@
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
-
def flush(self, include_footers=False, callback=None):
"""Flushes the current output buffer to the network.
-
+
The ``callback`` argument, if given, can be used for flow control:
it will be run when all flushed data has been written to the socket.
Note that only one flush callback can be outstanding at a time;
@@ -614,8 +642,9 @@
if not self._headers_written:
self._headers_written = True
for transform in self._transforms:
- self._headers, chunk = transform.transform_first_chunk(
- self._headers, chunk, include_footers)
+ self._status_code, self._headers, chunk = \
+ transform.transform_first_chunk(
+ self._status_code, self._headers, chunk, include_footers)
headers = self._generate_headers()
else:
for transform in self._transforms:
@@ -624,11 +653,11 @@
# Ignore the chunk and only write the headers for HEAD requests
if self.request.method == "HEAD":
- if headers: self.request.write(headers, callback=callback)
+ if headers:
+ self.request.write(headers, callback=callback)
return
- if headers or chunk:
- self.request.write(headers + chunk, callback=callback)
+ self.request.write(headers + chunk, callback=callback)
def finish(self, chunk=None):
"""Finishes this response, ending the HTTP request."""
@@ -637,7 +666,8 @@
"by using async operations without the "
"@asynchronous decorator.")
- if chunk is not None: self.write(chunk)
+ if chunk is not None:
+ self.write(chunk)
# Automatically support ETags and add the Content-Length header if
# we have not flushed any content yet.
@@ -647,13 +677,15 @@
"Etag" not in self._headers):
etag = self.compute_etag()
if etag is not None:
+ self.set_header("Etag", etag)
inm = self.request.headers.get("If-None-Match")
if inm and inm.find(etag) != -1:
self._write_buffer = []
self.set_status(304)
- else:
- self.set_header("Etag", etag)
- if "Content-Length" not in self._headers:
+ if self._status_code == 304:
+ assert not self._write_buffer, "Cannot send body with 304"
+ self._clear_headers_for_304()
+ elif "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
@@ -720,7 +752,7 @@
kwargs['exception'] = exc_info[1]
try:
# Put the traceback into sys.exc_info()
- raise exc_info[0], exc_info[1], exc_info[2]
+ raise_exc_info(exc_info)
except Exception:
self.finish(self.get_error_html(status_code, **kwargs))
else:
@@ -733,7 +765,7 @@
self.write(line)
self.finish()
else:
- self.finish("<html><title>%(code)d: %(message)s</title>"
+ self.finish("<html><title>%(code)d: %(message)s</title>"
"<body>%(code)d: %(message)s</body></html>" % {
"code": status_code,
"message": httplib.responses[status_code],
@@ -926,6 +958,7 @@
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
+
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
@@ -964,7 +997,7 @@
# the exception value instead of the full triple,
# so re-raise the exception to ensure that it's in
# sys.exc_info()
- raise type, value, traceback
+ raise_exc_info((type, value, traceback))
except Exception:
self._handle_request_exception(value)
return True
@@ -984,7 +1017,7 @@
if not self._finished:
args = [self.decode_argument(arg) for arg in args]
kwargs = dict((k, self.decode_argument(v, name=k))
- for (k,v) in kwargs.iteritems())
+ for (k, v) in kwargs.iteritems())
getattr(self, self.request.method.lower())(*args, **kwargs)
if self._auto_finish and not self._finished:
self.finish()
@@ -995,10 +1028,10 @@
lines = [utf8(self.request.version + " " +
str(self._status_code) +
" " + httplib.responses[self._status_code])]
- lines.extend([(utf8(n) + b(": ") + utf8(v)) for n, v in
+ lines.extend([(utf8(n) + b(": ") + utf8(v)) for n, v in
itertools.chain(self._headers.iteritems(), self._list_headers)])
- for cookie_dict in getattr(self, "_new_cookies", []):
- for cookie in cookie_dict.values():
+ if hasattr(self, "_new_cookie"):
+ for cookie in self._new_cookie.values():
lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
return b("\r\n").join(lines) + b("\r\n\r\n")
@@ -1044,6 +1077,17 @@
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
+ def _clear_headers_for_304(self):
+ # 304 responses should not contain entity headers (defined in
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
+ # not explicitly allowed by
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
+ headers = ["Allow", "Content-Encoding", "Content-Language",
+ "Content-Length", "Content-MD5", "Content-Range",
+ "Content-Type", "Last-Modified"]
+ for h in headers:
+ self.clear_header(h)
+
def asynchronous(method):
"""Wrap request handler methods with this if they are asynchronous.
@@ -1088,8 +1132,9 @@
if self.request.method in ("GET", "HEAD"):
uri = self.request.path.rstrip("/")
if uri: # don't try to redirect '/' to ''
- if self.request.query: uri += "?" + self.request.query
- self.redirect(uri)
+ if self.request.query:
+ uri += "?" + self.request.query
+ self.redirect(uri, permanent=True)
return
else:
raise HTTPError(404)
@@ -1109,8 +1154,9 @@
if not self.request.path.endswith("/"):
if self.request.method in ("GET", "HEAD"):
uri = self.request.path + "/"
- if self.request.query: uri += "?" + self.request.query
- self.redirect(uri)
+ if self.request.query:
+ uri += "?" + self.request.query
+ self.redirect(uri, permanent=True)
return
raise HTTPError(404)
return method(self, *args, **kwargs)
@@ -1200,7 +1246,8 @@
r"/(favicon\.ico)", r"/(robots\.txt)"]:
handlers.insert(0, (pattern, static_handler_class,
static_handler_args))
- if handlers: self.add_handlers(".*$", handlers)
+ if handlers:
+ self.add_handlers(".*$", handlers)
# Automatically reload modified modules
if self.settings.get("debug") and not wsgi:
@@ -1292,7 +1339,8 @@
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
- for m in methods: self._load_ui_methods(m)
+ for m in methods:
+ self._load_ui_methods(m)
else:
for name, fn in methods.iteritems():
if not name.startswith("_") and hasattr(fn, "__call__") \
@@ -1304,7 +1352,8 @@
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
- for m in modules: self._load_ui_modules(m)
+ for m in modules:
+ self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.iteritems():
@@ -1333,7 +1382,8 @@
# None-safe wrapper around url_unescape to handle
# unmatched optional groups correctly
def unquote(s):
- if s is None: return s
+ if s is None:
+ return s
return escape.url_unescape(s, encoding=None)
# Pass matched groups to the handler. Since
# match.groups() includes both named and unnamed groups,
@@ -1343,7 +1393,7 @@
if spec.regex.groupindex:
kwargs = dict(
- (k, unquote(v))
+ (str(k), unquote(v))
for (k, v) in match.groupdict().iteritems())
else:
args = [unquote(s) for s in match.groups()]
@@ -1365,7 +1415,11 @@
def reverse_url(self, name, *args):
"""Returns a URL path for handler named `name`
- The handler must be added to the application as a named URLSpec
+ The handler must be added to the application as a named URLSpec.
+
+ Args will be substituted for capturing groups in the URLSpec regex.
+ They will be converted to strings if necessary, encoded as utf8,
+ and url-escaped.
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
@@ -1393,7 +1447,6 @@
handler._request_summary(), request_time)
-
class HTTPError(Exception):
"""An exception that will turn into an HTTP error response."""
def __init__(self, status_code, log_message=None, *args):
@@ -1455,7 +1508,7 @@
/static/images/myimage.png?v=xxx. Override ``get_cache_time`` method for
more fine-grained cache control.
"""
- CACHE_MAX_AGE = 86400*365*10 #10 years
+ CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
_static_hashes = {}
_lock = threading.Lock() # protects _static_hashes
@@ -1554,7 +1607,7 @@
This method may be overridden in subclasses (but note that it is
a class method rather than an instance method).
-
+
``settings`` is the `Application.settings` dictionary. ``path``
is the static path being requested. The url returned should be
relative to the current host.
@@ -1639,8 +1692,8 @@
def __init__(self, request):
pass
- def transform_first_chunk(self, headers, chunk, finishing):
- return headers, chunk
+ def transform_first_chunk(self, status_code, headers, chunk, finishing):
+ return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
return chunk
@@ -1652,7 +1705,7 @@
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
"""
CONTENT_TYPES = set([
- "text/plain", "text/html", "text/css", "text/xml", "application/javascript",
+ "text/plain", "text/html", "text/css", "text/xml", "application/javascript",
"application/x-javascript", "application/xml", "application/atom+xml",
"text/javascript", "application/json", "application/xhtml+xml"])
MIN_LENGTH = 5
@@ -1661,7 +1714,7 @@
self._gzipping = request.supports_http_1_1() and \
"gzip" in request.headers.get("Accept-Encoding", "")
- def transform_first_chunk(self, headers, chunk, finishing):
+ def transform_first_chunk(self, status_code, headers, chunk, finishing):
if self._gzipping:
ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
self._gzipping = (ctype in self.CONTENT_TYPES) and \
@@ -1675,7 +1728,7 @@
chunk = self.transform_chunk(chunk, finishing)
if "Content-Length" in headers:
headers["Content-Length"] = str(len(chunk))
- return headers, chunk
+ return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
if self._gzipping:
@@ -1698,15 +1751,17 @@
def __init__(self, request):
self._chunking = request.supports_http_1_1()
- def transform_first_chunk(self, headers, chunk, finishing):
- if self._chunking:
+ def transform_first_chunk(self, status_code, headers, chunk, finishing):
+ # 304 responses have no body (not even a zero-length body), and so
+ # should not have either Content-Length or Transfer-Encoding headers.
+ if self._chunking and status_code != 304:
# No need to chunk the output if a Content-Length is specified
if "Content-Length" in headers or "Transfer-Encoding" in headers:
self._chunking = False
else:
headers["Transfer-Encoding"] = "chunked"
chunk = self.transform_chunk(chunk, finishing)
- return headers, chunk
+ return status_code, headers, chunk
def transform_chunk(self, block, finishing):
if self._chunking:
@@ -1786,14 +1841,17 @@
"""Renders a template and returns it as a string."""
return self.handler.render_string(path, **kwargs)
+
class _linkify(UIModule):
def render(self, text, **kwargs):
return escape.linkify(text, **kwargs)
+
class _xsrf_form_html(UIModule):
def render(self):
return self.handler.xsrf_form_html()
+
class TemplateModule(UIModule):
"""UIModule that simply renders the given template.
@@ -1806,7 +1864,7 @@
inside the template and give it keyword arguments corresponding to
the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
Note that these resources are output once per template file, not once
- per instantiation of the template, so they must not depend on
+ per instantiation of the template, so they must not depend on
any arguments to the template.
"""
def __init__(self, handler):
@@ -1862,10 +1920,9 @@
return "".join(self._get_resources("html_body"))
-
class URLSpec(object):
"""Specifies mappings between URLs and handlers."""
- def __init__(self, pattern, handler_class, kwargs={}, name=None):
+ def __init__(self, pattern, handler_class, kwargs=None, name=None):
"""Creates a URLSpec.
Parameters:
@@ -1889,7 +1946,7 @@
("groups in url regexes must either be all named or all "
"positional: %r" % self.regex.pattern)
self.handler_class = handler_class
- self.kwargs = kwargs
+ self.kwargs = kwargs or {}
self.name = name
self._path, self._group_count = self._find_groups()
@@ -1928,7 +1985,12 @@
"not found"
if not len(args):
return self._path
- return self._path % tuple([str(a) for a in args])
+ converted_args = []
+ for a in args:
+ if not isinstance(a, (unicode, bytes_type)):
+ a = str(a)
+ converted_args.append(escape.url_escape(utf8(a)))
+ return self._path % tuple(converted_args)
url = URLSpec
@@ -1938,13 +2000,14 @@
return False
result = 0
if type(a[0]) is int: # python3 byte strings
- for x, y in zip(a,b):
+ for x, y in zip(a, b):
result |= x ^ y
else: # python2
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
+
def create_signed_value(secret, name, value):
timestamp = utf8(str(int(time.time())))
value = base64.b64encode(utf8(value))
@@ -1952,10 +2015,13 @@
value = b("|").join([value, timestamp, signature])
return value
+
def decode_signed_value(secret, name, value, max_age_days=31):
- if not value: return None
+ if not value:
+ return None
parts = utf8(value).split(b("|"))
- if len(parts) != 3: return None
+ if len(parts) != 3:
+ return None
signature = _create_signature(secret, name, parts[0], parts[1])
if not _time_independent_equals(parts[2], signature):
logging.warning("Invalid cookie signature %r", value)
@@ -1979,7 +2045,9 @@
except Exception:
return None
+
def _create_signature(secret, *parts):
hash = hmac.new(utf8(secret), digestmod=hashlib.sha1)
- for part in parts: hash.update(utf8(part))
+ for part in parts:
+ hash.update(utf8(part))
return utf8(hash.hexdigest())
diff --git a/tornado/websocket.py b/tornado/websocket.py
index 8aa7777..266b114 100644
--- a/tornado/websocket.py
+++ b/tornado/websocket.py
@@ -16,6 +16,8 @@
overriding `WebSocketHandler.allow_draft76` (see that method's
documentation for caveats).
"""
+
+from __future__ import absolute_import, division, with_statement
# Author: Jacob Kristhammar, 2010
import array
@@ -30,6 +32,7 @@
from tornado.util import bytes_type, b
+
class WebSocketHandler(tornado.web.RequestHandler):
"""Subclass this class to create a basic WebSocket handler.
@@ -202,7 +205,7 @@
may wish to override this if they are using an SSL proxy
that does not provide the X-Scheme header as understood
by HTTPServer.
-
+
Note that this is only used by the draft76 protocol.
"""
return "wss" if self.request.protocol == "https" else "ws"
@@ -249,6 +252,7 @@
"""
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
+
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
@@ -471,7 +475,7 @@
sha1 = hashlib.sha1()
sha1.update(tornado.escape.utf8(
self.request.headers.get("Sec-Websocket-Key")))
- sha1.update(b("258EAFA5-E914-47DA-95CA-C5AB0DC85B11")) # Magic value
+ sha1.update(b("258EAFA5-E914-47DA-95CA-C5AB0DC85B11")) # Magic value
return tornado.escape.native_str(base64.b64encode(sha1.digest()))
def _accept_connection(self):
@@ -552,12 +556,12 @@
self.stream.read_bytes(8, self._on_frame_length_64)
def _on_frame_length_16(self, data):
- self._frame_length = struct.unpack("!H", data)[0];
- self.stream.read_bytes(4, self._on_masking_key);
+ self._frame_length = struct.unpack("!H", data)[0]
+ self.stream.read_bytes(4, self._on_masking_key)
def _on_frame_length_64(self, data):
- self._frame_length = struct.unpack("!Q", data)[0];
- self.stream.read_bytes(4, self._on_masking_key);
+ self._frame_length = struct.unpack("!Q", data)[0]
+ self.stream.read_bytes(4, self._on_masking_key)
def _on_masking_key(self, data):
self._frame_mask = array.array("B", data)
@@ -604,9 +608,9 @@
if not self.client_terminated:
self._receive_frame()
-
def _handle_message(self, opcode, data):
- if self.client_terminated: return
+ if self.client_terminated:
+ return
if opcode == 0x1:
# UTF-8 data
diff --git a/tornado/wsgi.py b/tornado/wsgi.py
index e8f878b..e0b11d3 100644
--- a/tornado/wsgi.py
+++ b/tornado/wsgi.py
@@ -20,7 +20,7 @@
between Tornado and other Python web frameworks and servers. This module
provides WSGI support in two ways:
-* `WSGIApplication` is a version of `tornado.web.Application` that can run
+* `WSGIApplication` is a version of `tornado.web.Application` that can run
inside a WSGI server. This is useful for running a Tornado app on another
HTTP server, such as Google App Engine. See the `WSGIApplication` class
documentation for limitations that apply.
@@ -29,8 +29,9 @@
and Tornado handlers in a single server.
"""
+from __future__ import absolute_import, division, with_statement
+
import Cookie
-import cgi
import httplib
import logging
import sys
@@ -41,7 +42,7 @@
from tornado import escape
from tornado import httputil
from tornado import web
-from tornado.escape import native_str, utf8
+from tornado.escape import native_str, utf8, parse_qs_bytes
from tornado.util import b
try:
@@ -49,6 +50,7 @@
except ImportError:
from cStringIO import StringIO as BytesIO # python 2
+
class WSGIApplication(web.Application):
"""A WSGI equivalent of `tornado.web.Application`.
@@ -81,7 +83,7 @@
Since no asynchronous methods are available for WSGI applications, the
httpclient and auth modules are both not available for WSGI applications.
We support the same interface, but handlers running in a WSGIApplication
- do not support flush() or asynchronous methods.
+ do not support flush() or asynchronous methods.
"""
def __init__(self, handlers=None, default_host="", **settings):
web.Application.__init__(self, handlers, default_host, transforms=[],
@@ -93,11 +95,11 @@
status = str(handler._status_code) + " " + \
httplib.responses[handler._status_code]
headers = handler._headers.items()
- for cookie_dict in getattr(handler, "_new_cookies", []):
- for cookie in cookie_dict.values():
+ if hasattr(handler, "_new_cookie"):
+ for cookie in handler._new_cookie.values():
headers.append(("Set-Cookie", cookie.OutputString(None)))
start_response(status,
- [(native_str(k), native_str(v)) for (k,v) in headers])
+ [(native_str(k), native_str(v)) for (k, v) in headers])
return handler._write_buffer
@@ -113,10 +115,11 @@
self.query = environ.get("QUERY_STRING", "")
if self.query:
self.uri += "?" + self.query
- arguments = cgi.parse_qs(self.query)
+ arguments = parse_qs_bytes(native_str(self.query))
for name, values in arguments.iteritems():
values = [v for v in values if v]
- if values: self.arguments[name] = values
+ if values:
+ self.arguments[name] = values
self.version = "HTTP/1.1"
self.headers = httputil.HTTPHeaders()
if environ.get("CONTENT_TYPE"):
@@ -142,11 +145,11 @@
self.files = {}
content_type = self.headers.get("Content-Type", "")
if content_type.startswith("application/x-www-form-urlencoded"):
- for name, values in cgi.parse_qs(self.body).iteritems():
+ for name, values in parse_qs_bytes(native_str(self.body)).iteritems():
self.arguments.setdefault(name, []).extend(values)
elif content_type.startswith("multipart/form-data"):
if 'boundary=' in content_type:
- boundary = content_type.split('boundary=',1)[1]
+ boundary = content_type.split('boundary=', 1)[1]
if boundary:
httputil.parse_multipart_form_data(
utf8(boundary), self.body, self.arguments, self.files)
@@ -215,6 +218,7 @@
def __call__(self, request):
data = {}
response = []
+
def start_response(status, response_headers, exc_info=None):
data["status"] = status
data["headers"] = response_headers
@@ -225,11 +229,12 @@
body = b("").join(response)
if hasattr(app_response, "close"):
app_response.close()
- if not data: raise Exception("WSGI app did not call start_response")
+ if not data:
+ raise Exception("WSGI app did not call start_response")
status_code = int(data["status"].split()[0])
headers = data["headers"]
- header_set = set(k.lower() for (k,v) in headers)
+ header_set = set(k.lower() for (k, v) in headers)
body = escape.utf8(body)
if "content-length" not in header_set:
headers.append(("Content-Length", str(len(body))))
diff --git a/tox.ini b/tox.ini
index 88c8c37..4090988 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,7 +13,7 @@
[tox]
# "-full" variants include optional dependencies, to ensure
# that things work both in a bare install and with all the extras.
-envlist = py27-full, py27-curl, py25-full, py32, pypy, py25, py26, py26-full, py27
+envlist = py27-full, py27-curl, py25-full, py32, pypy, py25, py26, py26-full, py27, py32-utf8, py33
[testenv]
commands = python -m tornado.test.runtests {posargs:}
@@ -36,7 +36,9 @@
MySQL-python
pycurl
simplejson
- twisted>=11.1.0
+ twisted>=12.0.0
+ # zope.interface (used by twisted) dropped python 2.5 support in 4.0
+ zope.interface<4.0
# py26-full deliberately runs an older version of twisted to ensure
# we're still compatible with the oldest version we support.
@@ -52,7 +54,7 @@
deps =
MySQL-python
pycurl
- twisted>=11.1.0
+ twisted>=12.0.0
[testenv:py27-curl]
# Same as py27-full, but runs the tests with curl_httpclient by default.
@@ -62,11 +64,26 @@
deps =
MySQL-python
pycurl
- twisted>=11.0.0
+ twisted>=11.1.0
commands = python -m tornado.test.runtests --httpclient=tornado.curl_httpclient.CurlAsyncHTTPClient {posargs:}
# No pypy-full yet: pycurl doesn't build with pypy, and installing
# twisted under pypy takes a *very* long time. MySQL-python builds with
# pypy, but doesn't work.
+# In python 3, opening files in text mode uses a system-dependent encoding by
+# default. Run the tests with "C" (ascii) and "utf-8" locales to ensure
+# we don't have hidden dependencies on this setting.
+[testenv:py32]
+basepython = python3.2
+setenv = LANG=C
+
+[testenv:py32-utf8]
+basepython = python3.2
+setenv = LANG=en_US.utf-8
+
# No py32-full yet: none of our dependencies currently work on python3.
+
+[testenv:py33]
+# tox doesn't yet know "py33" by default
+basepython = python3.3
diff --git a/website/app.yaml b/website/app.yaml
index 92e2fbf..9484a91 100644
--- a/website/app.yaml
+++ b/website/app.yaml
@@ -1,15 +1,14 @@
application: python-tornado
version: 2
-runtime: python27
-threadsafe: yes
+runtime: python
api_version: 1
handlers:
- url: /static/tornado-0.1.tar.gz
- script: website.application
+ script: website.py
- url: /static/tornado-0.2.tar.gz
- script: website.application
+ script: website.py
- url: /static/
static_dir: static
@@ -23,10 +22,10 @@
upload: static/favicon.ico
- url: /documentation/?
- script: website.application
+ script: website.py
- url: /documentation
static_dir: sphinx/build/html
- url: /.*
- script: website.application
+ script: website.py
diff --git a/website/sphinx/options.rst b/website/sphinx/options.rst
index a201bc2..026b378 100644
--- a/website/sphinx/options.rst
+++ b/website/sphinx/options.rst
@@ -2,4 +2,16 @@
============================================
.. automodule:: tornado.options
- :members:
+
+ .. autofunction:: define
+
+ .. py:data:: options
+
+ Global options dictionary. Supports both attribute-style and
+ dict-style access.
+
+ .. autofunction:: parse_command_line
+ .. autofunction:: parse_config_file
+ .. autofunction:: print_help(file=sys.stdout)
+ .. autofunction:: enable_pretty_logging()
+ .. autoexception:: Error
diff --git a/website/sphinx/overview.rst b/website/sphinx/overview.rst
index 29c88ee..c3ef03c 100644
--- a/website/sphinx/overview.rst
+++ b/website/sphinx/overview.rst
@@ -81,9 +81,9 @@
::
- class MainHandler(tornado.web.RequestHandler):
+ class MyFormHandler(tornado.web.RequestHandler):
def get(self):
- self.write('<html><body><form action="/" method="post">'
+ self.write('<html><body><form action="/myform" method="post">'
'<input type="text" name="message">'
'<input type="submit" value="Submit">'
'</form></body></html>')
diff --git a/website/sphinx/releases.rst b/website/sphinx/releases.rst
index 05af64f..1db20b5 100644
--- a/website/sphinx/releases.rst
+++ b/website/sphinx/releases.rst
@@ -4,6 +4,8 @@
.. toctree::
:maxdepth: 2
+ releases/v2.3.0
+ releases/v2.2.1
releases/v2.2.0
releases/v2.1.1
releases/v2.1.0
diff --git a/website/sphinx/releases/v2.2.1.rst b/website/sphinx/releases/v2.2.1.rst
new file mode 100644
index 0000000..a47b1b4
--- /dev/null
+++ b/website/sphinx/releases/v2.2.1.rst
@@ -0,0 +1,20 @@
+What's new in Tornado 2.2.1
+===========================
+
+Apr 23, 2012
+------------
+
+Security fixes
+~~~~~~~~~~~~~~
+
+* `tornado.web.RequestHandler.set_header` now properly sanitizes input
+ values to protect against header injection, response splitting, etc.
+ (it has always attempted to do this, but the check was incorrect).
+ Note that redirects, the most likely source of such bugs, are protected
+ by a separate check in `RequestHandler.redirect`.
+
+Bug fixes
+~~~~~~~~~
+
+* Colored logging configuration in `tornado.options` is compatible with
+ Python 3.2.3 (and 3.3).
diff --git a/website/sphinx/releases/v2.3.0.rst b/website/sphinx/releases/v2.3.0.rst
new file mode 100644
index 0000000..e56ef02
--- /dev/null
+++ b/website/sphinx/releases/v2.3.0.rst
@@ -0,0 +1,111 @@
+What's new in Tornado 2.3
+=========================
+
+May 31, 2012
+------------
+
+HTTP clients
+~~~~~~~~~~~~
+
+* `tornado.httpclient.HTTPClient` now supports the same constructor
+ keyword arguments as `AsyncHTTPClient`.
+* The ``max_clients`` keyword argument to `AsyncHTTPClient.configure` now works.
+* `tornado.simple_httpclient` now supports the ``OPTIONS`` and ``PATCH``
+ HTTP methods.
+* `tornado.simple_httpclient` is better about closing its sockets
+ instead of leaving them for garbage collection.
+* `tornado.simple_httpclient` correctly verifies SSL certificates for
+ URLs containing IPv6 literals (this bug affected Python 2.5 and 2.6).
+* `tornado.simple_httpclient` no longer includes basic auth credentials
+ in the ``Host`` header when those credentials are extracted from the URL.
+* `tornado.simple_httpclient` no longer modifies the caller-supplied header
+ dictionary, which caused problems when following redirects.
+* `tornado.curl_httpclient` now supports client SSL certificates (using
+ the same ``client_cert`` and ``client_key`` arguments as
+ `tornado.simple_httpclient`).
+
+HTTP Server
+~~~~~~~~~~~
+
+* `HTTPServer` now works correctly with paths starting with ``//``.
+* `HTTPHeaders.copy` (inherited from `dict.copy`) now works correctly.
+* `HTTPConnection.address` is now always the socket address, even for non-IP
+ sockets. `HTTPRequest.remote_ip` is still always an IP-style address
+ (fake data is used for non-IP sockets).
+* Extra data at the end of multipart form bodies is now ignored, which fixes
+ a compatibility problem with an iOS HTTP client library.
+
+
+``IOLoop`` and ``IOStream``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* `IOStream` now has an ``error`` attribute that can be used to determine
+ why a socket was closed.
+* `tornado.iostream.IOStream.read_until` and ``read_until_regex`` are much
+ faster with large input.
+* `IOStream.write` performs better when given very large strings.
+* `IOLoop.instance()` is now thread-safe.
+
+``tornado.options``
+~~~~~~~~~~~~~~~~~~~
+
+* `tornado.options` options with ``multiple=True`` that are set more than
+ once now overwrite rather than append. This makes it possible to override
+ values set in `parse_config_file` with `parse_command_line`.
+* `tornado.options` ``--help`` output is now prettier.
+* `tornado.options.options` now supports attribute assignment.
+
+``tornado.template``
+~~~~~~~~~~~~~~~~~~~~
+
+* Template files containing non-ASCII (utf8) characters now work on Python 3
+ regardless of the locale environment variables.
+* Templates now support ``else`` clauses in
+ ``try``/``except``/``finally``/``else`` blocks.
+
+``tornado.web``
+~~~~~~~~~~~~~~~
+
+* `tornado.web.RequestHandler` now supports the ``PATCH`` HTTP method.
+ Note that this means any existing methods named ``patch`` in
+ ``RequestHandler`` subclasses will need to be renamed.
+* `tornado.web.addslash` and ``removeslash`` decorators now send permanent
+ redirects (301) instead of temporary (302).
+* `RequestHandler.flush` now invokes its callback whether there was any data
+ to flush or not.
+* Repeated calls to `RequestHandler.set_cookie` with the same name now
+ overwrite the previous cookie instead of producing additional copies.
+* `tornado.web.OutputTransform.transform_first_chunk` now takes and returns
+ a status code in addition to the headers and chunk. This is a
+ backwards-incompatible change to an interface that was never technically
+ private, but was not included in the documentation and does not appear
+ to have been used outside Tornado itself.
+* Fixed a bug on python versions before 2.6.5 when `URLSpec` regexes
+ are constructed from unicode strings and keyword arguments are extracted.
+* The ``reverse_url`` function in the template namespace now comes from
+ the `RequestHandler` rather than the `Application`. (Unless overridden,
+ `RequestHandler.reverse_url` is just an alias for the `Application`
+ method).
+* The ``Etag`` header is now returned on 304 responses to an ``If-None-Match``
+ request, improving compatibility with some caches.
+* `tornado.web` will no longer produce responses with status code 304
+ that also have entity headers such as ``Content-Length``.
+
+Other modules
+~~~~~~~~~~~~~
+
+* `tornado.auth.FacebookGraphMixin` no longer sends ``post_args`` redundantly
+ in the url.
+* The ``extra_params`` argument to `tornado.escape.linkify` may now be
+ a callable, to allow parameters to be chosen separately for each link.
+* `tornado.gen` no longer leaks `StackContexts` when a ``@gen.engine`` wrapped
+ function is called repeatedly.
+* `tornado.locale.get_supported_locales` no longer takes a meaningless
+ ``cls`` argument.
+* `StackContext` instances now have a deactivation callback that can be
+ used to prevent further propagation.
+* `tornado.testing.AsyncTestCase.wait` now resets its timeout on each call.
+* `tornado.wsgi.WSGIApplication` now parses arguments correctly on Python 3.
+* Exception handling on Python 3 has been improved; previously some exceptions
+ such as `UnicodeDecodeError` would generate `TypeErrors`.
+
diff --git a/website/static/sphinx.css b/website/static/sphinx.css
index 1a9a5fb..28c9b04 100644
--- a/website/static/sphinx.css
+++ b/website/static/sphinx.css
@@ -36,6 +36,10 @@
background: #fff;
}
+tt {
+ background: #fff;
+}
+
/* "related" = top header */
div.related {
position: fixed;