Code import
@@ -0,0 +1,16 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib
    ~~~~~~~~~~~~~~~~

    Contains user-submitted code that other users may find useful, but which
    is not part of the Werkzeug core. Anyone can write code for inclusion in
    the `contrib` package. All modules in this package are distributed as an
    add-on library and thus are not part of Werkzeug itself.

    This file itself is mostly for informational purposes and to tell the
    Python interpreter that `contrib` is a package.

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
BIN  venv/lib/python2.7/site-packages/werkzeug/contrib/__init__.pyc  Normal file
Binary file not shown.
355  venv/lib/python2.7/site-packages/werkzeug/contrib/atom.py  Normal file
@@ -0,0 +1,355 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.atom
    ~~~~~~~~~~~~~~~~~~~~~

    This module provides a class called :class:`AtomFeed` which can be
    used to generate feeds in the Atom syndication format (see :rfc:`4287`).

    Example::

        def atom_feed(request):
            feed = AtomFeed("My Blog", feed_url=request.url,
                            url=request.host_url,
                            subtitle="My example blog for a feed test.")
            for post in Post.query.limit(10).all():
                feed.add(post.title, post.body, content_type='html',
                         author=post.author, url=post.url, id=post.uid,
                         updated=post.last_update, published=post.pub_date)
            return feed.get_response()

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from datetime import datetime

from werkzeug.utils import escape
from werkzeug.wrappers import BaseResponse
from werkzeug._compat import implements_to_string, string_types


XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml'


def _make_text_block(name, content, content_type=None):
    """Helper function for the builder that creates an XML text block."""
    if content_type == 'xhtml':
        return u'<%s type="xhtml"><div xmlns="%s">%s</div></%s>\n' % \
               (name, XHTML_NAMESPACE, content, name)
    if not content_type:
        return u'<%s>%s</%s>\n' % (name, escape(content), name)
    return u'<%s type="%s">%s</%s>\n' % (name, content_type,
                                         escape(content), name)


def format_iso8601(obj):
    """Format a datetime object for iso8601"""
    iso8601 = obj.isoformat()
    if obj.tzinfo:
        return iso8601
    return iso8601 + 'Z'
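
# Editor's note (hedged example): `format_iso8601` treats naive datetimes as
# UTC and marks them with a trailing 'Z', while timezone-aware datetimes keep
# their own offset from `isoformat()`.  A quick sketch of the naive case:
#
#     >>> from datetime import datetime
#     >>> format_iso8601(datetime(2014, 1, 1, 12, 0))
#     '2014-01-01T12:00:00Z'
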
@implements_to_string
class AtomFeed(object):

    """A helper class that creates Atom feeds.

    :param title: the title of the feed. Required.
    :param title_type: the type attribute for the title element.  One of
                       ``'html'``, ``'text'`` or ``'xhtml'``.
    :param url: the url for the feed (not the url *of* the feed)
    :param id: a globally unique id for the feed.  Must be a URI.  If
               not present the `feed_url` is used, but one of both is
               required.
    :param updated: the time the feed was last modified.  Must be a
                    :class:`datetime.datetime` object.  If not present
                    the latest entry's `updated` is used.  Treated as
                    UTC if naive datetime.
    :param feed_url: the URL to the feed.  Should be the URL that was
                     requested.
    :param author: the author of the feed.  Must be either a string (the
                   name) or a dict with name (required) and uri or
                   email (both optional).  Can also be a list of
                   (possibly mixed) strings and dicts if there are
                   multiple authors.  Required if not every entry has an
                   author element.
    :param icon: an icon for the feed.
    :param logo: a logo for the feed.
    :param rights: copyright information for the feed.
    :param rights_type: the type attribute for the rights element.  One of
                        ``'html'``, ``'text'`` or ``'xhtml'``.  Default is
                        ``'text'``.
    :param subtitle: a short description of the feed.
    :param subtitle_type: the type attribute for the subtitle element.
                          One of ``'text'``, ``'html'`` or ``'xhtml'``.
                          Default is ``'text'``.
    :param links: additional links.  Must be a list of dictionaries with
                  href (required) and rel, type, hreflang, title, length
                  (all optional)
    :param generator: the software that generated this feed.  This must be
                      a tuple in the form ``(name, url, version)``.  If
                      you don't want to specify one of them, set the item
                      to `None`.
    :param entries: a list with the entries for the feed.  Entries can
                    also be added later with :meth:`add`.

    For more information on the elements see
    http://www.atomenabled.org/developers/syndication/

    Everywhere where a list is demanded, any iterable can be used.
    """

    default_generator = ('Werkzeug', None, None)

    def __init__(self, title=None, entries=None, **kwargs):
        self.title = title
        self.title_type = kwargs.get('title_type', 'text')
        self.url = kwargs.get('url')
        self.feed_url = kwargs.get('feed_url', self.url)
        self.id = kwargs.get('id', self.feed_url)
        self.updated = kwargs.get('updated')
        self.author = kwargs.get('author', ())
        self.icon = kwargs.get('icon')
        self.logo = kwargs.get('logo')
        self.rights = kwargs.get('rights')
        self.rights_type = kwargs.get('rights_type')
        self.subtitle = kwargs.get('subtitle')
        self.subtitle_type = kwargs.get('subtitle_type', 'text')
        self.generator = kwargs.get('generator')
        if self.generator is None:
            self.generator = self.default_generator
        self.links = kwargs.get('links', [])
        self.entries = entries and list(entries) or []

        if not hasattr(self.author, '__iter__') \
           or isinstance(self.author, string_types + (dict,)):
            self.author = [self.author]
        for i, author in enumerate(self.author):
            if not isinstance(author, dict):
                self.author[i] = {'name': author}

        if not self.title:
            raise ValueError('title is required')
        if not self.id:
            raise ValueError('id is required')
        for author in self.author:
            if 'name' not in author:
                raise TypeError('author must contain at least a name')

    def add(self, *args, **kwargs):
        """Add a new entry to the feed.  This function can either be called
        with a :class:`FeedEntry` or some keyword and positional arguments
        that are forwarded to the :class:`FeedEntry` constructor.
        """
        if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry):
            self.entries.append(args[0])
        else:
            kwargs['feed_url'] = self.feed_url
            self.entries.append(FeedEntry(*args, **kwargs))
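
    # Editor's note (hedged example): `add` accepts either a prebuilt entry
    # or the :class:`FeedEntry` constructor arguments directly.  The two
    # calls below are equivalent sketches; the field values are made up:
    #
    #     feed.add(FeedEntry('Hello', 'body text',
    #                        url='http://example.com/1',
    #                        updated=datetime.utcnow()))
    #     feed.add('Hello', 'body text', url='http://example.com/1',
    #              updated=datetime.utcnow())
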
    def __repr__(self):
        return '<%s %r (%d entries)>' % (
            self.__class__.__name__,
            self.title,
            len(self.entries)
        )

    def generate(self):
        """Return a generator that yields pieces of XML."""
        # atom demands either an author element in every entry or a global one
        if not self.author:
            if any(not e.author for e in self.entries):
                self.author = ({'name': 'Unknown author'},)

        if not self.updated:
            dates = sorted([entry.updated for entry in self.entries])
            self.updated = dates and dates[-1] or datetime.utcnow()

        yield u'<?xml version="1.0" encoding="utf-8"?>\n'
        yield u'<feed xmlns="http://www.w3.org/2005/Atom">\n'
        yield '  ' + _make_text_block('title', self.title, self.title_type)
        yield u'  <id>%s</id>\n' % escape(self.id)
        yield u'  <updated>%s</updated>\n' % format_iso8601(self.updated)
        if self.url:
            yield u'  <link href="%s" />\n' % escape(self.url)
        if self.feed_url:
            yield u'  <link href="%s" rel="self" />\n' % \
                escape(self.feed_url)
        for link in self.links:
            yield u'  <link %s/>\n' % ''.join('%s="%s" ' %
                                              (k, escape(link[k])) for k in link)
        for author in self.author:
            yield u'  <author>\n'
            yield u'    <name>%s</name>\n' % escape(author['name'])
            if 'uri' in author:
                yield u'    <uri>%s</uri>\n' % escape(author['uri'])
            if 'email' in author:
                yield '    <email>%s</email>\n' % escape(author['email'])
            yield '  </author>\n'
        if self.subtitle:
            yield '  ' + _make_text_block('subtitle', self.subtitle,
                                          self.subtitle_type)
        if self.icon:
            yield u'  <icon>%s</icon>\n' % escape(self.icon)
        if self.logo:
            yield u'  <logo>%s</logo>\n' % escape(self.logo)
        if self.rights:
            yield '  ' + _make_text_block('rights', self.rights,
                                          self.rights_type)
        generator_name, generator_url, generator_version = self.generator
        if generator_name or generator_url or generator_version:
            tmp = [u'  <generator']
            if generator_url:
                tmp.append(u' uri="%s"' % escape(generator_url))
            if generator_version:
                tmp.append(u' version="%s"' % escape(generator_version))
            tmp.append(u'>%s</generator>\n' % escape(generator_name))
            yield u''.join(tmp)
        for entry in self.entries:
            for line in entry.generate():
                yield u'  ' + line
        yield u'</feed>\n'

    def to_string(self):
        """Convert the feed into a string."""
        return u''.join(self.generate())

    def get_response(self):
        """Return a response object for the feed."""
        return BaseResponse(self.to_string(), mimetype='application/atom+xml')

    def __call__(self, environ, start_response):
        """Use the class as WSGI response object."""
        return self.get_response()(environ, start_response)

    def __str__(self):
        return self.to_string()


@implements_to_string
class FeedEntry(object):

    """Represents a single entry in a feed.

    :param title: the title of the entry. Required.
    :param title_type: the type attribute for the title element.  One of
                       ``'html'``, ``'text'`` or ``'xhtml'``.
    :param content: the content of the entry.
    :param content_type: the type attribute for the content element.  One
                         of ``'html'``, ``'text'`` or ``'xhtml'``.
    :param summary: a summary of the entry's content.
    :param summary_type: the type attribute for the summary element.  One
                         of ``'html'``, ``'text'`` or ``'xhtml'``.
    :param url: the url for the entry.
    :param id: a globally unique id for the entry.  Must be a URI.  If
               not present the URL is used, but one of both is required.
    :param updated: the time the entry was last modified.  Must be a
                    :class:`datetime.datetime` object.  Treated as UTC
                    if naive datetime.  Required.
    :param author: the author of the entry.  Must be either a string (the
                   name) or a dict with name (required) and uri or
                   email (both optional).  Can also be a list of
                   (possibly mixed) strings and dicts if there are
                   multiple authors.  Required if the feed does not have
                   an author element.
    :param published: the time the entry was initially published.  Must
                      be a :class:`datetime.datetime` object.  Treated as
                      UTC if naive datetime.
    :param rights: copyright information for the entry.
    :param rights_type: the type attribute for the rights element.  One of
                        ``'html'``, ``'text'`` or ``'xhtml'``.  Default is
                        ``'text'``.
    :param links: additional links.  Must be a list of dictionaries with
                  href (required) and rel, type, hreflang, title, length
                  (all optional)
    :param categories: categories for the entry.  Must be a list of
                       dictionaries with term (required), scheme and
                       label (all optional)
    :param xml_base: The xml base (url) for this feed item.  If not
                     provided it will default to the item url.

    For more information on the elements see
    http://www.atomenabled.org/developers/syndication/

    Everywhere where a list is demanded, any iterable can be used.
    """

    def __init__(self, title=None, content=None, feed_url=None, **kwargs):
        self.title = title
        self.title_type = kwargs.get('title_type', 'text')
        self.content = content
        self.content_type = kwargs.get('content_type', 'html')
        self.url = kwargs.get('url')
        self.id = kwargs.get('id', self.url)
        self.updated = kwargs.get('updated')
        self.summary = kwargs.get('summary')
        self.summary_type = kwargs.get('summary_type', 'html')
        self.author = kwargs.get('author', ())
        self.published = kwargs.get('published')
        self.rights = kwargs.get('rights')
        self.links = kwargs.get('links', [])
        self.categories = kwargs.get('categories', [])
        self.xml_base = kwargs.get('xml_base', feed_url)

        if not hasattr(self.author, '__iter__') \
           or isinstance(self.author, string_types + (dict,)):
            self.author = [self.author]
        for i, author in enumerate(self.author):
            if not isinstance(author, dict):
                self.author[i] = {'name': author}

        if not self.title:
            raise ValueError('title is required')
        if not self.id:
            raise ValueError('id is required')
        if not self.updated:
            raise ValueError('updated is required')

    def __repr__(self):
        return '<%s %r>' % (
            self.__class__.__name__,
            self.title
        )

    def generate(self):
        """Yields pieces of ATOM XML."""
        base = ''
        if self.xml_base:
            base = ' xml:base="%s"' % escape(self.xml_base)
        yield u'<entry%s>\n' % base
        yield u'  ' + _make_text_block('title', self.title, self.title_type)
        yield u'  <id>%s</id>\n' % escape(self.id)
        yield u'  <updated>%s</updated>\n' % format_iso8601(self.updated)
        if self.published:
            yield u'  <published>%s</published>\n' % \
                format_iso8601(self.published)
        if self.url:
            yield u'  <link href="%s" />\n' % escape(self.url)
        for author in self.author:
            yield u'  <author>\n'
            yield u'    <name>%s</name>\n' % escape(author['name'])
            if 'uri' in author:
                yield u'    <uri>%s</uri>\n' % escape(author['uri'])
            if 'email' in author:
                yield u'    <email>%s</email>\n' % escape(author['email'])
            yield u'  </author>\n'
        for link in self.links:
            yield u'  <link %s/>\n' % ''.join('%s="%s" ' %
                                              (k, escape(link[k])) for k in link)
        for category in self.categories:
            yield u'  <category %s/>\n' % ''.join('%s="%s" ' %
                                                  (k, escape(category[k])) for k in category)
        if self.summary:
            yield u'  ' + _make_text_block('summary', self.summary,
                                           self.summary_type)
        if self.content:
            yield u'  ' + _make_text_block('content', self.content,
                                           self.content_type)
        yield u'</entry>\n'

    def to_string(self):
        """Convert the feed item into a unicode object."""
        return u''.join(self.generate())

    def __str__(self):
        return self.to_string()
BIN  venv/lib/python2.7/site-packages/werkzeug/contrib/atom.pyc  Normal file
Binary file not shown.
858  venv/lib/python2.7/site-packages/werkzeug/contrib/cache.py  Normal file
@@ -0,0 +1,858 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.cache
    ~~~~~~~~~~~~~~~~~~~~~~

    The main problem with dynamic Web sites is, well, they're dynamic.  Each
    time a user requests a page, the webserver executes a lot of code, queries
    the database, renders templates until the visitor gets the page he sees.

    This is a lot more expensive than just loading a file from the file system
    and sending it to the visitor.

    For most Web applications, this overhead isn't a big deal, but once it
    becomes one, you will be glad to have a cache system in place.

    How Caching Works
    =================

    Caching is pretty simple.  Basically you have a cache object lurking around
    somewhere that is connected to a remote cache or the file system or
    something else.  When the request comes in you check if the current page
    is already in the cache and if so, you return it from the cache.
    Otherwise you generate the page and put it into the cache.  (Or a fragment
    of the page, you don't have to cache the full thing.)

    Here is a simple example of how to cache a sidebar for 5 minutes::

        def get_sidebar(user):
            identifier = 'sidebar_for/user%d' % user.id
            value = cache.get(identifier)
            if value is not None:
                return value
            value = generate_sidebar_for(user=user)
            cache.set(identifier, value, timeout=60 * 5)
            return value

    Creating a Cache Object
    =======================

    To create a cache object you just import the cache system of your choice
    from the cache module and instantiate it.  Then you can start working
    with that object:

    >>> from werkzeug.contrib.cache import SimpleCache
    >>> c = SimpleCache()
    >>> c.set("foo", "value")
    >>> c.get("foo")
    'value'
    >>> c.get("missing") is None
    True

    Please keep in mind that you have to create the cache and put it somewhere
    you have access to it (either as a module global you can import or you just
    put it into your WSGI application).

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
import os
import re
import errno
import tempfile
import platform
from hashlib import md5
from time import time
try:
    import cPickle as pickle
except ImportError:  # pragma: no cover
    import pickle

from werkzeug._compat import iteritems, string_types, text_type, \
    integer_types, to_native
from werkzeug.posixemulation import rename


def _items(mappingorseq):
    """Wrapper for efficient iteration over mappings represented by dicts
    or sequences::

        >>> for k, v in _items((i, i*i) for i in xrange(5)):
        ...     assert k*k == v

        >>> for k, v in _items(dict((i, i*i) for i in xrange(5))):
        ...     assert k*k == v

    """
    if hasattr(mappingorseq, 'items'):
        return iteritems(mappingorseq)
    return mappingorseq


class BaseCache(object):

    """Baseclass for the cache systems.  All the cache systems implement this
    API or a superset of it.

    :param default_timeout: the default timeout (in seconds) that is used if
                            no timeout is specified on :meth:`set`.  A timeout
                            of 0 indicates that the cache never expires.
    """

    def __init__(self, default_timeout=300):
        self.default_timeout = default_timeout

    def _normalize_timeout(self, timeout):
        if timeout is None:
            timeout = self.default_timeout
        return timeout

    def get(self, key):
        """Look up key in the cache and return the value for it.

        :param key: the key to be looked up.
        :returns: The value if it exists and is readable, else ``None``.
        """
        return None

    def delete(self, key):
        """Delete `key` from the cache.

        :param key: the key to delete.
        :returns: Whether the key existed and has been deleted.
        :rtype: boolean
        """
        return True

    def get_many(self, *keys):
        """Returns a list of values for the given keys.
        For each key an item in the list is created::

            foo, bar = cache.get_many("foo", "bar")

        Has the same error handling as :meth:`get`.

        :param keys: The function accepts multiple keys as positional
                     arguments.
        """
        return map(self.get, keys)

    def get_dict(self, *keys):
        """Like :meth:`get_many` but return a dict::

            d = cache.get_dict("foo", "bar")
            foo = d["foo"]
            bar = d["bar"]

        :param keys: The function accepts multiple keys as positional
                     arguments.
        """
        return dict(zip(keys, self.get_many(*keys)))

    def set(self, key, value, timeout=None):
        """Add a new key/value to the cache (overwrites value, if key already
        exists in the cache).

        :param key: the key to set
        :param value: the value for the key
        :param timeout: the cache timeout for the key in seconds (if not
                        specified, it uses the default timeout).  A timeout of
                        0 indicates that the cache never expires.
        :returns: ``True`` if key has been updated, ``False`` for backend
                  errors.  Pickling errors, however, will raise a subclass of
                  ``pickle.PickleError``.
        :rtype: boolean
        """
        return True

    def add(self, key, value, timeout=None):
        """Works like :meth:`set` but does not overwrite the values of already
        existing keys.

        :param key: the key to set
        :param value: the value for the key
        :param timeout: the cache timeout for the key in seconds (if not
                        specified, it uses the default timeout).  A timeout of
                        0 indicates that the cache never expires.
        :returns: Same as :meth:`set`, but also ``False`` for already
                  existing keys.
        :rtype: boolean
        """
        return True

    def set_many(self, mapping, timeout=None):
        """Sets multiple keys and values from a mapping.

        :param mapping: a mapping with the keys/values to set.
        :param timeout: the cache timeout for the key in seconds (if not
                        specified, it uses the default timeout).  A timeout of
                        0 indicates that the cache never expires.
        :returns: Whether all given keys have been set.
        :rtype: boolean
        """
        rv = True
        for key, value in _items(mapping):
            if not self.set(key, value, timeout):
                rv = False
        return rv

    def delete_many(self, *keys):
        """Deletes multiple keys at once.

        :param keys: The function accepts multiple keys as positional
                     arguments.
        :returns: Whether all given keys have been deleted.
        :rtype: boolean
        """
        return all(self.delete(key) for key in keys)

    def has(self, key):
        """Checks if a key exists in the cache without returning it.  This is
        a cheap operation that bypasses loading the actual data on the
        backend.

        This method is optional and may not be implemented on all caches.

        :param key: the key to check
        """
        raise NotImplementedError(
            '%s doesn\'t have an efficient implementation of `has`. That '
            'means it is impossible to check whether a key exists without '
            'fully loading the key\'s data. Consider using `self.get` '
            'explicitly if you don\'t care about performance.'
        )

    def clear(self):
        """Clears the cache.  Keep in mind that not all caches support
        completely clearing the cache.

        :returns: Whether the cache has been cleared.
        :rtype: boolean
        """
        return True

    def inc(self, key, delta=1):
        """Increments the value of a key by `delta`.  If the key does
        not yet exist it is initialized with `delta`.

        For supporting caches this is an atomic operation.

        :param key: the key to increment.
        :param delta: the delta to add.
        :returns: The new value or ``None`` for backend errors.
        """
        value = (self.get(key) or 0) + delta
        return value if self.set(key, value) else None

    def dec(self, key, delta=1):
        """Decrements the value of a key by `delta`.  If the key does
        not yet exist it is initialized with `-delta`.

        For supporting caches this is an atomic operation.

        :param key: the key to decrement.
        :param delta: the delta to subtract.
        :returns: The new value or `None` for backend errors.
        """
        value = (self.get(key) or 0) - delta
        return value if self.set(key, value) else None
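
# Editor's note (hedged sketch, not part of Werkzeug): a backend only needs
# to override the primitive operations above; the bulk operations fall back
# to them.  A minimal dict-backed subclass, ignoring timeouts, could look
# like this:
#
#     class DictCache(BaseCache):
#         def __init__(self, default_timeout=300):
#             BaseCache.__init__(self, default_timeout)
#             self._store = {}
#
#         def get(self, key):
#             return self._store.get(key)
#
#         def set(self, key, value, timeout=None):
#             self._store[key] = value
#             return True
#
#         def delete(self, key):
#             return self._store.pop(key, None) is not None

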
class NullCache(BaseCache):

    """A cache that doesn't cache.  This can be useful for unit testing.

    :param default_timeout: a dummy parameter that is ignored but exists
                            for API compatibility with other caches.
    """


class SimpleCache(BaseCache):

    """Simple memory cache for single process environments.  This class
    exists mainly for the development server and is not 100% thread safe.  It
    tries to use as many atomic operations as possible and no locks for
    simplicity but it could happen under heavy load that keys are added
    multiple times.

    :param threshold: the maximum number of items the cache stores before
                      it starts deleting some.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`.  A timeout of
                            0 indicates that the cache never expires.
    """

    def __init__(self, threshold=500, default_timeout=300):
        BaseCache.__init__(self, default_timeout)
        self._cache = {}
        self.clear = self._cache.clear
        self._threshold = threshold

    def _prune(self):
        if len(self._cache) > self._threshold:
            now = time()
            toremove = []
            for idx, (key, (expires, _)) in enumerate(self._cache.items()):
                if (expires != 0 and expires <= now) or idx % 3 == 0:
                    toremove.append(key)
            for key in toremove:
                self._cache.pop(key, None)

    def _normalize_timeout(self, timeout):
        timeout = BaseCache._normalize_timeout(self, timeout)
        if timeout > 0:
            timeout = time() + timeout
        return timeout

    def get(self, key):
        try:
            expires, value = self._cache[key]
            if expires == 0 or expires > time():
                return pickle.loads(value)
        except (KeyError, pickle.PickleError):
            return None

    def set(self, key, value, timeout=None):
        expires = self._normalize_timeout(timeout)
        self._prune()
        self._cache[key] = (expires, pickle.dumps(value,
                                                  pickle.HIGHEST_PROTOCOL))
        return True

    def add(self, key, value, timeout=None):
        expires = self._normalize_timeout(timeout)
        self._prune()
        item = (expires, pickle.dumps(value,
                                      pickle.HIGHEST_PROTOCOL))
        if key in self._cache:
            return False
        self._cache.setdefault(key, item)
        return True

    def delete(self, key):
        return self._cache.pop(key, None) is not None

    def has(self, key):
        try:
            expires, value = self._cache[key]
            return expires == 0 or expires > time()
        except KeyError:
            return False


_test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match


class MemcachedCache(BaseCache):

    """A cache that uses memcached as backend.

    The first argument can either be an object that resembles the API of a
    :class:`memcache.Client` or a tuple/list of server addresses.  In the
    event that a tuple/list is passed, Werkzeug tries to import the best
    available memcache library.

    This cache looks into the following packages/modules to find bindings for
    memcached:

        - ``pylibmc``
        - ``google.appengine.api.memcache``
        - ``memcache``

    Implementation notes:  This cache backend works around some limitations in
    memcached to simplify the interface.  For example unicode keys are encoded
    to utf-8 on the fly.  Methods such as :meth:`~BaseCache.get_dict` return
    the keys in the same format as passed.  Furthermore all get methods
    silently ignore key errors to not cause problems when untrusted user data
    is passed to the get methods, which is often the case in web applications.

    :param servers: a list or tuple of server addresses or alternatively
                    a :class:`memcache.Client` or a compatible client.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`.  A timeout of
                            0 indicates that the cache never expires.
    :param key_prefix: a prefix that is added before all keys.  This makes it
                       possible to use the same memcached server for different
                       applications.  Keep in mind that
                       :meth:`~BaseCache.clear` will also clear keys with a
                       different prefix.
    """
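
    # Editor's note (hedged example): a typical setup, assuming a memcached
    # server on the default port and one of the bindings listed above
    # installed.  The key names are made up:
    #
    #     cache = MemcachedCache(['127.0.0.1:11211'], key_prefix='myapp-')
    #     cache.set('render/sidebar', '<ul>...</ul>', timeout=300)
    #     cache.get('render/sidebar')
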
    def __init__(self, servers=None, default_timeout=300, key_prefix=None):
        BaseCache.__init__(self, default_timeout)
        if servers is None or isinstance(servers, (list, tuple)):
            if servers is None:
                servers = ['127.0.0.1:11211']
            self._client = self.import_preferred_memcache_lib(servers)
            if self._client is None:
                raise RuntimeError('no memcache module found')
        else:
            # NOTE: servers is actually an already initialized memcache
            # client.
            self._client = servers

        self.key_prefix = to_native(key_prefix)

    def _normalize_key(self, key):
        key = to_native(key, 'utf-8')
        if self.key_prefix:
            key = self.key_prefix + key
        return key

    def _normalize_timeout(self, timeout):
        timeout = BaseCache._normalize_timeout(self, timeout)
        if timeout > 0:
            timeout = int(time()) + timeout
        return timeout

    def get(self, key):
        key = self._normalize_key(key)
        # memcached doesn't support keys longer than that.  Because such
        # over-long keys often come from untrusted user-submitted data,
        # we fail silently for getting.
        if _test_memcached_key(key):
            return self._client.get(key)

    def get_dict(self, *keys):
        key_mapping = {}
        have_encoded_keys = False
        for key in keys:
            encoded_key = self._normalize_key(key)
            if not isinstance(key, str):
                have_encoded_keys = True
            if _test_memcached_key(key):
                key_mapping[encoded_key] = key
        d = rv = self._client.get_multi(key_mapping.keys())
        if have_encoded_keys or self.key_prefix:
            rv = {}
            for key, value in iteritems(d):
                rv[key_mapping[key]] = value
        if len(rv) < len(keys):
            for key in keys:
                if key not in rv:
                    rv[key] = None
        return rv

    def add(self, key, value, timeout=None):
        key = self._normalize_key(key)
        timeout = self._normalize_timeout(timeout)
        return self._client.add(key, value, timeout)

    def set(self, key, value, timeout=None):
        key = self._normalize_key(key)
        timeout = self._normalize_timeout(timeout)
        return self._client.set(key, value, timeout)

    def get_many(self, *keys):
        d = self.get_dict(*keys)
        return [d[key] for key in keys]

    def set_many(self, mapping, timeout=None):
        new_mapping = {}
        for key, value in _items(mapping):
            key = self._normalize_key(key)
            new_mapping[key] = value

        timeout = self._normalize_timeout(timeout)
        failed_keys = self._client.set_multi(new_mapping, timeout)
        return not failed_keys

    def delete(self, key):
        key = self._normalize_key(key)
        if _test_memcached_key(key):
            return self._client.delete(key)

    def delete_many(self, *keys):
        new_keys = []
        for key in keys:
            key = self._normalize_key(key)
            if _test_memcached_key(key):
                new_keys.append(key)
        return self._client.delete_multi(new_keys)

    def has(self, key):
        key = self._normalize_key(key)
        if _test_memcached_key(key):
            return self._client.append(key, '')
        return False

    def clear(self):
        return self._client.flush_all()

    def inc(self, key, delta=1):
        key = self._normalize_key(key)
        return self._client.incr(key, delta)

    def dec(self, key, delta=1):
        key = self._normalize_key(key)
        return self._client.decr(key, delta)

    def import_preferred_memcache_lib(self, servers):
        """Returns an initialized memcache client.  Used by the constructor."""
        try:
            import pylibmc
        except ImportError:
            pass
        else:
            return pylibmc.Client(servers)

        try:
            from google.appengine.api import memcache
        except ImportError:
            pass
        else:
            return memcache.Client()

        try:
            import memcache
        except ImportError:
            pass
        else:
            return memcache.Client(servers)


# backwards compatibility
GAEMemcachedCache = MemcachedCache


class RedisCache(BaseCache):

    """Uses the Redis key-value store as a cache backend.

    The first argument can be either a string denoting the address of the
    Redis server or an object resembling an instance of a redis.Redis class.

    Note: Python Redis API already takes care of encoding unicode strings on
    the fly.

    .. versionadded:: 0.7

    .. versionadded:: 0.8
       `key_prefix` was added.

    .. versionchanged:: 0.8
       This cache backend now properly serializes objects.

    .. versionchanged:: 0.8.3
       This cache backend now supports password authentication.

    .. versionchanged:: 0.10
       ``**kwargs`` is now passed to the redis object.

    :param host: address of the Redis server or an object whose API is
                 compatible with the official Python Redis client (redis-py).
    :param port: port number on which Redis server listens for connections.
    :param password: password authentication for the Redis server.
    :param db: db (zero-based numeric index) on Redis Server to connect.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`.  A timeout of
                            0 indicates that the cache never expires.
    :param key_prefix: A prefix that should be added to all keys.

    Any additional keyword arguments will be passed to ``redis.Redis``.
    """
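
    # Editor's note (hedged example): a typical setup, assuming a Redis
    # server on localhost and the `redis` package installed.  The key and
    # prefix names are made up:
    #
    #     cache = RedisCache(host='localhost', port=6379, key_prefix='myapp:')
    #     cache.set('user/42', {'name': 'Ada'}, timeout=300)
    #     cache.get('user/42')
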
    def __init__(self, host='localhost', port=6379, password=None,
                 db=0, default_timeout=300, key_prefix=None, **kwargs):
        BaseCache.__init__(self, default_timeout)
        if isinstance(host, string_types):
            try:
                import redis
            except ImportError:
                raise RuntimeError('no redis module found')
            if kwargs.get('decode_responses', None):
                raise ValueError('decode_responses is not supported by '
                                 'RedisCache.')
            self._client = redis.Redis(host=host, port=port, password=password,
                                       db=db, **kwargs)
        else:
            self._client = host
        self.key_prefix = key_prefix or ''

    def _normalize_timeout(self, timeout):
        timeout = BaseCache._normalize_timeout(self, timeout)
        if timeout == 0:
            timeout = -1
        return timeout

    def dump_object(self, value):
        """Dumps an object into a string for redis.  By default it serializes
        integers as regular strings and pickle dumps everything else.
        """
        t = type(value)
        if t in integer_types:
            return str(value).encode('ascii')
        return b'!' + pickle.dumps(value)

    def load_object(self, value):
        """The reversal of :meth:`dump_object`.  This might be called with
        None.
        """
        if value is None:
            return None
        if value.startswith(b'!'):
            try:
                return pickle.loads(value[1:])
            except pickle.PickleError:
                return None
        try:
            return int(value)
        except ValueError:
            # before 0.8 we did not have serialization.  Still support that.
            return value
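
    # Editor's note (hedged example): the serialization protocol above stores
    # integers as plain ASCII digits and prefixes pickled payloads with b'!',
    # which is how `load_object` tells the two apart when reading back:
    #
    #     dump_object(42)        -> b'42'
    #     dump_object({'a': 1})  -> b'!' + pickle.dumps({'a': 1})
    #     load_object(b'42')     -> 42   (plain ints round-trip unpickled)
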
    def get(self, key):
        return self.load_object(self._client.get(self.key_prefix + key))

    def get_many(self, *keys):
        if self.key_prefix:
            keys = [self.key_prefix + key for key in keys]
        return [self.load_object(x) for x in self._client.mget(keys)]

    def set(self, key, value, timeout=None):
        timeout = self._normalize_timeout(timeout)
        dump = self.dump_object(value)
        if timeout == -1:
            result = self._client.set(name=self.key_prefix + key,
                                      value=dump)
        else:
            result = self._client.setex(name=self.key_prefix + key,
                                        value=dump, time=timeout)
        return result

    def add(self, key, value, timeout=None):
        timeout = self._normalize_timeout(timeout)
        dump = self.dump_object(value)
        return (
            self._client.setnx(name=self.key_prefix + key, value=dump) and
            self._client.expire(name=self.key_prefix + key, time=timeout)
        )

    def set_many(self, mapping, timeout=None):
        timeout = self._normalize_timeout(timeout)
        # Use transaction=False to batch without calling redis MULTI
        # which is not supported by twemproxy
        pipe = self._client.pipeline(transaction=False)

        for key, value in _items(mapping):
            dump = self.dump_object(value)
            if timeout == -1:
                pipe.set(name=self.key_prefix + key, value=dump)
            else:
                pipe.setex(name=self.key_prefix + key, value=dump,
                           time=timeout)
        return pipe.execute()

    def delete(self, key):
        return self._client.delete(self.key_prefix + key)

    def delete_many(self, *keys):
        if not keys:
            return
        if self.key_prefix:
            keys = [self.key_prefix + key for key in keys]
        return self._client.delete(*keys)

    def has(self, key):
        return self._client.exists(self.key_prefix + key)

    def clear(self):
        status = False
        if self.key_prefix:
            keys = self._client.keys(self.key_prefix + '*')
            if keys:
                status = self._client.delete(*keys)
        else:
            status = self._client.flushdb()
        return status

    def inc(self, key, delta=1):
        return self._client.incr(name=self.key_prefix + key, amount=delta)

    def dec(self, key, delta=1):
        return self._client.decr(name=self.key_prefix + key, amount=delta)


class FileSystemCache(BaseCache):

    """A cache that stores the items on the file system.  This cache depends
    on being the only user of the `cache_dir`.  Make absolutely sure that
    nobody but this cache stores files there or otherwise the cache will
    randomly delete files therein.

    :param cache_dir: the directory where cache files are stored.
    :param threshold: the maximum number of items the cache stores before
                      it starts deleting some.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified on :meth:`~BaseCache.set`.  A timeout of
                            0 indicates that the cache never expires.
    :param mode: the file mode wanted for the cache files, default 0600
    """
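
    # Editor's note (hedged example): a typical setup; the directory path is
    # made up and must be writable by the application:
    #
    #     cache = FileSystemCache('/tmp/myapp-cache', threshold=500)
    #     cache.set('page/index', '<html>...</html>', timeout=300)
    #     cache.get('page/index')
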
    #: used for temporary files by the FileSystemCache
    _fs_transaction_suffix = '.__wz_cache'

    def __init__(self, cache_dir, threshold=500, default_timeout=300,
                 mode=0o600):
        BaseCache.__init__(self, default_timeout)
        self._path = cache_dir
        self._threshold = threshold
        self._mode = mode

        try:
            os.makedirs(self._path)
        except OSError as ex:
            if ex.errno != errno.EEXIST:
                raise

    def _normalize_timeout(self, timeout):
        timeout = BaseCache._normalize_timeout(self, timeout)
        if timeout != 0:
            timeout = time() + timeout
        return int(timeout)

    def _list_dir(self):
        """return a list of (fully qualified) cache filenames
        """
        return [os.path.join(self._path, fn) for fn in os.listdir(self._path)
                if not fn.endswith(self._fs_transaction_suffix)]

    def _prune(self):
        entries = self._list_dir()
        if len(entries) > self._threshold:
            now = time()
            for idx, fname in enumerate(entries):
                try:
                    remove = False
                    with open(fname, 'rb') as f:
                        expires = pickle.load(f)
                    remove = (expires != 0 and expires <= now) or idx % 3 == 0

                    if remove:
                        os.remove(fname)
                except (IOError, OSError):
                    pass

    def clear(self):
        for fname in self._list_dir():
            try:
                os.remove(fname)
            except (IOError, OSError):
                return False
        return True

    def _get_filename(self, key):
        if isinstance(key, text_type):
            key = key.encode('utf-8')  # XXX unicode review
        hash = md5(key).hexdigest()
        return os.path.join(self._path, hash)

    def get(self, key):
        filename = self._get_filename(key)
        try:
            with open(filename, 'rb') as f:
                pickle_time = pickle.load(f)
                if pickle_time == 0 or pickle_time >= time():
                    return pickle.load(f)
                else:
                    os.remove(filename)
                    return None
        except (IOError, OSError, pickle.PickleError):
            return None

    def add(self, key, value, timeout=None):
        filename = self._get_filename(key)
        if not os.path.exists(filename):
            return self.set(key, value, timeout)
        return False

    def set(self, key, value, timeout=None):
        timeout = self._normalize_timeout(timeout)
        filename = self._get_filename(key)
        self._prune()
        try:
            fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
                                       dir=self._path)
            with os.fdopen(fd, 'wb') as f:
                pickle.dump(timeout, f, 1)
                pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
            rename(tmp, filename)
            os.chmod(filename, self._mode)
        except (IOError, OSError):
            return False
        else:
            return True

    def delete(self, key):
        try:
            os.remove(self._get_filename(key))
        except (IOError, OSError):
            return False
        else:
            return True

    def has(self, key):
        filename = self._get_filename(key)
        try:
            with open(filename, 'rb') as f:
                pickle_time = pickle.load(f)
                if pickle_time == 0 or pickle_time >= time():
                    return True
                else:
                    os.remove(filename)
                    return False
        except (IOError, OSError, pickle.PickleError):
            return False


class UWSGICache(BaseCache):

    """Implements the cache using uWSGI's caching framework.

    .. note::
        This class cannot be used when running under PyPy, because the uWSGI
        API implementation for PyPy is lacking the needed functionality.

    :param default_timeout: The default timeout in seconds.
    :param cache: The name of the caching instance to connect to, for
        example: mycache@localhost:3031, defaults to an empty string, which
        means uWSGI will cache in the local instance.  If the cache is in the
        same instance as the werkzeug app, you only have to provide the name
        of the cache.
    """
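
    # Editor's note (hedged example): assuming the process runs under uWSGI
    # with a cache configured (e.g. via a ``--cache2 name=mycache,items=100``
    # option), usage could look like:
    #
    #     cache = UWSGICache(cache='mycache')
    #     cache.set('foo', {'bar': 1}, timeout=60)
    #     cache.get('foo')
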
    def __init__(self, default_timeout=300, cache=''):
        BaseCache.__init__(self, default_timeout)

        if platform.python_implementation() == 'PyPy':
            raise RuntimeError("uWSGI caching does not work under PyPy, see "
                               "the docs for more details.")

        try:
            import uwsgi
            self._uwsgi = uwsgi
        except ImportError:
            raise RuntimeError("uWSGI could not be imported, are you "
                               "running under uWSGI?")

        self.cache = cache

    def get(self, key):
        rv = self._uwsgi.cache_get(key, self.cache)
        if rv is None:
            return
        return pickle.loads(rv)

    def delete(self, key):
        return self._uwsgi.cache_del(key, self.cache)

    def set(self, key, value, timeout=None):
        return self._uwsgi.cache_update(key, pickle.dumps(value),
                                        self._normalize_timeout(timeout),
                                        self.cache)

    def add(self, key, value, timeout=None):
        return self._uwsgi.cache_set(key, pickle.dumps(value),
                                     self._normalize_timeout(timeout),
                                     self.cache)

    def clear(self):
        return self._uwsgi.cache_clear(self.cache)

    def has(self, key):
        return self._uwsgi.cache_exists(key, self.cache) is not None
BIN  venv/lib/python2.7/site-packages/werkzeug/contrib/cache.pyc  Normal file
Binary file not shown.
254  venv/lib/python2.7/site-packages/werkzeug/contrib/fixers.py  Normal file
@@ -0,0 +1,254 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.fixers
    ~~~~~~~~~~~~~~~~~~~~~~~

    .. versionadded:: 0.5

    This module includes various helpers that fix bugs in web servers.  They
    may be necessary for some versions of a buggy web server but not others.
    We try to stay updated with the status of the bugs as well as possible
    but you have to check whether they fix the problem you encounter.

    If you notice bugs in webservers not fixed in this module consider
    contributing a patch.

    :copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more
                details.
    :license: BSD, see LICENSE for more details.
"""
try:
    from urllib import unquote
except ImportError:
    from urllib.parse import unquote

from werkzeug.http import parse_options_header, parse_cache_control_header, \
    parse_set_header
from werkzeug.useragents import UserAgent
from werkzeug.datastructures import Headers, ResponseCacheControl


class CGIRootFix(object):

    """Wrap the application in this middleware if you are using FastCGI or
    CGI and you have problems with your app root being set to the cgi
    script's path instead of the path users are going to visit.

    .. versionchanged:: 0.9
       Added `app_root` parameter and renamed from `LighttpdCGIRootFix`.

    :param app: the WSGI application
    :param app_root: Defaulting to ``'/'``, you can set this to something else
                     if your app is mounted somewhere else.
    """
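
    # Editor's note (hedged example): typical usage wraps an existing WSGI
    # app; the mount point below is made up:
    #
    #     app = CGIRootFix(app, app_root='/myapp')
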
    def __init__(self, app, app_root='/'):
        self.app = app
        self.app_root = app_root

    def __call__(self, environ, start_response):
        # only set PATH_INFO for older versions of Lighty or if no
        # server software is provided.  That's because the test was
        # added in newer Werkzeug versions and we don't want to break
        # people's code if they are using this fixer in a test that
        # does not set the SERVER_SOFTWARE key.
        if 'SERVER_SOFTWARE' not in environ or \
           environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28':
            environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \
                environ.get('PATH_INFO', '')
        environ['SCRIPT_NAME'] = self.app_root.strip('/')
        return self.app(environ, start_response)

# backwards compatibility
LighttpdCGIRootFix = CGIRootFix


class PathInfoFromRequestUriFix(object):

    """On Windows, environment variables are limited to the system charset
    which makes it impossible to store the `PATH_INFO` variable in the
    environment without loss of information on some systems.

    This is for example a problem for CGI scripts on a Windows Apache.

    This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`,
    `REQUEST_URL`, or `UNENCODED_URL` (whatever is available).  Thus the
    fix can only be applied if the webserver supports either of these
    variables.

    :param app: the WSGI application
    """
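
    # Editor's note (hedged example): like the other fixers, this one simply
    # wraps the WSGI app:
    #
    #     app = PathInfoFromRequestUriFix(app)
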
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        for key in 'REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL':
            if key not in environ:
                continue
            request_uri = unquote(environ[key])
            script_name = unquote(environ.get('SCRIPT_NAME', ''))
            if request_uri.startswith(script_name):
                environ['PATH_INFO'] = request_uri[len(script_name):] \
                    .split('?', 1)[0]
                break
        return self.app(environ, start_response)


class ProxyFix(object):

    """This middleware can be applied to add HTTP proxy support to an
    application that was not designed with HTTP proxies in mind.  It
    sets `REMOTE_ADDR`, `HTTP_HOST` from `X-Forwarded` headers.  While
    Werkzeug-based applications can already use
    :py:func:`werkzeug.wsgi.get_host` to retrieve the current host even if
    behind proxy setups, this middleware can be used for applications which
    access the WSGI environment directly.

    If you have more than one proxy server in front of your app, set
    `num_proxies` accordingly.

    Do not use this middleware in non-proxy setups for security reasons.

    The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in
    the WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and
    `werkzeug.proxy_fix.orig_http_host`.

    :param app: the WSGI application
    :param num_proxies: the number of proxy servers in front of the app.
    """
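
    # Editor's note (hedged example): with two proxies in front of the app
    # (say, a CDN in front of a load balancer), the client address is the
    # second-to-last entry of X-Forwarded-For:
    #
    #     app = ProxyFix(app, num_proxies=2)
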
    def __init__(self, app, num_proxies=1):
        self.app = app
        self.num_proxies = num_proxies

    def get_remote_addr(self, forwarded_for):
        """Selects the new remote addr from the given list of ips in
        X-Forwarded-For.  By default it picks the one supplied by the
        `num_proxies`-th proxy server.  Before 0.9 it would always pick the
        first.

        .. versionadded:: 0.8
        """
        if len(forwarded_for) >= self.num_proxies:
            return forwarded_for[-self.num_proxies]
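
    # Editor's note (hedged example): with num_proxies=2 and the header
    # ``X-Forwarded-For: client, proxy1`` the stripped list is
    # ['client', 'proxy1'], so ``forwarded_for[-2]`` picks 'client'.
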
    def __call__(self, environ, start_response):
        getter = environ.get
        forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '')
        forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',')
        forwarded_host = getter('HTTP_X_FORWARDED_HOST', '')
        environ.update({
            'werkzeug.proxy_fix.orig_wsgi_url_scheme': getter('wsgi.url_scheme'),
            'werkzeug.proxy_fix.orig_remote_addr': getter('REMOTE_ADDR'),
            'werkzeug.proxy_fix.orig_http_host': getter('HTTP_HOST')
        })
        forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x]
        remote_addr = self.get_remote_addr(forwarded_for)
        if remote_addr is not None:
            environ['REMOTE_ADDR'] = remote_addr
        if forwarded_host:
            environ['HTTP_HOST'] = forwarded_host
        if forwarded_proto:
            environ['wsgi.url_scheme'] = forwarded_proto
        return self.app(environ, start_response)


class HeaderRewriterFix(object):

    """This middleware can remove response headers and add others.  This is
    for example useful to remove the `Date` header from responses if you are
    using a server that adds that header no matter whether it's already
    present or not, or to add `X-Powered-By` headers::

        app = HeaderRewriterFix(app, remove_headers=['Date'],
                                add_headers=[('X-Powered-By', 'WSGI')])

    :param app: the WSGI application
    :param remove_headers: a sequence of header keys that should be
                           removed.
    :param add_headers: a sequence of ``(key, value)`` tuples that should
                        be added.
    """

    def __init__(self, app, remove_headers=None, add_headers=None):
        self.app = app
        self.remove_headers = set(x.lower() for x in (remove_headers or ()))
        self.add_headers = list(add_headers or ())

    def __call__(self, environ, start_response):
        def rewriting_start_response(status, headers, exc_info=None):
            new_headers = []
            for key, value in headers:
                if key.lower() not in self.remove_headers:
                    new_headers.append((key, value))
            new_headers += self.add_headers
            return start_response(status, new_headers, exc_info)
        return self.app(environ, rewriting_start_response)


class InternetExplorerFix(object):

    """This middleware fixes a couple of bugs with Microsoft Internet
    Explorer.  Currently the following fixes are applied:

    -   removing of `Vary` headers for unsupported mimetypes which
        causes trouble with caching.  Can be disabled by passing
        ``fix_vary=False`` to the constructor.
        see: http://support.microsoft.com/kb/824847/en-us

    -   removes offending headers to work around caching bugs in
        Internet Explorer if `Content-Disposition` is set.  Can be
        disabled by passing ``fix_attach=False`` to the constructor.

    If it does not detect affected Internet Explorer versions it won't touch
    the request / response.
    """

    # This code was inspired by Django fixers for the same bugs.  The
    # fix_vary and fix_attach fixers were originally implemented in Django
    # by Michael Axiak and are available as part of the Django project:
    # http://code.djangoproject.com/ticket/4148

    def __init__(self, app, fix_vary=True, fix_attach=True):
        self.app = app
        self.fix_vary = fix_vary
        self.fix_attach = fix_attach

    def fix_headers(self, environ, headers, status=None):
        if self.fix_vary:
            header = headers.get('content-type', '')
            mimetype, options = parse_options_header(header)
            if mimetype not in ('text/html', 'text/plain', 'text/sgml'):
                headers.pop('vary', None)

        if self.fix_attach and 'content-disposition' in headers:
            pragma = parse_set_header(headers.get('pragma', ''))
            pragma.discard('no-cache')
            header = pragma.to_header()
            if not header:
                headers.pop('pragma', '')
            else:
                headers['Pragma'] = header
            header = headers.get('cache-control', '')
            if header:
                cc = parse_cache_control_header(header,
                                                cls=ResponseCacheControl)
                cc.no_cache = None
                cc.no_store = False
                header = cc.to_header()
                if not header:
                    headers.pop('cache-control', '')
                else:
                    headers['Cache-Control'] = header

    def run_fixed(self, environ, start_response):
        def fixing_start_response(status, headers, exc_info=None):
            headers = Headers(headers)
            self.fix_headers(environ, headers, status)
            return start_response(status, headers.to_wsgi_list(), exc_info)
        return self.app(environ, fixing_start_response)

    def __call__(self, environ, start_response):
        ua = UserAgent(environ)
        if ua.browser != 'msie':
            return self.app(environ, start_response)
        return self.run_fixed(environ, start_response)
BIN  venv/lib/python2.7/site-packages/werkzeug/contrib/fixers.pyc  Normal file
Binary file not shown.
352  venv/lib/python2.7/site-packages/werkzeug/contrib/iterio.py  Normal file
@@ -0,0 +1,352 @@
# -*- coding: utf-8 -*-
r"""
    werkzeug.contrib.iterio
    ~~~~~~~~~~~~~~~~~~~~~~~

    This module implements a :class:`IterIO` that converts an iterator into
    a stream object and the other way round.  Converting streams into
    iterators requires the `greenlet`_ module.

    To convert an iterator into a stream all you have to do is to pass it
    directly to the :class:`IterIO` constructor.  In this example we pass it
    a newly created generator::

        def foo():
            yield "something\n"
            yield "otherthings"
        stream = IterIO(foo())
        print stream.read()         # read the whole iterator

    The other way round works a bit differently because we have to ensure
    that the code execution doesn't take place yet.  An :class:`IterIO` call
    with a callable as first argument does two things.  The function itself
    is passed an :class:`IterIO` stream it can feed.  The object returned by
    the :class:`IterIO` constructor on the other hand is not a stream object
    but an iterator::

        def foo(stream):
            stream.write("some")
            stream.write("thing")
            stream.flush()
            stream.write("otherthing")
        iterator = IterIO(foo)
        print iterator.next()       # prints something
        print iterator.next()       # prints otherthing
        iterator.next()             # raises StopIteration

    .. _greenlet: https://github.com/python-greenlet/greenlet

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
try:
    import greenlet
except ImportError:
    greenlet = None

from werkzeug._compat import implements_iterator


def _mixed_join(iterable, sentinel):
    """concatenate any string type in an intelligent way."""
    iterator = iter(iterable)
    first_item = next(iterator, sentinel)
    if isinstance(first_item, bytes):
        return first_item + b''.join(iterator)
    return first_item + u''.join(iterator)


def _newline(reference_string):
    if isinstance(reference_string, bytes):
        return b'\n'
    return u'\n'


@implements_iterator
class IterIO(object):

    """Instances of this object implement an interface compatible with the
    standard Python :class:`file` object.  Streams are either read-only or
    write-only depending on how the object is created.

    If the first argument is an iterable a file like object is returned that
    returns the contents of the iterable.  In case the iterable is empty
    read operations will return the sentinel value.

    If the first argument is a callable then the stream object will be
    created and passed to that function.  The caller itself however will
    not receive a stream but an iterable.  The function will be executed
    step by step as something iterates over the returned iterable.  Each
    call to :meth:`flush` will create an item for the iterable.  If
    :meth:`flush` is called without any writes in-between the sentinel
    value will be yielded.

    Note for Python 3: due to the incompatible interface of bytes and
    streams you should set the sentinel value explicitly to an empty
    bytestring (``b''``) if you are expecting to deal with bytes as
    otherwise the end of the stream is marked with the wrong sentinel
    value.

    .. versionadded:: 0.9
       `sentinel` parameter was added.
    """

    def __new__(cls, obj, sentinel=''):
        try:
            iterator = iter(obj)
        except TypeError:
            return IterI(obj, sentinel)
        return IterO(iterator, sentinel)

    def __iter__(self):
        return self

    def tell(self):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        return self.pos

    def isatty(self):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        return False

    def seek(self, pos, mode=0):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def truncate(self, size=None):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def write(self, s):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def writelines(self, list):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def read(self, n=-1):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def readlines(self, sizehint=0):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def readline(self, length=None):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def flush(self):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        raise IOError(9, 'Bad file descriptor')

    def __next__(self):
        if self.closed:
            raise StopIteration()
        line = self.readline()
        if not line:
            raise StopIteration()
        return line


class IterI(IterIO):

    """Convert a stream into an iterator."""

    def __new__(cls, func, sentinel=''):
        if greenlet is None:
            raise RuntimeError('IterI requires greenlet support')
        stream = object.__new__(cls)
        stream._parent = greenlet.getcurrent()
        stream._buffer = []
        stream.closed = False
        stream.sentinel = sentinel
        stream.pos = 0

        def run():
            func(stream)
            stream.close()

        g = greenlet.greenlet(run, stream._parent)
        while 1:
            rv = g.switch()
            if not rv:
                return
            yield rv[0]

    def close(self):
        if not self.closed:
            self.closed = True
            self._flush_impl()

    def write(self, s):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        if s:
            self.pos += len(s)
            self._buffer.append(s)

    def writelines(self, list):
        for item in list:
            self.write(item)

    def flush(self):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        self._flush_impl()

    def _flush_impl(self):
        data = _mixed_join(self._buffer, self.sentinel)
        self._buffer = []
        if not data and self.closed:
            self._parent.switch()
        else:
            self._parent.switch((data,))


class IterO(IterIO):

    """Iter output.  Wrap an iterator and give it a stream like interface."""

    def __new__(cls, gen, sentinel=''):
        self = object.__new__(cls)
        self._gen = gen
        self._buf = None
        self.sentinel = sentinel
        self.closed = False
        self.pos = 0
        return self

    def __iter__(self):
        return self

    def _buf_append(self, string):
        '''Replace string directly without appending to an empty string,
        avoiding type issues.'''
        if not self._buf:
            self._buf = string
        else:
            self._buf += string

    def close(self):
        if not self.closed:
            self.closed = True
            if hasattr(self._gen, 'close'):
                self._gen.close()

    def seek(self, pos, mode=0):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        if mode == 1:
            pos += self.pos
        elif mode == 2:
            self.read()
            self.pos = min(self.pos, self.pos + pos)
            return
        elif mode != 0:
            raise IOError('Invalid argument')
        buf = []
        try:
            tmp_end_pos = len(self._buf)
            while pos > tmp_end_pos:
                item = next(self._gen)
                tmp_end_pos += len(item)
                buf.append(item)
        except StopIteration:
            pass
        if buf:
            self._buf_append(_mixed_join(buf, self.sentinel))
        self.pos = max(0, pos)

    def read(self, n=-1):
        if self.closed:
            raise ValueError('I/O operation on closed file')
        if n < 0:
            self._buf_append(_mixed_join(self._gen, self.sentinel))
            result = self._buf[self.pos:]
            self.pos += len(result)
            return result
        new_pos = self.pos + n
        buf = []
        try:
            tmp_end_pos = 0 if self._buf is None else len(self._buf)
            while new_pos > tmp_end_pos or (self._buf is None and not buf):
                item = next(self._gen)
                tmp_end_pos += len(item)
                buf.append(item)
        except StopIteration:
            pass
        if buf:
            self._buf_append(_mixed_join(buf, self.sentinel))

        if self._buf is None:
            return self.sentinel

        new_pos = max(0, new_pos)
        try:
            return self._buf[self.pos:new_pos]
        finally:
            self.pos = min(new_pos, len(self._buf))

    def readline(self, length=None):
        if self.closed:
            raise ValueError('I/O operation on closed file')

        nl_pos = -1
        if self._buf:
            nl_pos = self._buf.find(_newline(self._buf), self.pos)
        buf = []
        try:
            if self._buf is None:
                pos = self.pos
            else:
                pos = len(self._buf)
            while nl_pos < 0:
                item = next(self._gen)
                local_pos = item.find(_newline(item))
                buf.append(item)
                if local_pos >= 0:
                    nl_pos = pos + local_pos
                    break
                pos += len(item)
        except StopIteration:
            pass
        if buf:
            self._buf_append(_mixed_join(buf, self.sentinel))

        if self._buf is None:
            return self.sentinel

        if nl_pos < 0:
            new_pos = len(self._buf)
        else:
            new_pos = nl_pos + 1
        if length is not None and self.pos + length < new_pos:
            new_pos = self.pos + length
        try:
            return self._buf[self.pos:new_pos]
        finally:
            self.pos = min(new_pos, len(self._buf))

    def readlines(self, sizehint=0):
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines
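A minimal, self-contained sketch of both directions follows; the byte
strings are placeholders, and the writer half requires `greenlet` at
runtime as the docstring notes::

    from werkzeug.contrib.iterio import IterIO

    # iterator -> stream: read from a generator with a file-like API
    stream = IterIO(iter([b'first line\n', b'second line\n']), sentinel=b'')
    print(stream.readline())    # b'first line\n'
    print(stream.read())        # b'second line\n'

    # stream -> iterator: each flush() hands one chunk to the consumer
    def producer(stream):
        stream.write(b'some')
        stream.write(b'thing')
        stream.flush()          # yields b'something'
        stream.write(b'otherthing')

    for chunk in IterIO(producer, sentinel=b''):
        print(chunk)            # b'something', then b'otherthing'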
BIN
venv/lib/python2.7/site-packages/werkzeug/contrib/iterio.pyc
Normal file
Binary file not shown.
264
venv/lib/python2.7/site-packages/werkzeug/contrib/jsrouting.py
Normal file
@@ -0,0 +1,264 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.jsrouting
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    Add-on module that allows one to create a JavaScript function from a
    map that generates rules.

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
try:
    from simplejson import dumps
except ImportError:
    try:
        from json import dumps
    except ImportError:
        def dumps(*args):
            raise RuntimeError('simplejson required for jsrouting')

from inspect import getmro
from werkzeug.routing import NumberConverter
from werkzeug._compat import iteritems


def render_template(name_parts, rules, converters):
    result = u''
    if name_parts:
        for idx in range(0, len(name_parts) - 1):
            name = u'.'.join(name_parts[:idx + 1])
            result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name)
        result += '%s = ' % '.'.join(name_parts)
    result += """(function (server_name, script_name, subdomain, url_scheme) {
    var converters = [%(converters)s];
    var rules = %(rules)s;
    function in_array(array, value) {
        if (array.indexOf != undefined) {
            return array.indexOf(value) != -1;
        }
        for (var i = 0; i < array.length; i++) {
            if (array[i] == value) {
                return true;
            }
        }
        return false;
    }
    function array_diff(array1, array2) {
        array1 = array1.slice();
        for (var i = array1.length-1; i >= 0; i--) {
            if (in_array(array2, array1[i])) {
                array1.splice(i, 1);
            }
        }
        return array1;
    }
    function split_obj(obj) {
        var names = [];
        var values = [];
        for (var name in obj) {
            if (typeof(obj[name]) != 'function') {
                names.push(name);
                values.push(obj[name]);
            }
        }
        return {names: names, values: values, original: obj};
    }
    function suitable(rule, args) {
        var default_args = split_obj(rule.defaults || {});
        var diff_arg_names = array_diff(rule.arguments, default_args.names);

        for (var i = 0; i < diff_arg_names.length; i++) {
            if (!in_array(args.names, diff_arg_names[i])) {
                return false;
            }
        }

        if (array_diff(rule.arguments, args.names).length == 0) {
            if (rule.defaults == null) {
                return true;
            }
            for (var i = 0; i < default_args.names.length; i++) {
                var key = default_args.names[i];
                var value = default_args.values[i];
                if (value != args.original[key]) {
                    return false;
                }
            }
        }

        return true;
    }
    function build(rule, args) {
        var tmp = [];
        var processed = rule.arguments.slice();
        for (var i = 0; i < rule.trace.length; i++) {
            var part = rule.trace[i];
            if (part.is_dynamic) {
                var converter = converters[rule.converters[part.data]];
                var data = converter(args.original[part.data]);
                if (data == null) {
                    return null;
                }
                tmp.push(data);
                processed.push(part.name);
            } else {
                tmp.push(part.data);
            }
        }
        tmp = tmp.join('');
        var pipe = tmp.indexOf('|');
        var subdomain = tmp.substring(0, pipe);
        var url = tmp.substring(pipe+1);

        var unprocessed = array_diff(args.names, processed);
        var first_query_var = true;
        for (var i = 0; i < unprocessed.length; i++) {
            if (first_query_var) {
                url += '?';
            } else {
                url += '&';
            }
            first_query_var = false;
            url += encodeURIComponent(unprocessed[i]);
            url += '=';
            url += encodeURIComponent(args.original[unprocessed[i]]);
        }
        return {subdomain: subdomain, path: url};
    }
    function lstrip(s, c) {
        while (s && s.substring(0, 1) == c) {
            s = s.substring(1);
        }
        return s;
    }
    function rstrip(s, c) {
        while (s && s.substring(s.length-1, s.length) == c) {
            s = s.substring(0, s.length-1);
        }
        return s;
    }
    return function(endpoint, args, force_external) {
        args = split_obj(args);
        var rv = null;
        for (var i = 0; i < rules.length; i++) {
            var rule = rules[i];
            if (rule.endpoint != endpoint) continue;
            if (suitable(rule, args)) {
                rv = build(rule, args);
                if (rv != null) {
                    break;
                }
            }
        }
        if (rv == null) {
            return null;
        }
        if (!force_external && rv.subdomain == subdomain) {
            return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/');
        } else {
            return url_scheme + '://'
                   + (rv.subdomain ? rv.subdomain + '.' : '')
                   + server_name + rstrip(script_name, '/')
                   + '/' + lstrip(rv.path, '/');
        }
    };
})""" % {'converters': u', '.join(converters),
         'rules': rules}

    return result


def generate_map(map, name='url_map'):
    """
    Generates a JavaScript function containing the rules defined in
    this map, to be used with a MapAdapter's generate_javascript
    method.  If you don't pass a name the returned JavaScript code is
    an expression that returns a function.  Otherwise it's a standalone
    script that assigns the function with that name.  Dotted names are
    resolved (so you can use a name like 'obj.url_for').

    In order to use JavaScript generation, simplejson must be installed.

    Note that using this feature will expose the rules
    defined in your map to users.  If your rules contain sensitive
    information, don't use JavaScript generation!
    """
    from warnings import warn
    warn(DeprecationWarning('This module is deprecated'))
    map.update()
    rules = []
    converters = []
    for rule in map.iter_rules():
        trace = [{
            'is_dynamic': is_dynamic,
            'data': data
        } for is_dynamic, data in rule._trace]
        rule_converters = {}
        for key, converter in iteritems(rule._converters):
            js_func = js_to_url_function(converter)
            try:
                index = converters.index(js_func)
            except ValueError:
                converters.append(js_func)
                index = len(converters) - 1
            rule_converters[key] = index
        rules.append({
            u'endpoint': rule.endpoint,
            u'arguments': list(rule.arguments),
            u'converters': rule_converters,
            u'trace': trace,
            u'defaults': rule.defaults
        })

    return render_template(name_parts=name and name.split('.') or [],
                           rules=dumps(rules),
                           converters=converters)


def generate_adapter(adapter, name='url_for', map_name='url_map'):
    """Generates the url building function for a map."""
    values = {
        u'server_name': dumps(adapter.server_name),
        u'script_name': dumps(adapter.script_name),
        u'subdomain': dumps(adapter.subdomain),
        u'url_scheme': dumps(adapter.url_scheme),
        u'name': name,
        u'map_name': map_name
    }
    return u'''\
var %(name)s = %(map_name)s(
    %(server_name)s,
    %(script_name)s,
    %(subdomain)s,
    %(url_scheme)s
);''' % values


def js_to_url_function(converter):
    """Get the JavaScript converter function from a rule."""
    if hasattr(converter, 'js_to_url_function'):
        data = converter.js_to_url_function()
    else:
        for cls in getmro(type(converter)):
            if cls in js_to_url_functions:
                data = js_to_url_functions[cls](converter)
                break
        else:
            return 'encodeURIComponent'
    return '(function(value) { %s })' % data


def NumberConverter_js_to_url(conv):
    if conv.fixed_digits:
        return u'''\
var result = value.toString();
while (result.length < %s)
    result = '0' + result;
return result;''' % conv.fixed_digits
    return u'return value.toString();'


js_to_url_functions = {
    NumberConverter: NumberConverter_js_to_url
}
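A minimal sketch of emitting the client-side router; the rules are
placeholders, and the JavaScript usage in the comment follows the template
above (note that `generate_map` itself raises a `DeprecationWarning`)::

    from werkzeug.routing import Map, Rule
    from werkzeug.contrib.jsrouting import generate_map

    url_map = Map([
        Rule('/', endpoint='index'),
        Rule('/downloads/<int:id>', endpoint='downloads/show'),
    ])
    js_source = generate_map(url_map, name='url_map')
    # Serve `js_source` to the browser.  There, binding the adapter and
    # building a URL looks like:
    #   var url_for = url_map('example.com', '', '', 'http');
    #   url_for('downloads/show', {id: 42})   // -> '/downloads/42'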
BIN
venv/lib/python2.7/site-packages/werkzeug/contrib/jsrouting.pyc
Normal file
Binary file not shown.
41
venv/lib/python2.7/site-packages/werkzeug/contrib/limiter.py
Normal file
@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.limiter
    ~~~~~~~~~~~~~~~~~~~~~~~~

    A middleware that limits incoming data.  This works around problems with
    Trac_ or Django_ because those stream the request data directly into
    memory.

    .. _Trac: http://trac.edgewall.org/
    .. _Django: http://www.djangoproject.com/

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from warnings import warn

from werkzeug.wsgi import LimitedStream


class StreamLimitMiddleware(object):

    """Limits the input stream to a given number of bytes.  This is useful if
    you have a WSGI application that reads form data into memory (Django for
    example) and you don't want users to harm the server by uploading tons of
    data.

    The default limit is 10 MB.

    .. versionchanged:: 0.9
       Deprecated middleware.
    """

    def __init__(self, app, maximum_size=1024 * 1024 * 10):
        warn(DeprecationWarning('This middleware is deprecated'))
        self.app = app
        self.maximum_size = maximum_size

    def __call__(self, environ, start_response):
        limit = min(self.maximum_size, int(environ.get('CONTENT_LENGTH') or 0))
        environ['wsgi.input'] = LimitedStream(environ['wsgi.input'], limit)
        return self.app(environ, start_response)
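A minimal sketch; the `application` callable is a placeholder.  Because the
limit is computed from `CONTENT_LENGTH`, a request that declares a larger
body simply gets a stream capped at the maximum::

    from werkzeug.contrib.limiter import StreamLimitMiddleware

    # cap request bodies at 1 MB; reads past the cap behave like EOF on
    # the wrapped LimitedStream instead of buffering the whole upload
    app = StreamLimitMiddleware(application, maximum_size=1024 * 1024)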
BIN
venv/lib/python2.7/site-packages/werkzeug/contrib/limiter.pyc
Normal file
Binary file not shown.
343
venv/lib/python2.7/site-packages/werkzeug/contrib/lint.py
Normal file
@@ -0,0 +1,343 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.lint
    ~~~~~~~~~~~~~~~~~~~~~

    .. versionadded:: 0.5

    This module provides a middleware that performs sanity checks of the WSGI
    application.  It checks that :pep:`333` is properly implemented and warns
    on some common HTTP errors such as non-empty responses for 304 status
    codes.

    This module provides a middleware, the :class:`LintMiddleware`.  Wrap
    your application with it and it will warn about common problems with
    WSGI and HTTP while your application is running.

    It's strongly recommended to use it during development.

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

from warnings import warn

from werkzeug.datastructures import Headers
from werkzeug.http import is_entity_header
from werkzeug.wsgi import FileWrapper
from werkzeug._compat import string_types


class WSGIWarning(Warning):

    """Warning class for WSGI warnings."""


class HTTPWarning(Warning):

    """Warning class for HTTP warnings."""


def check_string(context, obj, stacklevel=3):
    if type(obj) is not str:
        warn(WSGIWarning('%s requires bytestrings, got %s' %
                         (context, obj.__class__.__name__)))


class InputStream(object):

    def __init__(self, stream):
        self._stream = stream

    def read(self, *args):
        if len(args) == 0:
            warn(WSGIWarning('WSGI does not guarantee an EOF marker on the '
                             'input stream, thus making calls to '
                             'wsgi.input.read() unsafe.  Conforming servers '
                             'may never return from this call.'),
                 stacklevel=2)
        elif len(args) != 1:
            warn(WSGIWarning('too many parameters passed to wsgi.input.read()'),
                 stacklevel=2)
        return self._stream.read(*args)

    def readline(self, *args):
        if len(args) == 0:
            warn(WSGIWarning('Calls to wsgi.input.readline() without arguments'
                             ' are unsafe.  Use wsgi.input.read() instead.'),
                 stacklevel=2)
        elif len(args) == 1:
            warn(WSGIWarning('wsgi.input.readline() was called with a size hint. '
                             'WSGI does not support this, although it\'s available '
                             'on all major servers.'),
                 stacklevel=2)
        else:
            raise TypeError('too many arguments passed to wsgi.input.readline()')
        return self._stream.readline(*args)

    def __iter__(self):
        try:
            return iter(self._stream)
        except TypeError:
            warn(WSGIWarning('wsgi.input is not iterable.'), stacklevel=2)
            return iter(())

    def close(self):
        warn(WSGIWarning('application closed the input stream!'),
             stacklevel=2)
        self._stream.close()


class ErrorStream(object):

    def __init__(self, stream):
        self._stream = stream

    def write(self, s):
        check_string('wsgi.error.write()', s)
        self._stream.write(s)

    def flush(self):
        self._stream.flush()

    def writelines(self, seq):
        for line in seq:
            self.write(line)

    def close(self):
        warn(WSGIWarning('application closed the error stream!'),
             stacklevel=2)
        self._stream.close()


class GuardedWrite(object):

    def __init__(self, write, chunks):
        self._write = write
        self._chunks = chunks

    def __call__(self, s):
        check_string('write()', s)
        self._write(s)
        self._chunks.append(len(s))


class GuardedIterator(object):

    def __init__(self, iterator, headers_set, chunks):
        self._iterator = iterator
        self._next = iter(iterator).next
        self.closed = False
        self.headers_set = headers_set
        self.chunks = chunks

    def __iter__(self):
        return self

    def next(self):
        if self.closed:
            warn(WSGIWarning('iterated over closed app_iter'),
                 stacklevel=2)
        rv = self._next()
        if not self.headers_set:
            warn(WSGIWarning('Application returned before it '
                             'started the response'), stacklevel=2)
        check_string('application iterator items', rv)
        self.chunks.append(len(rv))
        return rv

    def close(self):
        self.closed = True
        if hasattr(self._iterator, 'close'):
            self._iterator.close()

        if self.headers_set:
            status_code, headers = self.headers_set
            bytes_sent = sum(self.chunks)
            content_length = headers.get('content-length', type=int)

            if status_code == 304:
                for key, value in headers:
                    key = key.lower()
                    if key not in ('expires', 'content-location') and \
                       is_entity_header(key):
                        warn(HTTPWarning('entity header %r found in 304 '
                                         'response' % key))
                if bytes_sent:
                    warn(HTTPWarning('304 responses must not have a body'))
            elif 100 <= status_code < 200 or status_code == 204:
                if content_length != 0:
                    warn(HTTPWarning('%r responses must have an empty '
                                     'content length' % status_code))
                if bytes_sent:
                    warn(HTTPWarning('%r responses must not have a body' %
                                     status_code))
            elif content_length is not None and content_length != bytes_sent:
                warn(WSGIWarning('Content-Length and the number of bytes '
                                 'sent to the client do not match.'))

    def __del__(self):
        if not self.closed:
            try:
                warn(WSGIWarning('Iterator was garbage collected before '
                                 'it was closed.'))
            except Exception:
                pass


class LintMiddleware(object):

    """This middleware wraps an application and warns on common errors.
    Among other things it currently checks for the following problems:

    -   invalid status codes
    -   non-bytestrings sent to the WSGI server
    -   strings returned from the WSGI application
    -   non-empty conditional responses
    -   unquoted etags
    -   relative URLs in the Location header
    -   unsafe calls to wsgi.input
    -   unclosed iterators

    Detected errors are emitted using the standard Python :mod:`warnings`
    system and usually end up on :data:`stderr`.

    ::

        from werkzeug.contrib.lint import LintMiddleware
        app = LintMiddleware(app)

    :param app: the application to wrap
    """

    def __init__(self, app):
        self.app = app

    def check_environ(self, environ):
        if type(environ) is not dict:
            warn(WSGIWarning('WSGI environment is not a standard Python dict.'),
                 stacklevel=4)
        for key in ('REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
                    'wsgi.version', 'wsgi.input', 'wsgi.errors',
                    'wsgi.multithread', 'wsgi.multiprocess',
                    'wsgi.run_once'):
            if key not in environ:
                warn(WSGIWarning('required environment key %r not found'
                                 % key), stacklevel=3)
        if environ['wsgi.version'] != (1, 0):
            warn(WSGIWarning('environ is not a WSGI 1.0 environ'),
                 stacklevel=3)

        script_name = environ.get('SCRIPT_NAME', '')
        if script_name and script_name[:1] != '/':
            warn(WSGIWarning('SCRIPT_NAME does not start with a slash: %r'
                             % script_name), stacklevel=3)
        path_info = environ.get('PATH_INFO', '')
        if path_info[:1] != '/':
            warn(WSGIWarning('PATH_INFO does not start with a slash: %r'
                             % path_info), stacklevel=3)

    def check_start_response(self, status, headers, exc_info):
        check_string('status', status)
        status_code = status.split(None, 1)[0]
        if len(status_code) != 3 or not status_code.isdigit():
            warn(WSGIWarning('Status code must be three digits'), stacklevel=3)
        if len(status) < 4 or status[3] != ' ':
            warn(WSGIWarning('Invalid value for status %r.  Valid '
                             'status strings are three digits, a space '
                             'and a status explanation'), stacklevel=3)
        status_code = int(status_code)
        if status_code < 100:
            warn(WSGIWarning('status code < 100 detected'), stacklevel=3)

        if type(headers) is not list:
            warn(WSGIWarning('header list is not a list'), stacklevel=3)
        for item in headers:
            if type(item) is not tuple or len(item) != 2:
                warn(WSGIWarning('Headers must be a list of 2-item tuples'),
                     stacklevel=3)
            name, value = item
            if type(name) is not str or type(value) is not str:
                warn(WSGIWarning('header items must be strings'),
                     stacklevel=3)
            if name.lower() == 'status':
                warn(WSGIWarning('The status header is not supported due to '
                                 'conflicts with the CGI spec.'),
                     stacklevel=3)

        if exc_info is not None and not isinstance(exc_info, tuple):
            warn(WSGIWarning('invalid value for exc_info'), stacklevel=3)

        headers = Headers(headers)
        self.check_headers(headers)

        return status_code, headers

    def check_headers(self, headers):
        etag = headers.get('etag')
        if etag is not None:
            if etag.startswith(('W/', 'w/')):
                if etag.startswith('w/'):
                    warn(HTTPWarning('weak etag indicator should be upcase.'),
                         stacklevel=4)
                etag = etag[2:]
            if not (etag[:1] == etag[-1:] == '"'):
                warn(HTTPWarning('unquoted etag emitted.'), stacklevel=4)

        location = headers.get('location')
        if location is not None:
            if not urlparse(location).netloc:
                warn(HTTPWarning('absolute URLs required for location header'),
                     stacklevel=4)

    def check_iterator(self, app_iter):
        if isinstance(app_iter, string_types):
            warn(WSGIWarning('application returned string.  Response will '
                             'send character for character to the client '
                             'which will kill the performance.  Return a '
                             'list or iterable instead.'), stacklevel=3)

    def __call__(self, *args, **kwargs):
        if len(args) != 2:
            warn(WSGIWarning('Two arguments to WSGI app required'), stacklevel=2)
        if kwargs:
            warn(WSGIWarning('No keyword arguments to WSGI app allowed'),
                 stacklevel=2)
        environ, start_response = args

        self.check_environ(environ)
        environ['wsgi.input'] = InputStream(environ['wsgi.input'])
        environ['wsgi.errors'] = ErrorStream(environ['wsgi.errors'])

        # hook our own file wrapper in so that applications will always
        # iterate to the end and we can check the content length
        environ['wsgi.file_wrapper'] = FileWrapper

        headers_set = []
        chunks = []

        def checking_start_response(*args, **kwargs):
            if len(args) not in (2, 3):
                warn(WSGIWarning('Invalid number of arguments: %s, expected '
                                 '2 or 3' % len(args)), stacklevel=2)
            if kwargs:
                warn(WSGIWarning('no keyword arguments allowed.'))

            status, headers = args[:2]
            if len(args) == 3:
                exc_info = args[2]
            else:
                exc_info = None

            headers_set[:] = self.check_start_response(status, headers,
                                                       exc_info)
            return GuardedWrite(start_response(status, headers, exc_info),
                                chunks)

        app_iter = self.app(environ, checking_start_response)
        self.check_iterator(app_iter)
        return GuardedIterator(app_iter, headers_set, chunks)
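A minimal sketch of using the linter during a test run; `application` is a
placeholder, and the `simplefilter` calls are one optional way of making
findings fatal::

    import warnings
    from werkzeug.contrib.lint import LintMiddleware, WSGIWarning, HTTPWarning

    app = LintMiddleware(application)

    # optionally escalate lint findings so a test suite fails loudly
    warnings.simplefilter('error', WSGIWarning)
    warnings.simplefilter('error', HTTPWarning)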
BIN
venv/lib/python2.7/site-packages/werkzeug/contrib/lint.pyc
Normal file
Binary file not shown.
147
venv/lib/python2.7/site-packages/werkzeug/contrib/profiler.py
Normal file
@@ -0,0 +1,147 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.profiler
    ~~~~~~~~~~~~~~~~~~~~~~~~~

    This module provides a simple WSGI profiler middleware for finding
    bottlenecks in web applications.  It uses the :mod:`profile` or
    :mod:`cProfile` module to do the profiling and writes the stats to the
    stream provided (defaults to stdout).

    Example usage::

        from werkzeug.contrib.profiler import ProfilerMiddleware
        app = ProfilerMiddleware(app)

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
import sys
import time
import os.path
try:
    try:
        from cProfile import Profile
    except ImportError:
        from profile import Profile
    from pstats import Stats
    available = True
except ImportError:
    available = False


class MergeStream(object):

    """An object that redirects `write` calls to multiple streams.
    Use this to log to both `sys.stdout` and a file::

        f = open('profiler.log', 'w')
        stream = MergeStream(sys.stdout, f)
        profiler = ProfilerMiddleware(app, stream)
    """

    def __init__(self, *streams):
        if not streams:
            raise TypeError('at least one stream must be given')
        self.streams = streams

    def write(self, data):
        for stream in self.streams:
            stream.write(data)


class ProfilerMiddleware(object):

    """Simple profiler middleware.  Wraps a WSGI application and profiles
    a request.  This intentionally buffers the response so that timings are
    more exact.

    By giving the `profile_dir` argument, pstats.Stats files are saved to
    that directory, one file per request.  Without it, a summary is printed
    to `stream` instead.

    For the exact meaning of `sort_by` and `restrictions` consult the
    :mod:`profile` documentation.

    .. versionadded:: 0.9
       Added support for `restrictions` and `profile_dir`.

    :param app: the WSGI application to profile.
    :param stream: the stream for the profiled stats.  Defaults to
                   ``sys.stdout``.
    :param sort_by: a tuple of columns to sort the result by.
    :param restrictions: a tuple of profiling restrictions, not used if
                         dumping to `profile_dir`.
    :param profile_dir: directory name to save pstats files.
    """

    def __init__(self, app, stream=None,
                 sort_by=('time', 'calls'), restrictions=(), profile_dir=None):
        if not available:
            raise RuntimeError('the profiler is not available because '
                               'profile or pstats is not installed.')
        self._app = app
        self._stream = stream or sys.stdout
        self._sort_by = sort_by
        self._restrictions = restrictions
        self._profile_dir = profile_dir

    def __call__(self, environ, start_response):
        response_body = []

        def catching_start_response(status, headers, exc_info=None):
            start_response(status, headers, exc_info)
            return response_body.append

        def runapp():
            appiter = self._app(environ, catching_start_response)
            response_body.extend(appiter)
            if hasattr(appiter, 'close'):
                appiter.close()

        p = Profile()
        start = time.time()
        p.runcall(runapp)
        body = b''.join(response_body)
        elapsed = time.time() - start

        if self._profile_dir is not None:
            prof_filename = os.path.join(self._profile_dir,
                                         '%s.%s.%06dms.%d.prof' % (
                                             environ['REQUEST_METHOD'],
                                             environ.get('PATH_INFO').strip(
                                                 '/').replace('/', '.') or 'root',
                                             elapsed * 1000.0,
                                             time.time()
                                         ))
            p.dump_stats(prof_filename)

        else:
            stats = Stats(p, stream=self._stream)
            stats.sort_stats(*self._sort_by)

            self._stream.write('-' * 80)
            self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
            stats.print_stats(*self._restrictions)
            self._stream.write('-' * 80 + '\n\n')

        return [body]


def make_action(app_factory, hostname='localhost', port=5000,
                threaded=False, processes=1, stream=None,
                sort_by=('time', 'calls'), restrictions=()):
    """Return a new callback for :mod:`werkzeug.script` that starts a local
    server with the profiler enabled.

    ::

        from werkzeug.contrib import profiler
        action_profile = profiler.make_action(make_app)
    """
    def action(hostname=('h', hostname), port=('p', port),
               threaded=threaded, processes=processes):
        """Start a new development server."""
        from werkzeug.serving import run_simple
        app = ProfilerMiddleware(app_factory(), stream, sort_by, restrictions)
        run_simple(hostname, port, app, False, None, threaded, processes)
    return action
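A minimal sketch of per-request profile dumps; `application` and the
`profiles` directory name are placeholders, and the directory must already
exist because the middleware does not create it::

    import os
    from werkzeug.contrib.profiler import ProfilerMiddleware

    if not os.path.isdir('profiles'):
        os.makedirs('profiles')
    app = ProfilerMiddleware(application, profile_dir='profiles')
    # each request now writes one pstats file, named like
    #   GET.root.000123ms.1400000000.prof
    # which can be inspected with `python -m pstats profiles/<file>`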
BIN
venv/lib/python2.7/site-packages/werkzeug/contrib/profiler.pyc
Normal file
Binary file not shown.
@@ -0,0 +1,323 @@
# -*- coding: utf-8 -*-
r"""
    werkzeug.contrib.securecookie
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    This module implements a cookie that is not alterable from the client
    because it adds a checksum the server checks for.  You can use it as
    a session replacement if all you have is a user id or something to mark
    a logged in user.

    Keep in mind that the data is still readable from the client as a
    normal cookie is.  However you don't have to store and flush the
    sessions you have at the server.

    Example usage:

    >>> from werkzeug.contrib.securecookie import SecureCookie
    >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef")

    Dumping into a string so that one can store it in a cookie:

    >>> value = x.serialize()

    Loading from that string again:

    >>> x = SecureCookie.unserialize(value, "deadbeef")
    >>> x["baz"]
    (1, 2, 3)

    If someone modifies the cookie and the checksum is wrong the unserialize
    method will fail silently and return a new empty `SecureCookie` object.

    Keep in mind that the values will be visible in the cookie so do not
    store data in a cookie you don't want the user to see.

    Application Integration
    =======================

    If you are using the werkzeug request objects you could integrate the
    secure cookie into your application like this::

        from werkzeug.utils import cached_property
        from werkzeug.wrappers import BaseRequest
        from werkzeug.contrib.securecookie import SecureCookie

        # don't use this key but a different one; you could just use
        # os.urandom(20) to get something random
        SECRET_KEY = '\xfa\xdd\xb8z\xae\xe0}4\x8b\xea'

        class Request(BaseRequest):

            @cached_property
            def client_session(self):
                data = self.cookies.get('session_data')
                if not data:
                    return SecureCookie(secret_key=SECRET_KEY)
                return SecureCookie.unserialize(data, SECRET_KEY)

        def application(environ, start_response):
            request = Request(environ)

            # get a response object here
            response = ...

            if request.client_session.should_save:
                session_data = request.client_session.serialize()
                response.set_cookie('session_data', session_data,
                                    httponly=True)
            return response(environ, start_response)

    A less verbose integration can be achieved by using shorthand methods::

        class Request(BaseRequest):

            @cached_property
            def client_session(self):
                return SecureCookie.load_cookie(self, secret_key=COOKIE_SECRET)

        def application(environ, start_response):
            request = Request(environ)

            # get a response object here
            response = ...

            request.client_session.save_cookie(response)
            return response(environ, start_response)

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
import pickle
import base64
from hmac import new as hmac
from time import time
from hashlib import sha1 as _default_hash

from werkzeug._compat import iteritems, text_type
from werkzeug.urls import url_quote_plus, url_unquote_plus
from werkzeug._internal import _date_to_unix
from werkzeug.contrib.sessions import ModificationTrackingDict
from werkzeug.security import safe_str_cmp
from werkzeug._compat import to_native


class UnquoteError(Exception):

    """Internal exception used to signal failures on quoting."""


class SecureCookie(ModificationTrackingDict):

    """Represents a secure cookie.  You can subclass this class and provide
    an alternative mac method.  The important thing is that the mac method
    is a function with a similar interface to the hashlib.  Required
    methods are update() and digest().

    Example usage:

    >>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef")
    >>> x["foo"]
    42
    >>> x["baz"]
    (1, 2, 3)
    >>> x["blafasel"] = 23
    >>> x.should_save
    True

    :param data: the initial data.  Either a dict, list of tuples or `None`.
    :param secret_key: the secret key.  If set to `None` or not specified
                       it has to be set before :meth:`serialize` is called.
    :param new: The initial value of the `new` flag.
    """

    #: The hash method to use.  This has to be a module with a new function
    #: or a function that creates a hashlib object, such as `hashlib.md5`.
    #: Subclasses can override this attribute.  The default hash is sha1.
    #: Make sure to wrap this in staticmethod() if you store an arbitrary
    #: function there such as hashlib.sha1 which might be implemented
    #: as a function.
    hash_method = staticmethod(_default_hash)

    #: the module used for serialization.  Unless overridden by subclasses
    #: the standard pickle module is used.
    serialization_method = pickle

    #: if the contents should be base64 quoted.  This can be disabled if the
    #: serialization process returns cookie safe strings only.
    quote_base64 = True

    def __init__(self, data=None, secret_key=None, new=True):
        ModificationTrackingDict.__init__(self, data or ())
        # explicitly convert it into a bytestring because python 2.6
        # no longer performs an implicit string conversion on hmac
        if secret_key is not None:
            secret_key = bytes(secret_key)
        self.secret_key = secret_key
        self.new = new

    def __repr__(self):
        return '<%s %s%s>' % (
            self.__class__.__name__,
            dict.__repr__(self),
            self.should_save and '*' or ''
        )

    @property
    def should_save(self):
        """True if the session should be saved.  By default this is only true
        for :attr:`modified` cookies, not :attr:`new`.
        """
        return self.modified

    @classmethod
    def quote(cls, value):
        """Quote the value for the cookie.  This can be any object supported
        by :attr:`serialization_method`.

        :param value: the value to quote.
        """
        if cls.serialization_method is not None:
            value = cls.serialization_method.dumps(value)
        if cls.quote_base64:
            value = b''.join(base64.b64encode(value).splitlines()).strip()
        return value

    @classmethod
    def unquote(cls, value):
        """Unquote the value for the cookie.  If unquoting does not work a
        :exc:`UnquoteError` is raised.

        :param value: the value to unquote.
        """
        try:
            if cls.quote_base64:
                value = base64.b64decode(value)
            if cls.serialization_method is not None:
                value = cls.serialization_method.loads(value)
            return value
        except Exception:
            # unfortunately pickle and other serialization modules can
            # cause pretty much every error here.  if we get one we catch
            # it and convert it into an UnquoteError
            raise UnquoteError()

    def serialize(self, expires=None):
        """Serialize the secure cookie into a string.

        If expires is provided, the session will be automatically invalidated
        after expiration when you unserialize it.  This provides better
        protection against session cookie theft.

        :param expires: an optional expiration date for the cookie (a
                        :class:`datetime.datetime` object)
        """
        if self.secret_key is None:
            raise RuntimeError('no secret key defined')
        if expires:
            self['_expires'] = _date_to_unix(expires)
        result = []
        mac = hmac(self.secret_key, None, self.hash_method)
        for key, value in sorted(self.items()):
            result.append(('%s=%s' % (
                url_quote_plus(key),
                self.quote(value).decode('ascii')
            )).encode('ascii'))
            mac.update(b'|' + result[-1])
        return b'?'.join([
            base64.b64encode(mac.digest()).strip(),
            b'&'.join(result)
        ])

    @classmethod
    def unserialize(cls, string, secret_key):
        """Load the secure cookie from a serialized string.

        :param string: the cookie value to unserialize.
        :param secret_key: the secret key used to serialize the cookie.
        :return: a new :class:`SecureCookie`.
        """
        if isinstance(string, text_type):
            string = string.encode('utf-8', 'replace')
        if isinstance(secret_key, text_type):
            secret_key = secret_key.encode('utf-8', 'replace')
        try:
            base64_hash, data = string.split(b'?', 1)
        except (ValueError, IndexError):
            items = ()
        else:
            items = {}
            mac = hmac(secret_key, None, cls.hash_method)
            for item in data.split(b'&'):
                mac.update(b'|' + item)
                if b'=' not in item:
                    items = None
                    break
                key, value = item.split(b'=', 1)
                # try to make the key a string
                key = url_unquote_plus(key.decode('ascii'))
                try:
                    key = to_native(key)
                except UnicodeError:
                    pass
                items[key] = value

            # no parsing error and the mac looks okay, we can now
            # securely unpickle our cookie.
            try:
                client_hash = base64.b64decode(base64_hash)
            except TypeError:
                items = client_hash = None
            if items is not None and safe_str_cmp(client_hash, mac.digest()):
                try:
                    for key, value in iteritems(items):
                        items[key] = cls.unquote(value)
                except UnquoteError:
                    items = ()
                else:
                    if '_expires' in items:
                        if time() > items['_expires']:
                            items = ()
                        else:
                            del items['_expires']
            else:
                items = ()
        return cls(items, secret_key, False)

    @classmethod
    def load_cookie(cls, request, key='session', secret_key=None):
        """Loads a :class:`SecureCookie` from a cookie in request.  If the
        cookie is not set, a new :class:`SecureCookie` instance is
        returned.

        :param request: a request object that has a `cookies` attribute
                        which is a dict of all cookie values.
        :param key: the name of the cookie.
        :param secret_key: the secret key used to unquote the cookie.
                           Always provide the value even though it has
                           no default!
        """
        data = request.cookies.get(key)
        if not data:
            return cls(secret_key=secret_key)
        return cls.unserialize(data, secret_key)

    def save_cookie(self, response, key='session', expires=None,
                    session_expires=None, max_age=None, path='/', domain=None,
                    secure=None, httponly=False, force=False):
        """Saves the SecureCookie in a cookie on response object.  All
        parameters that are not described here are forwarded directly
        to :meth:`~BaseResponse.set_cookie`.

        :param response: a response object that has a
                         :meth:`~BaseResponse.set_cookie` method.
        :param key: the name of the cookie.
        :param session_expires: the expiration date of the secure cookie
                                stored information.  If this is not provided
                                the cookie `expires` date is used instead.
        """
        if force or self.should_save:
            data = self.serialize(session_expires or expires)
            response.set_cookie(key, data, expires=expires, max_age=max_age,
                                path=path, domain=domain, secure=secure,
                                httponly=httponly)
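A minimal sketch of the expiring round trip described in `serialize` and
`unserialize`; the key and data are placeholders::

    from datetime import datetime, timedelta
    from werkzeug.contrib.securecookie import SecureCookie

    SECRET = b'change-me'       # placeholder key; prefer os.urandom(20)
    cookie = SecureCookie({'user_id': 42}, SECRET)
    value = cookie.serialize(expires=datetime.utcnow() + timedelta(hours=1))

    # a valid, unexpired value restores the data; a tampered or expired
    # value silently yields an empty SecureCookie instead
    restored = SecureCookie.unserialize(value, SECRET)
    print(restored.get('user_id'))      # 42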
Binary file not shown.
352
venv/lib/python2.7/site-packages/werkzeug/contrib/sessions.py
Normal file
@@ -0,0 +1,352 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
r"""
|
||||
werkzeug.contrib.sessions
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains some helper classes that help one to add session
|
||||
support to a python WSGI application. For full client-side session
|
||||
storage see :mod:`~werkzeug.contrib.securecookie` which implements a
|
||||
secure, client-side session storage.
|
||||
|
||||
|
||||
Application Integration
|
||||
=======================
|
||||
|
||||
::
|
||||
|
||||
from werkzeug.contrib.sessions import SessionMiddleware, \
|
||||
FilesystemSessionStore
|
||||
|
||||
app = SessionMiddleware(app, FilesystemSessionStore())
|
||||
|
||||
The current session will then appear in the WSGI environment as
|
||||
`werkzeug.session`. However it's recommended to not use the middleware
|
||||
but the stores directly in the application. However for very simple
|
||||
scripts a middleware for sessions could be sufficient.
|
||||
|
||||
This module does not implement methods or ways to check if a session is
|
||||
expired. That should be done by a cronjob and storage specific. For
|
||||
example to prune unused filesystem sessions one could check the modified
|
||||
time of the files. If sessions are stored in the database the new()
|
||||
method should add an expiration timestamp for the session.
|
||||
|
||||
For better flexibility it's recommended to not use the middleware but the
|
||||
store and session object directly in the application dispatching::
|
||||
|
||||
session_store = FilesystemSessionStore()
|
||||
|
||||
def application(environ, start_response):
|
||||
request = Request(environ)
|
||||
sid = request.cookies.get('cookie_name')
|
||||
if sid is None:
|
||||
request.session = session_store.new()
|
||||
else:
|
||||
request.session = session_store.get(sid)
|
||||
response = get_the_response_object(request)
|
||||
if request.session.should_save:
|
||||
session_store.save(request.session)
|
||||
response.set_cookie('cookie_name', request.session.sid)
|
||||
return response(environ, start_response)
|
||||
|
||||
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
import re
|
||||
import os
|
||||
import tempfile
|
||||
from os import path
|
||||
from time import time
|
||||
from random import random
|
||||
from hashlib import sha1
|
||||
from pickle import dump, load, HIGHEST_PROTOCOL
|
||||
|
||||
from werkzeug.datastructures import CallbackDict
|
||||
from werkzeug.utils import dump_cookie, parse_cookie
|
||||
from werkzeug.wsgi import ClosingIterator
|
||||
from werkzeug.posixemulation import rename
|
||||
from werkzeug._compat import PY2, text_type
|
||||
from werkzeug.filesystem import get_filesystem_encoding
|
||||
|
||||
|
||||
_sha1_re = re.compile(r'^[a-f0-9]{40}$')
|
||||
|
||||
|
||||
def _urandom():
|
||||
if hasattr(os, 'urandom'):
|
||||
return os.urandom(30)
|
||||
return text_type(random()).encode('ascii')
|
||||
|
||||
|
||||
def generate_key(salt=None):
|
||||
if salt is None:
|
||||
salt = repr(salt).encode('ascii')
|
||||
return sha1(b''.join([
|
||||
salt,
|
||||
str(time()).encode('ascii'),
|
||||
_urandom()
|
||||
])).hexdigest()
|
||||
|
||||
|
||||
class ModificationTrackingDict(CallbackDict):
|
||||
__slots__ = ('modified',)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
def on_update(self):
|
||||
self.modified = True
|
||||
self.modified = False
|
||||
CallbackDict.__init__(self, on_update=on_update)
|
||||
dict.update(self, *args, **kwargs)
|
||||
|
||||
def copy(self):
|
||||
"""Create a flat copy of the dict."""
|
||||
missing = object()
|
||||
result = object.__new__(self.__class__)
|
||||
for name in self.__slots__:
|
||||
val = getattr(self, name, missing)
|
||||
if val is not missing:
|
||||
setattr(result, name, val)
|
||||
return result
|
||||
|
||||
def __copy__(self):
|
||||
return self.copy()
|
||||
|
||||
|
||||
class Session(ModificationTrackingDict):
|
||||
|
||||
"""Subclass of a dict that keeps track of direct object changes. Changes
|
||||
in mutable structures are not tracked, for those you have to set
|
||||
`modified` to `True` by hand.
|
||||
"""
|
||||
__slots__ = ModificationTrackingDict.__slots__ + ('sid', 'new')
|
||||
|
||||
def __init__(self, data, sid, new=False):
|
||||
ModificationTrackingDict.__init__(self, data)
|
||||
self.sid = sid
|
||||
self.new = new
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s %s%s>' % (
|
||||
self.__class__.__name__,
|
||||
dict.__repr__(self),
|
||||
self.should_save and '*' or ''
|
||||
)
|
||||
|
||||
@property
|
||||
def should_save(self):
|
||||
"""True if the session should be saved.
|
||||
|
||||
.. versionchanged:: 0.6
|
||||
By default the session is now only saved if the session is
|
||||
modified, not if it is new like it was before.
|
||||
"""
|
||||
return self.modified
|
||||
|
||||
|
||||
class SessionStore(object):

    """Baseclass for all session stores.  The Werkzeug contrib module does
    not implement any useful stores besides the filesystem store;
    application developers are encouraged to create their own stores.

    :param session_class: The session class to use.  Defaults to
                          :class:`Session`.
    """

    def __init__(self, session_class=None):
        if session_class is None:
            session_class = Session
        self.session_class = session_class

    def is_valid_key(self, key):
        """Check if a key has the correct format."""
        return _sha1_re.match(key) is not None

    def generate_key(self, salt=None):
        """Simple function that generates a new session key."""
        return generate_key(salt)

    def new(self):
        """Generate a new session."""
        return self.session_class({}, self.generate_key(), True)

    def save(self, session):
        """Save a session."""

    def save_if_modified(self, session):
        """Save if a session class wants an update."""
        if session.should_save:
            self.save(session)

    def delete(self, session):
        """Delete a session."""

    def get(self, sid):
        """Get a session for this sid or a new session object.  This
        method has to check if the session key is valid and create a new
        session if that is not the case.
        """
        return self.session_class({}, sid, True)

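Because the docstring above encourages custom stores, here is a minimal dict-backed sketch (hypothetical; `MemorySessionStore` is not part of this commit) showing which methods a subclass has to override::

    class MemorySessionStore(SessionStore):
        def __init__(self, session_class=None):
            SessionStore.__init__(self, session_class)
            self.sessions = {}

        def save(self, session):
            self.sessions[session.sid] = dict(session)

        def delete(self, session):
            self.sessions.pop(session.sid, None)

        def get(self, sid):
            if not self.is_valid_key(sid) or sid not in self.sessions:
                return self.new()
            return self.session_class(self.sessions[sid], sid, False)
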
#: used for temporary files by the filesystem session store
_fs_transaction_suffix = '.__wz_sess'


class FilesystemSessionStore(SessionStore):

    """Simple example session store that saves sessions on the filesystem.
    This store works best on POSIX systems and Windows Vista / Windows
    Server 2008 and newer.

    .. versionchanged:: 0.6
       `renew_missing` was added.  Previously this was always treated as
       `True`; the default is now `False` and it has to be enabled
       explicitly.

    :param path: the path to the folder used for storing the sessions.
                 If not provided the default temporary directory is used.
    :param filename_template: a string template used to give the session
                              a filename.  ``%s`` is replaced with the
                              session id.
    :param session_class: The session class to use.  Defaults to
                          :class:`Session`.
    :param renew_missing: set to `True` if you want the store to
                          give the user a new sid if the session was
                          not yet saved.
    :param mode: the file mode used for newly written session files
                 (defaults to ``0o644``).
    """

    def __init__(self, path=None, filename_template='werkzeug_%s.sess',
                 session_class=None, renew_missing=False, mode=0o644):
        SessionStore.__init__(self, session_class)
        if path is None:
            path = tempfile.gettempdir()
        self.path = path
        if isinstance(filename_template, text_type) and PY2:
            filename_template = filename_template.encode(
                get_filesystem_encoding())
        assert not filename_template.endswith(_fs_transaction_suffix), \
            'filename templates may not end with %s' % _fs_transaction_suffix
        self.filename_template = filename_template
        self.renew_missing = renew_missing
        self.mode = mode

    def get_session_filename(self, sid):
        # out of the box, this should be a strict ASCII subset but
        # you might reconfigure the session object to have a more
        # arbitrary string.
        if isinstance(sid, text_type) and PY2:
            sid = sid.encode(get_filesystem_encoding())
        return path.join(self.path, self.filename_template % sid)

    def save(self, session):
        fn = self.get_session_filename(session.sid)
        fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix,
                                   dir=self.path)
        f = os.fdopen(fd, 'wb')
        try:
            dump(dict(session), f, HIGHEST_PROTOCOL)
        finally:
            f.close()
        try:
            # write atomically: pickle into a temporary file first, then
            # move it over the real session file
            rename(tmp, fn)
            os.chmod(fn, self.mode)
        except (IOError, OSError):
            pass

    def delete(self, session):
        fn = self.get_session_filename(session.sid)
        try:
            os.unlink(fn)
        except OSError:
            pass

    def get(self, sid):
        if not self.is_valid_key(sid):
            return self.new()
        try:
            f = open(self.get_session_filename(sid), 'rb')
        except IOError:
            if self.renew_missing:
                return self.new()
            data = {}
        else:
            try:
                try:
                    data = load(f)
                except Exception:
                    data = {}
            finally:
                f.close()
        return self.session_class(data, sid, False)

    def list(self):
        """Lists all sessions in the store.

        .. versionadded:: 0.6
        """
        before, after = self.filename_template.split('%s', 1)
        filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before),
                                                    re.escape(after)))
        result = []
        for filename in os.listdir(self.path):
            #: this is a session that is still being saved.
            if filename.endswith(_fs_transaction_suffix):
                continue
            match = filename_re.match(filename)
            if match is not None:
                result.append(match.group(1))
        return result

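Typical use of the filesystem store above, assuming a writable temporary directory (an editorial sketch, not part of the imported file)::

    store = FilesystemSessionStore()
    session = store.new()
    session['visits'] = 1
    store.save(session)

    # later, with the sid read back from a cookie
    session = store.get(session.sid)
    assert session['visits'] == 1
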
class SessionMiddleware(object):

    """A simple middleware that puts the session object of a store provided
    into the WSGI environ.  It automatically sets cookies and restores
    sessions.

    However, a middleware is not the preferred solution: it will not be as
    fast as sessions managed by the application itself, and it puts a key
    into the WSGI environ that is only relevant for one application, which
    goes against the concept of WSGI.

    The cookie parameters are the same as for the :func:`~dump_cookie`
    function, just prefixed with ``cookie_``.  Additionally `max_age` is
    called `cookie_age` and not `cookie_max_age` for backwards
    compatibility.
    """

    def __init__(self, app, store, cookie_name='session_id',
                 cookie_age=None, cookie_expires=None, cookie_path='/',
                 cookie_domain=None, cookie_secure=None,
                 cookie_httponly=False, environ_key='werkzeug.session'):
        self.app = app
        self.store = store
        self.cookie_name = cookie_name
        self.cookie_age = cookie_age
        self.cookie_expires = cookie_expires
        self.cookie_path = cookie_path
        self.cookie_domain = cookie_domain
        self.cookie_secure = cookie_secure
        self.cookie_httponly = cookie_httponly
        self.environ_key = environ_key

    def __call__(self, environ, start_response):
        cookie = parse_cookie(environ.get('HTTP_COOKIE', ''))
        sid = cookie.get(self.cookie_name, None)
        if sid is None:
            session = self.store.new()
        else:
            session = self.store.get(sid)
        environ[self.environ_key] = session

        def injecting_start_response(status, headers, exc_info=None):
            if session.should_save:
                self.store.save(session)
                headers.append(('Set-Cookie', dump_cookie(
                    self.cookie_name, session.sid, self.cookie_age,
                    self.cookie_expires, self.cookie_path,
                    self.cookie_domain, self.cookie_secure,
                    self.cookie_httponly)))
            return start_response(status, headers, exc_info)
        return ClosingIterator(self.app(environ, injecting_start_response),
                               lambda: self.store.save_if_modified(session))
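
Wiring the middleware around an application then looks like this (a sketch; the `application` function is a placeholder).  The session is picked up under the default `environ_key`::

    def application(environ, start_response):
        session = environ['werkzeug.session']
        session['visits'] = session.get('visits', 0) + 1
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [('%d visits' % session['visits']).encode('ascii')]

    application = SessionMiddleware(application, FilesystemSessionStore())
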
BIN venv/lib/python2.7/site-packages/werkzeug/contrib/sessions.pyc Normal file
Binary file not shown.
@@ -0,0 +1,73 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.testtools
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    This module implements extended wrappers for simplified testing.

    `TestResponse`
        A response wrapper which adds various cached attributes for
        simplified assertions on various content types.

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
from werkzeug.utils import cached_property, import_string
from werkzeug.wrappers import Response

from warnings import warn
warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and '
                        'will be removed with Werkzeug 1.0'))


class ContentAccessors(object):

    """
    A mixin class for response objects that provides a couple of useful
    accessors for unittesting.
    """

    def xml(self):
        """Get an etree if possible."""
        if 'xml' not in self.mimetype:
            raise AttributeError(
                'Not an XML response (Content-Type: %s)'
                % self.mimetype)
        for module in ['xml.etree.ElementTree', 'ElementTree',
                       'elementtree.ElementTree']:
            etree = import_string(module, silent=True)
            if etree is not None:
                # the response body lives in `data`; the response object
                # has no `body` attribute
                return etree.XML(self.data)
        raise RuntimeError('You must have ElementTree installed '
                           'to use TestResponse.xml')
    xml = cached_property(xml)

    def lxml(self):
        """Get an lxml etree if possible."""
        if ('html' not in self.mimetype and 'xml' not in self.mimetype):
            raise AttributeError('Not an HTML/XML response')
        from lxml import etree
        try:
            from lxml.html import fromstring
        except ImportError:
            fromstring = etree.HTML
        if self.mimetype == 'text/html':
            return fromstring(self.data)
        return etree.XML(self.data)
    lxml = cached_property(lxml)

    def json(self):
        """Get the result of simplejson.loads if possible."""
        if 'json' not in self.mimetype:
            raise AttributeError('Not a JSON response')
        try:
            from simplejson import loads
        except ImportError:
            from json import loads
        return loads(self.data)
    json = cached_property(json)


class TestResponse(Response, ContentAccessors):

    """Pass this to `werkzeug.test.Client` for easier unittesting."""
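
A short usage sketch for the wrapper (the `application` and the route are hypothetical, not part of this commit)::

    from werkzeug.test import Client

    client = Client(application, response_wrapper=TestResponse)
    response = client.get('/api/items')
    assert response.json == {'items': []}
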
BIN venv/lib/python2.7/site-packages/werkzeug/contrib/testtools.pyc Normal file
Binary file not shown.
284 venv/lib/python2.7/site-packages/werkzeug/contrib/wrappers.py Normal file
@@ -0,0 +1,284 @@
# -*- coding: utf-8 -*-
"""
    werkzeug.contrib.wrappers
    ~~~~~~~~~~~~~~~~~~~~~~~~~

    Extra wrappers or mixins contributed by the community.  These wrappers
    can be mixed into request objects to add extra functionality.

    Example::

        from werkzeug.wrappers import Request as RequestBase
        from werkzeug.contrib.wrappers import JSONRequestMixin

        class Request(RequestBase, JSONRequestMixin):
            pass

    Afterwards this request object provides the extra functionality of the
    :class:`JSONRequestMixin`.

    :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
    :license: BSD, see LICENSE for more details.
"""
import codecs
try:
    from simplejson import loads
except ImportError:
    from json import loads

from werkzeug.exceptions import BadRequest
from werkzeug.utils import cached_property
from werkzeug.http import dump_options_header, parse_options_header
from werkzeug._compat import wsgi_decoding_dance


def is_known_charset(charset):
    """Checks if the given charset is known to Python."""
    try:
        codecs.lookup(charset)
    except LookupError:
        return False
    return True


class JSONRequestMixin(object):

    """Adds a `json` property to a request object.  This will parse the
    input data through simplejson (or the stdlib ``json`` module) if
    possible.

    :exc:`~werkzeug.exceptions.BadRequest` will be raised if the
    content type is not json or if the data itself cannot be parsed
    as json.
    """

    @cached_property
    def json(self):
        """Get the result of simplejson.loads if possible."""
        if 'json' not in self.environ.get('CONTENT_TYPE', ''):
            raise BadRequest('Not a JSON request')
        try:
            return loads(self.data.decode(self.charset, self.encoding_errors))
        except Exception:
            raise BadRequest('Unable to read JSON request')

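Combined with the `Request` subclass from the module docstring, a handler can read the parsed body directly (a sketch; `handle` is a placeholder)::

    class Request(RequestBase, JSONRequestMixin):
        pass

    def handle(environ):
        request = Request(environ)
        payload = request.json    # raises BadRequest on non-JSON input
        return payload.get('name')
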
class ProtobufRequestMixin(object):

    """Adds a protobuf parsing method to a request object.  This will parse
    the input data through `protobuf`_ if possible.

    :exc:`~werkzeug.exceptions.BadRequest` will be raised if the
    content type is not protobuf or if the data itself cannot be parsed
    properly.

    .. _protobuf: http://code.google.com/p/protobuf/
    """

    #: by default the :class:`ProtobufRequestMixin` will raise a
    #: :exc:`~werkzeug.exceptions.BadRequest` if the object is not
    #: initialized.  You can bypass that check by setting this
    #: attribute to `False`.
    protobuf_check_initialization = True

    def parse_protobuf(self, proto_type):
        """Parse the data into an instance of proto_type."""
        if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''):
            raise BadRequest('Not a Protobuf request')

        obj = proto_type()
        try:
            obj.ParseFromString(self.data)
        except Exception:
            raise BadRequest("Unable to parse Protobuf request")

        # Fail if not all required fields are set
        if self.protobuf_check_initialization and not obj.IsInitialized():
            raise BadRequest("Partial Protobuf request")

        return obj


class RoutingArgsRequestMixin(object):

    """This request mixin adds support for the wsgiorg routing args
    `specification`_.

    .. _specification: https://wsgi.readthedocs.io/en/latest/specifications/routing_args.html
    """

    def _get_routing_args(self):
        # default to an empty (positional, keyword) pair so that a
        # missing environ key does not raise an IndexError
        return self.environ.get('wsgiorg.routing_args', ((), {}))[0]

    def _set_routing_args(self, value):
        if self.shallow:
            raise RuntimeError('A shallow request tried to modify the WSGI '
                               'environment.  If you really want to do that, '
                               'set `shallow` to False.')
        self.environ['wsgiorg.routing_args'] = (value, self.routing_vars)

    routing_args = property(_get_routing_args, _set_routing_args, doc='''
        The positional URL arguments as `tuple`.''')
    del _get_routing_args, _set_routing_args

    def _get_routing_vars(self):
        rv = self.environ.get('wsgiorg.routing_args')
        if rv is not None:
            return rv[1]
        rv = {}
        if not self.shallow:
            self.routing_vars = rv
        return rv

    def _set_routing_vars(self, value):
        if self.shallow:
            raise RuntimeError('A shallow request tried to modify the WSGI '
                               'environment.  If you really want to do that, '
                               'set `shallow` to False.')
        self.environ['wsgiorg.routing_args'] = (self.routing_args, value)

    routing_vars = property(_get_routing_vars, _set_routing_vars, doc='''
        The keyword URL arguments as `dict`.''')
    del _get_routing_vars, _set_routing_vars

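Given the `(positional, keyword)` pair the specification stores in the environ, the two properties above map onto it like this (an editorial sketch)::

    request.environ['wsgiorg.routing_args'] = (('2019',), {'slug': 'intro'})
    request.routing_args   # ('2019',)
    request.routing_vars   # {'slug': 'intro'}
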
class ReverseSlashBehaviorRequestMixin(object):

    """This mixin reverses the trailing slash behavior of :attr:`script_root`
    and :attr:`path`.  This makes it possible to use :func:`~urlparse.urljoin`
    directly on the paths.

    Because it changes the behavior of :class:`Request` this class has to be
    mixed in *before* the actual request class::

        class MyRequest(ReverseSlashBehaviorRequestMixin, Request):
            pass

    This example shows the differences (for an application mounted on
    `/application` and the request going to `/application/foo/bar`):

    +---------------+-------------------+---------------------+
    |               | normal behavior   | reverse behavior    |
    +===============+===================+=====================+
    | `script_root` | ``/application``  | ``/application/``   |
    +---------------+-------------------+---------------------+
    | `path`        | ``/foo/bar``      | ``foo/bar``         |
    +---------------+-------------------+---------------------+
    """

    @cached_property
    def path(self):
        """Requested path as unicode.  This works a bit like the regular path
        info in the WSGI environment but will not include a leading slash.
        """
        path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '',
                                   self.charset, self.encoding_errors)
        return path.lstrip('/')

    @cached_property
    def script_root(self):
        """The root path of the script including a trailing slash."""
        path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '',
                                   self.charset, self.encoding_errors)
        return path.rstrip('/') + '/'

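The reversed behavior exists so that :func:`~urlparse.urljoin` yields the expected absolute path; with the `/application/foo/bar` request from the table above (a sketch)::

    >>> from urlparse import urljoin   # `urllib.parse` on Python 3
    >>> urljoin(request.script_root, request.path)
    '/application/foo/bar'
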
class DynamicCharsetRequestMixin(object):

    """If this mixin is mixed into a request class it will provide
    a dynamic `charset` attribute.  This means that if the charset is
    transmitted in the content type headers it's used from there.

    Because it changes the behavior of :class:`Request` this class has
    to be mixed in *before* the actual request class::

        class MyRequest(DynamicCharsetRequestMixin, Request):
            pass

    By default the request object assumes that the URL charset is the
    same as the data charset.  If the charset varies on each request
    based on the transmitted data it's not a good idea to let the URLs
    change based on that.  Most browsers assume either utf-8 or latin1
    for the URLs if they have trouble figuring it out.  It's strongly
    recommended to set the URL charset to utf-8::

        class MyRequest(DynamicCharsetRequestMixin, Request):
            url_charset = 'utf-8'

    .. versionadded:: 0.6
    """

    #: the default charset that is assumed if the content type header
    #: is missing or does not contain a charset parameter.  The default
    #: is latin1 which is what HTTP specifies as default charset.
    #: You may however want to set this to utf-8 to better support
    #: browsers that do not transmit a charset for incoming data.
    default_charset = 'latin1'

    def unknown_charset(self, charset):
        """Called if a charset was provided but is not supported by
        the Python codecs module.  By default latin1 is then assumed
        so that no information is lost; you may override this method
        to change the behavior.

        :param charset: the charset that was not found.
        :return: the replacement charset.
        """
        return 'latin1'

    @cached_property
    def charset(self):
        """The charset from the content type."""
        header = self.environ.get('CONTENT_TYPE')
        if header:
            ct, options = parse_options_header(header)
            charset = options.get('charset')
            if charset:
                if is_known_charset(charset):
                    return charset
                return self.unknown_charset(charset)
        return self.default_charset


class DynamicCharsetResponseMixin(object):

    """If this mixin is mixed into a response class it will provide
    a dynamic `charset` attribute.  This means that the charset is
    looked up in (and stored in) the `Content-Type` header and updates
    itself automatically.  This also means a small performance hit but
    it can be useful if you're working with different charsets on
    responses.

    Because the charset attribute is not a property at class level, the
    default value is stored in `default_charset`.

    Because it changes the behavior of :class:`Response` this class has
    to be mixed in *before* the actual response class::

        class MyResponse(DynamicCharsetResponseMixin, Response):
            pass

    .. versionadded:: 0.6
    """

    #: the default charset.
    default_charset = 'utf-8'

    def _get_charset(self):
        header = self.headers.get('content-type')
        if header:
            charset = parse_options_header(header)[1].get('charset')
            if charset:
                return charset
        return self.default_charset

    def _set_charset(self, charset):
        header = self.headers.get('content-type')
        ct, options = parse_options_header(header)
        if not ct:
            raise TypeError('Cannot set charset if Content-Type '
                            'header is missing.')
        options['charset'] = charset
        self.headers['Content-Type'] = dump_options_header(ct, options)

    charset = property(_get_charset, _set_charset, doc="""
        The charset for the response.  It's stored inside the
        Content-Type header as a parameter.""")
    del _get_charset, _set_charset
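
With the mixin in place, assigning to `charset` rewrites the `Content-Type` header through `dump_options_header` (an editorial sketch, not part of the imported file)::

    class MyResponse(DynamicCharsetResponseMixin, Response):
        pass

    response = MyResponse(mimetype='text/plain')
    response.charset = 'latin1'
    # the header now reads: text/plain; charset=latin1
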
BIN venv/lib/python2.7/site-packages/werkzeug/contrib/wrappers.pyc Normal file
Binary file not shown.