Merge branch 'master-backport' into 'master'

[ADD] initial backport from v12

See merge request flectra-hq/flectra!153
commit a6e18e1dfc

flectra/api.py (111 lines changed)
@@ -52,10 +52,11 @@ from inspect import currentframe, getargspec
 from pprint import pformat
 from weakref import WeakSet
 
-from decorator import decorator
+from decorator import decorate, decorator
 from werkzeug.local import Local, release_local
 
-from flectra.tools import frozendict, classproperty
+from flectra.tools import frozendict, classproperty, StackMap
+from flectra.exceptions import CacheMiss
 
 _logger = logging.getLogger(__name__)
 
@@ -315,6 +316,8 @@ def model(method):
 
         Notice that no ``ids`` are passed to the method in the traditional style.
     """
+    if method.__name__ == 'create':
+        return model_create_single(method)
     method._api = 'model'
     return method
 
@@ -418,6 +421,50 @@ def model_cr_context(method):
     return method
 
 
+_create_logger = logging.getLogger(__name__ + '.create')
+
+
+def _model_create_single(create, self, arg):
+    # 'create' expects a dict and returns a record
+    if isinstance(arg, Mapping):
+        return create(self, arg)
+    if len(arg) > 1:
+        _create_logger.debug("%s.create() called with %d dicts", self, len(arg))
+    return self.browse().concat(*(create(self, vals) for vals in arg))
+
+
+def model_create_single(method):
+    """ Decorate a method that takes a dictionary and creates a single record.
+        The method may be called with either a single dict or a list of dicts::
+
+            record = model.create(vals)
+            records = model.create([vals, ...])
+    """
+    wrapper = decorate(method, _model_create_single)
+    wrapper._api = 'model_create'
+    return wrapper
+
+
+def _model_create_multi(create, self, arg):
+    # 'create' expects a list of dicts and returns a recordset
+    if isinstance(arg, Mapping):
+        return create(self, [arg])
+    return create(self, arg)
+
+
+def model_create_multi(method):
+    """ Decorate a method that takes a list of dictionaries and creates multiple
+        records. The method may be called with either a single dict or a list of
+        dicts::
+
+            record = model.create(vals)
+            records = model.create([vals, ...])
+    """
+    wrapper = decorate(method, _model_create_multi)
+    wrapper._api = 'model_create'
+    return wrapper
+
+
 def cr(method):
     """ Decorate a traditional-style method that takes ``cr`` as a parameter.
         Such a method may be called in both record and traditional styles, like::
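Reviewer note: with this backport ``create()`` accepts either a single vals
dict or a list of vals dicts. A minimal sketch of the new decorator in use
(the model and field names below are hypothetical, not part of this diff):

    from flectra import api, fields, models

    class LibraryBook(models.Model):
        _name = 'library.book'          # hypothetical model for illustration
        name = fields.Char()

        @api.model_create_multi
        def create(self, vals_list):
            # vals_list is always a list of dicts here; a single dict from
            # the caller is wrapped by _model_create_multi above
            return super(LibraryBook, self).create(vals_list)

    # Both call styles are now valid:
    #   env['library.book'].create({'name': 'A'})                  # -> 1 record
    #   env['library.book'].create([{'name': 'A'}, {'name': 'B'}]) # -> 2 records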
@@ -664,15 +711,24 @@ def expected(decorator, func):
     return decorator(func) if not hasattr(func, '_api') else func
 
 
-def call_kw_model(method, self, args, kwargs):
+def _call_kw_model(method, self, args, kwargs):
     context, args, kwargs = split_context(method, args, kwargs)
     recs = self.with_context(context or {})
     _logger.debug("call %s.%s(%s)", recs, method.__name__, Params(args, kwargs))
     result = method(recs, *args, **kwargs)
     return downgrade(method, result, recs, args, kwargs)
 
 
-def call_kw_multi(method, self, args, kwargs):
+def _call_kw_model_create(method, self, args, kwargs):
+    # special case for method 'create'
+    context, args, kwargs = split_context(method, args, kwargs)
+    recs = self.with_context(context or {})
+    _logger.debug("call %s.%s(%s)", recs, method.__name__, Params(args, kwargs))
+    result = method(recs, *args, **kwargs)
+    return result.id if isinstance(args[0], Mapping) else result.ids
+
+
+def _call_kw_multi(method, self, args, kwargs):
     ids, args = args[0], args[1:]
     context, args, kwargs = split_context(method, args, kwargs)
     recs = self.with_context(context or {}).browse(ids)
@@ -680,13 +736,17 @@ def call_kw_multi(method, self, args, kwargs):
     result = method(recs, *args, **kwargs)
     return downgrade(method, result, recs, args, kwargs)
 
 
 def call_kw(model, name, args, kwargs):
     """ Invoke the given method ``name`` on the recordset ``model``. """
     method = getattr(type(model), name)
-    if getattr(method, '_api', None) == 'model':
-        return call_kw_model(method, model, args, kwargs)
+    api = getattr(method, '_api', None)
+    if api == 'model':
+        return _call_kw_model(method, model, args, kwargs)
+    elif api == 'model_create':
+        return _call_kw_model_create(method, model, args, kwargs)
     else:
-        return call_kw_multi(method, model, args, kwargs)
+        return _call_kw_multi(method, model, args, kwargs)
 
 
 class Environment(Mapping):
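Reviewer note: ``_call_kw_model_create`` mirrors the payload shape at the RPC
boundary: a single vals dict in yields a single id out, a list of dicts yields
a list of ids. Sketch over XML-RPC (model name and values illustrative):

    # execute_kw(db, uid, password, 'res.partner', 'create', [{'name': 'A'}])
    #     -> 7         (result.id)
    # execute_kw(db, uid, password, 'res.partner', 'create', [[{'name': 'A'}, {'name': 'B'}]])
    #     -> [7, 8]    (result.ids)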
@@ -741,7 +801,7 @@ class Environment(Mapping):
         self.cr, self.uid, self.context = self.args = (cr, uid, frozendict(context))
         self.registry = Registry(cr.dbname)
         self.cache = envs.cache
-        self._protected = defaultdict(frozenset)   # {field: ids, ...}
+        self._protected = StackMap()                # {field: ids, ...}
         self.dirty = defaultdict(set)               # {record: set(field_name), ...}
         self.all = envs
         envs.add(self)
@@ -859,16 +919,23 @@ class Environment(Mapping):
         return self[field.model_name].browse(self._protected.get(field, ()))
 
     @contextmanager
-    def protecting(self, fields, records):
-        """ Prevent the invalidation or recomputation of ``fields`` on ``records``. """
-        saved = {}
+    def protecting(self, what, records=None):
+        """ Prevent the invalidation or recomputation of fields on records.
+            The parameters are either:
+             - ``what`` a collection of fields and ``records`` a recordset, or
+             - ``what`` a collection of pairs ``(fields, records)``.
+        """
+        protected = self._protected
         try:
-            for field in fields:
-                ids = saved[field] = self._protected[field]
-                self._protected[field] = ids.union(records._ids)
+            protected.pushmap()
+            what = what if records is None else [(what, records)]
+            for fields, records in what:
+                for field in fields:
+                    ids = protected.get(field, frozenset())
+                    protected[field] = ids.union(records._ids)
             yield
         finally:
-            self._protected.update(saved)
+            protected.popmap()
 
     def field_todo(self, field):
         """ Return a recordset with all records to recompute for ``field``. """
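Reviewer note: ``protecting()`` keeps its old calling convention but now also
takes ``(fields, records)`` pairs, and the StackMap makes nesting cheap: each
exit just pops one map. A minimal sketch (the recordsets and field collections
are hypothetical):

    # old style, still supported: one collection of fields, one recordset
    with env.protecting(model._fields.values(), records):
        ...

    # new style: several (fields, records) pairs at once; nested calls are
    # unwound by StackMap.popmap() as each context manager exits
    with env.protecting([(fields_a, recs_a), (fields_b, recs_b)]):
        with env.protecting(fields_c, recs_c):
            ...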
@@ -888,7 +955,11 @@ class Environment(Mapping):
         recs_list = self.all.todo.setdefault(field, [])
         for i, recs in enumerate(recs_list):
             if recs.env == records.env:
-                recs_list[i] |= records
+                # only add records if not already in the recordset, much much
+                # cheaper in case recs is big and records is a singleton
+                # already present
+                if not records <= recs:
+                    recs_list[i] |= records
                 break
         else:
             recs_list.append(records)
@@ -957,7 +1028,11 @@ class Cache(object):
     def get(self, record, field):
         """ Return the value of ``field`` for ``record``. """
         key = field.cache_key(record)
-        value = self._data[field][record.id][key]
+        try:
+            value = self._data[field][record.id][key]
+        except KeyError:
+            raise CacheMiss(record, field)
+
         return value.get() if isinstance(value, SpecialValue) else value
 
     def set(self, record, field, value):
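Note: a cache miss now surfaces as a dedicated ``CacheMiss`` (defined in
flectra/exceptions.py below) instead of a bare ``KeyError``. Callers that probe
the cache can catch it explicitly; a sketch, assuming a record and a field
object are in scope:

    from flectra.exceptions import CacheMiss

    try:
        value = record.env.cache.get(record, field)
    except CacheMiss:
        value = None    # fall back to a fetch / recompute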
flectra/exceptions.py

@@ -46,13 +46,19 @@ class RedirectWarning(Exception):
     :param string button_text: text to put on the button that will trigger
                                the redirection.
     """
+    # using this RedirectWarning won't crash if used as an except_orm
+    @property
+    def name(self):
+        return self.args[0]
 
 
 class AccessDenied(Exception):
-    """ Login/password error. No message, no traceback.
+    """ Login/password error. no traceback.
     Example: When you try to log with a wrong password."""
-    def __init__(self):
-        super(AccessDenied, self).__init__('Access denied')
+    def __init__(self, message='Access denied'):
+        super(AccessDenied, self).__init__(message)
+        self.with_traceback(None)
+        self.__cause__ = None
         self.traceback = ('', '', '')
 
 
@@ -63,6 +69,13 @@ class AccessError(except_orm):
         super(AccessError, self).__init__(msg)
 
 
+class CacheMiss(except_orm, KeyError):
+    """ Missing value(s) in cache.
+    Example: When you try to read a value in a flushed cache."""
+    def __init__(self, record, field):
+        super(CacheMiss, self).__init__("%s.%s" % (str(record), field.name))
+
+
 class MissingError(except_orm):
     """ Missing record(s).
     Example: When you try to write on a deleted record."""
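Note: ``AccessDenied`` keeps its default text but now accepts a custom
message, e.g.:

    from flectra.exceptions import AccessDenied

    raise AccessDenied()                    # "Access denied", as before
    raise AccessDenied('Invalid API key')   # custom message, still no traceback chain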
flectra/http.py

@@ -25,6 +25,7 @@ from os.path import join as opj
 from zlib import adler32
 
 import babel.core
+from datetime import datetime, date
 import passlib.utils
 import psycopg2
 import json
@@ -44,10 +45,11 @@ except ImportError:
     psutil = None
 
 import flectra
+from flectra import fields
 from .service.server import memory_info
 from .service import security, model as service_model
 from .tools.func import lazy_property
-from .tools import ustr, consteq, frozendict, pycompat, unique
+from .tools import ustr, consteq, frozendict, pycompat, unique, date_utils
 
 from .modules.module import module_manifest
 
@@ -101,9 +103,9 @@ def dispatch_rpc(service_name, method, params):
     rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
     if rpc_request_flag or rpc_response_flag:
         start_time = time.time()
-        start_rss, start_vms = 0, 0
+        start_memory = 0
         if psutil:
-            start_rss, start_vms = memory_info(psutil.Process(os.getpid()))
+            start_memory = memory_info(psutil.Process(os.getpid()))
         if rpc_request and rpc_response_flag:
             flectra.netsvc.log(rpc_request, logging.DEBUG, '%s.%s' % (service_name, method), replace_request_password(params))
 
@@ -617,6 +619,7 @@ class JsonRequest(WebRequest):
         self.context = self.params.pop('context', dict(self.session.context))
 
     def _json_response(self, result=None, error=None):
+
         response = {
             'jsonrpc': '2.0',
             'id': self.jsonrequest.get('id')
@@ -632,7 +635,7 @@ class JsonRequest(WebRequest):
             # We need then to manage http sessions manually.
             response['session_id'] = self.session.sid
             mime = 'application/javascript'
-            body = "%s(%s);" % (self.jsonp, json.dumps(response, default=ustr),)
+            body = "%s(%s);" % (self.jsonp, json.dumps(response, default=date_utils.json_default))
         else:
             mime = 'application/json'
             body = json.dumps(response, default=ustr)
@@ -682,9 +685,9 @@ class JsonRequest(WebRequest):
         args = self.params.get('args', [])
 
         start_time = time.time()
-        _, start_vms = 0, 0
+        start_memory = 0
         if psutil:
-            _, start_vms = memory_info(psutil.Process(os.getpid()))
+            start_memory = memory_info(psutil.Process(os.getpid()))
         if rpc_request and rpc_response_flag:
             rpc_request.debug('%s: %s %s, %s',
                               endpoint, model, method, pprint.pformat(args))
@@ -693,11 +696,11 @@ class JsonRequest(WebRequest):
 
         if rpc_request_flag or rpc_response_flag:
             end_time = time.time()
-            _, end_vms = 0, 0
+            end_memory = 0
             if psutil:
-                _, end_vms = memory_info(psutil.Process(os.getpid()))
+                end_memory = memory_info(psutil.Process(os.getpid()))
             logline = '%s: %s %s: time:%.3fs mem: %sk -> %sk (diff: %sk)' % (
-                endpoint, model, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
+                endpoint, model, method, end_time - start_time, start_memory / 1024, end_memory / 1024, (end_memory - start_memory)/1024)
             if rpc_response_flag:
                 rpc_response.debug('%s, %s', logline, pprint.pformat(result))
             else:
@@ -1033,9 +1036,10 @@ class OpenERPSession(werkzeug.contrib.sessions.Session):
                 HTTP_HOST=wsgienv['HTTP_HOST'],
                 REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
             )
-            uid = dispatch_rpc('common', 'authenticate', [db, login, password, env])
+            uid = flectra.registry(db)['res.users'].authenticate(db, login, password, env)
         else:
             security.check(db, uid, password)
+        self.rotate = True
         self.db = db
         self.uid = uid
         self.login = login
@@ -1057,12 +1061,6 @@ class OpenERPSession(werkzeug.contrib.sessions.Session):
         # We create our own environment instead of the request's one.
         # to avoid creating it without the uid since request.uid isn't set yet
         env = flectra.api.Environment(request.cr, self.uid, self.context)
-        # == BACKWARD COMPATIBILITY TO CONVERT OLD SESSION TYPE TO THE NEW ONES ! REMOVE ME AFTER 11.0 ==
-        if self.get('password'):
-            security.check(self.db, self.uid, self.password)
-            self.session_token = security.compute_session_token(self, env)
-            self.pop('password')
-        # =================================================================================================
         # here we check if the session is still valid
         if not security.check_session(self, env):
             raise SessionExpiredException("Session expired")
@@ -1239,6 +1237,7 @@ class Response(werkzeug.wrappers.Response):
     def set_default(self, template=None, qcontext=None, uid=None):
         self.template = template
         self.qcontext = qcontext or dict()
+        self.qcontext['response_template'] = self.template
         self.uid = uid
         # Support for Cross-Origin Resource Sharing
         if request.endpoint and 'cors' in request.endpoint.routing:
@@ -1302,7 +1301,8 @@ class Root(object):
         # Setup http sessions
         path = flectra.tools.config.session_dir
         _logger.debug('HTTP sessions stored in: %s', path)
-        return werkzeug.contrib.sessions.FilesystemSessionStore(path, session_class=OpenERPSession)
+        return werkzeug.contrib.sessions.FilesystemSessionStore(
+            path, session_class=OpenERPSession, renew_missing=True)
 
     @lazy_property
     def nodb_routing_map(self):
@@ -1351,6 +1351,7 @@ class Root(object):
                 addons_manifest[module] = manifest
                 statics['/%s/static' % module] = path_static
 
+
         if statics:
             _logger.info("HTTP Configuring static files")
             app = werkzeug.wsgi.SharedDataMiddleware(self.dispatch, statics, cache_timeout=STATIC_CACHE)
@@ -1423,10 +1424,16 @@ class Root(object):
         else:
             response = result
 
+        save_session = (not request.endpoint) or request.endpoint.routing.get('save_session', True)
+        if not save_session:
+            return response
+
         if httprequest.session.should_save:
             if httprequest.session.rotate:
                 self.session_store.delete(httprequest.session)
                 httprequest.session.sid = self.session_store.generate_key()
+                if httprequest.session.uid:
+                    httprequest.session.session_token = security.compute_session_token(httprequest.session, request.env)
             httprequest.session.modified = True
             self.session_store.save(httprequest.session)
         # We must not set the cookie if the session id was specified using a http header or a GET parameter.
@@ -1450,6 +1457,9 @@ class Root(object):
         httprequest.app = self
         httprequest.parameter_storage_class = werkzeug.datastructures.ImmutableOrderedMultiDict
         threading.current_thread().url = httprequest.url
+        threading.current_thread().query_count = 0
+        threading.current_thread().query_time = 0
+        threading.current_thread().perf_t0 = time.time()
 
         explicit_session = self.setup_session(httprequest)
         self.setup_db(httprequest)
@@ -1513,6 +1523,7 @@ def db_filter(dbs, httprequest=None):
         if d == "www" and r:
             d = r.partition('.')[0]
         if flectra.tools.config['dbfilter']:
+            d, h = re.escape(d), re.escape(h)
             r = flectra.tools.config['dbfilter'].replace('%h', h).replace('%d', d)
             dbs = [i for i in dbs if re.match(r, i)]
         elif flectra.tools.config['db_name']:
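Note: escaping ``%d``/``%h`` before substitution stops hostname dots from
acting as regex wildcards in the dbfilter. Illustration:

    import re

    h = 'erp.example.com'
    re.match(h, 'erpXexample_com')             # matches: '.' is a wildcard
    re.match(re.escape(h), 'erpXexample_com')  # None: dots now match literally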
@@ -1646,7 +1657,7 @@ def send_file(filepath_or_fp, mimetype=None, as_attachment=False, filename=None,
 
 def content_disposition(filename):
     filename = flectra.tools.ustr(filename)
-    escaped = urls.url_quote(filename)
+    escaped = urls.url_quote(filename, safe='')
 
     return "attachment; filename*=UTF-8''%s" % escaped
 
flectra/netsvc.py

@@ -9,6 +9,7 @@ import pprint
 from . import release
 import sys
 import threading
+import time
 
 import psycopg2
 
@@ -69,10 +70,40 @@ LEVEL_COLOR_MAPPING = {
     logging.CRITICAL: (WHITE, RED),
 }
 
+class PerfFilter(logging.Filter):
+    def format_perf(self, query_count, query_time, remaining_time):
+        return ("%d" % query_count, "%.3f" % query_time, "%.3f" % remaining_time)
+
+    def filter(self, record):
+        if hasattr(threading.current_thread(), "query_count"):
+            query_count = threading.current_thread().query_count
+            query_time = threading.current_thread().query_time
+            perf_t0 = threading.current_thread().perf_t0
+            remaining_time = time.time() - perf_t0 - query_time
+            record.perf_info = '%s %s %s' % self.format_perf(query_count, query_time, remaining_time)
+            delattr(threading.current_thread(), "query_count")
+        else:
+            record.perf_info = "- - -"
+        return True
+
+class ColoredPerfFilter(PerfFilter):
+    def format_perf(self, query_count, query_time, remaining_time):
+        def colorize_time(time, format, low=1, high=5):
+            if time > high:
+                return COLOR_PATTERN % (30 + RED, 40 + DEFAULT, format % time)
+            if time > low:
+                return COLOR_PATTERN % (30 + YELLOW, 40 + DEFAULT, format % time)
+            return format % time
+        return (
+            colorize_time(query_count, "%d", 100, 1000),
+            colorize_time(query_time, "%.3f", 0.1, 3),
+            colorize_time(remaining_time, "%.3f", 1, 5)
+            )
+
 class DBFormatter(logging.Formatter):
     def format(self, record):
         record.pid = os.getpid()
-        record.dbname = getattr(threading.currentThread(), 'dbname', '?')
+        record.dbname = getattr(threading.current_thread(), 'dbname', '?')
         return logging.Formatter.format(self, record)
 
 class ColoredFormatter(DBFormatter):
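Note: ``PerfFilter`` annotates werkzeug request records with
"query_count query_time remaining_time"; combined with the ``%(perf_info)s``
added to the log format below, a request line ends up looking roughly like
this (all values illustrative):

    2018-06-01 12:00:00,000 1234 INFO mydb werkzeug: "GET /web HTTP/1.1" 200 - 42 0.103 0.271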
@@ -88,6 +119,13 @@ def init_logger():
         return
     _logger_init = True
 
+    old_factory = logging.getLogRecordFactory()
+    def record_factory(*args, **kwargs):
+        record = old_factory(*args, **kwargs)
+        record.perf_info = ""
+        return record
+    logging.setLogRecordFactory(record_factory)
+
     logging.addLevelName(25, "INFO")
     logging.captureWarnings(True)
 
@@ -95,7 +133,7 @@ def init_logger():
     resetlocale()
 
     # create a format for log messages and dates
-    format = '%(asctime)s %(pid)s %(levelname)s %(dbname)s %(name)s: %(message)s'
+    format = '%(asctime)s %(pid)s %(levelname)s %(dbname)s %(name)s: %(message)s %(perf_info)s'
     # Normal Handler on stderr
     handler = logging.StreamHandler()
 
@@ -119,7 +157,7 @@ def init_logger():
     if dirname and not os.path.isdir(dirname):
         os.makedirs(dirname)
     if tools.config['logrotate'] is not False:
-        if tools.config['workers'] > 1:
+        if tools.config['workers'] and tools.config['workers'] > 1:
             # TODO: fallback to regular file logging in master for safe(r) defaults?
             #
             # Doing so here would be a good idea but also might break
@@ -143,11 +181,13 @@ def init_logger():
 
     if os.name == 'posix' and isinstance(handler, logging.StreamHandler) and is_a_tty(handler.stream):
         formatter = ColoredFormatter(format)
+        perf_filter = ColoredPerfFilter()
     else:
         formatter = DBFormatter(format)
+        perf_filter = PerfFilter()
     handler.setFormatter(formatter)
 
     logging.getLogger().addHandler(handler)
+    logging.getLogger('werkzeug').addFilter(perf_filter)
 
     if tools.config['log_db']:
         db_levels = {
flectra/sql_db.py

@@ -10,6 +10,7 @@ the ORM does, in fact.
 
 from contextlib import contextmanager
 from functools import wraps
+import itertools
 import logging
 import time
 import uuid
@@ -224,9 +225,9 @@ class Cursor(object):
             raise ValueError("SQL query parameters should be a tuple, list or dict; got %r" % (params,))
 
         if self.sql_log:
-            now = time.time()
-            _logger.debug("query: %s", query)
+            encoding = psycopg2.extensions.encodings[self.connection.encoding]
+            _logger.debug("query: %s", self._obj.mogrify(query, params).decode(encoding, 'replace'))
+        now = time.time()
         try:
             params = params or None
             res = self._obj.execute(query, params)
@@ -237,10 +238,14 @@ class Cursor(object):
 
         # simple query count is always computed
         self.sql_log_count += 1
+        delay = (time.time() - now)
+        if hasattr(threading.current_thread(), 'query_count'):
+            threading.current_thread().query_count += 1
+            threading.current_thread().query_time += delay
 
         # advanced stats only if sql_log is enabled
         if self.sql_log:
-            delay = (time.time() - now) * 1E6
+            delay *= 1E6
 
             res_from = re_from.match(query.lower())
             if res_from:
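Note: every ``execute()`` now feeds the per-thread counters that PerfFilter
consumes, so any code running on the request thread can inspect them. Sketch:

    import threading

    ct = threading.current_thread()
    if hasattr(ct, 'query_count'):
        print('queries: %d, time in SQL: %.3fs' % (ct.query_count, ct.query_time))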
flectra/tools/cache.py

@@ -47,6 +47,10 @@ class ormcache(object):
             @ormcache(skiparg=1)
             def _compute_domain(self, model_name, mode="read"):
                 ...
+
+        Methods implementing this decorator should never return a Recordset,
+        because the underlying cursor will eventually be closed and raise a
+        `psycopg2.OperationalError`.
     """
     def __init__(self, *args, **kwargs):
         self.args = args
flectra/tools/config.py

@@ -6,6 +6,7 @@ try:
 except ImportError:
     import ConfigParser
 
+import errno
 import logging
 import optparse
 import os
@@ -76,7 +77,7 @@ class configmanager(object):
         self.options = {
             'admin_passwd': 'admin',
             'csv_internal_sep': ',',
-            'publisher_warranty_url': 'http://services.flectrahq.com/publisher-warranty/',
+            'publisher_warranty_url': 'https://services.flectrahq.com/publisher-warranty/',
             'reportgz': False,
             'root_path': None,
         }
@@ -288,10 +289,12 @@ class configmanager(object):
                          help="Specify the number of workers, 0 disable prefork mode.",
                          type="int")
         group.add_option("--limit-memory-soft", dest="limit_memory_soft", my_default=2048 * 1024 * 1024,
-                         help="Maximum allowed virtual memory per worker, when reached the worker be reset after the current request (default 671088640 aka 640MB).",
+                         help="Maximum allowed virtual memory per worker, when reached the worker be "
+                              "reset after the current request (default 2048MiB).",
                          type="int")
         group.add_option("--limit-memory-hard", dest="limit_memory_hard", my_default=2560 * 1024 * 1024,
-                         help="Maximum allowed virtual memory per worker, when reached, any memory allocation will fail (default 805306368 aka 768MB).",
+                         help="Maximum allowed virtual memory per worker, when reached, any memory "
+                              "allocation will fail (default 2560MiB).",
                          type="int")
         group.add_option("--limit-time-cpu", dest="limit_time_cpu", my_default=60,
                          help="Maximum allowed CPU time per request (default 60).",
@@ -469,6 +472,8 @@ class configmanager(object):
             os.path.abspath(os.path.expanduser(os.path.expandvars(x.strip())))
             for x in self.options['addons_path'].split(','))
+
+        self.options['data_dir'] = os.path.abspath(os.path.expanduser(os.path.expandvars(self.options['data_dir'].strip())))
 
         self.options['init'] = opt.init and dict.fromkeys(opt.init.split(','), 1) or {}
         self.options['demo'] = (dict(self.options['init'])
                                 if not self.options['without_demo'] else {})
@@ -655,9 +660,11 @@ class configmanager(object):
     @property
     def session_dir(self):
         d = os.path.join(self['data_dir'], 'sessions')
-        if not os.path.exists(d):
+        try:
             os.makedirs(d, 0o700)
-        else:
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
         assert os.access(d, os.W_OK), \
             "%s: directory is not writable" % d
         return d
flectra/tools/date_utils.py (new file, 219 lines)

@@ -0,0 +1,219 @@
+# -*- coding: utf-8 -*-
+import math
+import calendar
+from datetime import date, datetime, time
+import pytz
+from dateutil.relativedelta import relativedelta
+from . import ustr
+
+
+def get_month(date):
+    ''' Compute the month dates range on which the 'date' parameter belongs to.
+
+    :param date: A datetime.datetime or datetime.date object.
+    :return: A tuple (date_from, date_to) having the same object type as the 'date' parameter.
+    '''
+    date_from = type(date)(date.year, date.month, 1)
+    date_to = type(date)(date.year, date.month, calendar.monthrange(date.year, date.month)[1])
+    return date_from, date_to
+
+
+def get_quarter_number(date):
+    ''' Get the number of the quarter on which the 'date' parameter belongs to.
+
+    :param date: A datetime.datetime or datetime.date object.
+    :return: A [1-4] integer.
+    '''
+    return math.ceil(date.month / 3)
+
+
+def get_quarter(date):
+    ''' Compute the quarter dates range on which the 'date' parameter belongs to.
+
+    :param date: A datetime.datetime or datetime.date object.
+    :return: A tuple (date_from, date_to) having the same object type as the 'date' parameter.
+    '''
+    quarter_number = get_quarter_number(date)
+    month_from = ((quarter_number - 1) * 3) + 1
+    date_from = type(date)(date.year, month_from, 1)
+    date_to = (date_from + relativedelta(months=2))
+    date_to = date_to.replace(day=calendar.monthrange(date_to.year, date_to.month)[1])
+    return date_from, date_to
+
+
+def get_fiscal_year(date, day=31, month=12):
+    ''' Compute the fiscal year dates range on which the 'date' parameter belongs to.
+    A fiscal year is the period used by governments for accounting purposes and vary between countries.
+
+    By default, calling this method with only one parameter gives the calendar year because the ending date of the
+    fiscal year is set to the YYYY-12-31.
+
+    :param date: A datetime.datetime or datetime.date object.
+    :param day: The day of month the fiscal year ends.
+    :param month: The month of year the fiscal year ends.
+    :return: A tuple (date_from, date_to) having the same object type as the 'date' parameter.
+    '''
+    max_day = calendar.monthrange(date.year, month)[1]
+    date_to = type(date)(date.year, month, min(day, max_day))
+    if date <= date_to:
+        date_from = date_to - relativedelta(years=1)
+        date_from += relativedelta(days=1)
+    else:
+        date_from = date_to + relativedelta(days=1)
+        max_day = calendar.monthrange(date_to.year + 1, date_to.month)[1]
+        date_to = type(date)(date.year + 1, month, min(day, max_day))
+    return date_from, date_to
+
+
+def start_of(value, granularity):
+    """
+    Get start of a time period from a date or a datetime.
+
+    :param value: initial date or datetime.
+    :param granularity: type of period in string, can be year, quarter, month, week, day or hour.
+    :return: a date/datetime object corresponding to the start of the specified period.
+    """
+    is_datetime = isinstance(value, datetime)
+    if granularity == "year":
+        result = value.replace(month=1, day=1)
+    elif granularity == "quarter":
+        # Q1 = Jan 1st
+        # Q2 = Apr 1st
+        # Q3 = Jul 1st
+        # Q4 = Oct 1st
+        result = get_quarter(value)[0]
+    elif granularity == "month":
+        result = value.replace(day=1)
+    elif granularity == 'week':
+        # `calendar.weekday` uses ISO8601 for start of week reference, this means that
+        # by default MONDAY is the first day of the week and SUNDAY is the last.
+        result = value - relativedelta(days=calendar.weekday(value.year, value.month, value.day))
+    elif granularity == "day":
+        result = value
+    elif granularity == "hour" and is_datetime:
+        return datetime.combine(value, time.min).replace(hour=value.hour)
+    elif is_datetime:
+        raise ValueError(
+            "Granularity must be year, quarter, month, week, day or hour for value %s" % value
+        )
+    else:
+        raise ValueError(
+            "Granularity must be year, quarter, month, week or day for value %s" % value
+        )
+
+    return datetime.combine(result, time.min) if is_datetime else result
+
+
+def end_of(value, granularity):
+    """
+    Get end of a time period from a date or a datetime.
+
+    :param value: initial date or datetime.
+    :param granularity: Type of period in string, can be year, quarter, month, week, day or hour.
+    :return: A date/datetime object corresponding to the start of the specified period.
+    """
+    is_datetime = isinstance(value, datetime)
+    if granularity == "year":
+        result = value.replace(month=12, day=31)
+    elif granularity == "quarter":
+        # Q1 = Mar 31st
+        # Q2 = Jun 30th
+        # Q3 = Sep 30th
+        # Q4 = Dec 31st
+        result = get_quarter(value)[1]
+    elif granularity == "month":
+        result = value + relativedelta(day=1, months=1, days=-1)
+    elif granularity == 'week':
+        # `calendar.weekday` uses ISO8601 for start of week reference, this means that
+        # by default MONDAY is the first day of the week and SUNDAY is the last.
+        result = value + relativedelta(days=6-calendar.weekday(value.year, value.month, value.day))
+    elif granularity == "day":
+        result = value
+    elif granularity == "hour" and is_datetime:
+        return datetime.combine(value, time.max).replace(hour=value.hour)
+    elif is_datetime:
+        raise ValueError(
+            "Granularity must be year, quarter, month, week, day or hour for value %s" % value
+        )
+    else:
+        raise ValueError(
+            "Granularity must be year, quarter, month, week or day for value %s" % value
+        )
+
+    return datetime.combine(result, time.max) if is_datetime else result
+
+
+def add(value, *args, **kwargs):
+    """
+    Return the sum of ``value`` and a :class:`relativedelta`.
+
+    :param value: initial date or datetime.
+    :param args: positional args to pass directly to :class:`relativedelta`.
+    :param kwargs: keyword args to pass directly to :class:`relativedelta`.
+    :return: the resulting date/datetime.
+    """
+    return value + relativedelta(*args, **kwargs)
+
+
+def subtract(value, *args, **kwargs):
+    """
+    Return the difference between ``value`` and a :class:`relativedelta`.
+
+    :param value: initial date or datetime.
+    :param args: positional args to pass directly to :class:`relativedelta`.
+    :param kwargs: keyword args to pass directly to :class:`relativedelta`.
+    :return: the resulting date/datetime.
+    """
+    return value - relativedelta(*args, **kwargs)
+
+
+def json_default(obj):
+    """
+    Properly serializes date and datetime objects.
+    """
+    from flectra import fields
+    if isinstance(obj, date):
+        if isinstance(obj, datetime):
+            return fields.Datetime.to_string(obj)
+        return fields.Date.to_string(obj)
+    return ustr(obj)
+
+def date_range(start, end, step=relativedelta(months=1)):
+    """Date range generator with a step interval.
+
+    :param start datetime: begining date of the range.
+    :param end datetime: ending date of the range.
+    :param step relativedelta: interval of the range.
+    :return: a range of datetime from start to end.
+    :rtype: Iterator[datetime]
+    """
+
+    are_naive = start.tzinfo is None and end.tzinfo is None
+    are_utc = start.tzinfo == pytz.utc and end.tzinfo == pytz.utc
+
+    # Cases with miscellenous timezone are more complexe because of DST.
+    are_others = start.tzinfo and end.tzinfo and not are_utc
+
+    if are_others:
+        if start.tzinfo.zone != end.tzinfo.zone:
+            raise ValueError("Timezones of start argument and end argument seem inconsistent")
+
+    if not are_naive and not are_utc and not are_others:
+        raise ValueError("Timezones of start argument and end argument mismatch")
+
+    if start > end:
+        raise ValueError("start > end, start date must be before end")
+
+    if start == start + step:
+        raise ValueError("Looks like step is null")
+
+    if start.tzinfo:
+        localize = start.tzinfo.localize
+    else:
+        localize = lambda dt: dt
+
+    dt = start.replace(tzinfo=None)
+    end = end.replace(tzinfo=None)
+    while dt <= end:
+        yield localize(dt)
+        dt = dt + step
flectra/tools/func.py

@@ -2,10 +2,11 @@
 # Part of Odoo, Flectra. See LICENSE file for full copyright and licensing details.
 
 __all__ = ['synchronized', 'lazy_classproperty', 'lazy_property',
-           'classproperty', 'conditional']
+           'classproperty', 'conditional', 'lazy']
 
 from functools import wraps
 from inspect import getsourcefile
+from json import JSONEncoder
 
 
 class lazy_property(object):
@@ -112,3 +113,143 @@ class _ClassProperty(property):
 
 def classproperty(func):
     return _ClassProperty(classmethod(func))
+
+
+class lazy(object):
+    """ A proxy to the (memoized) result of a lazy evaluation::
+
+            foo = lazy(func, arg)           # func(arg) is not called yet
+            bar = foo + 1                   # eval func(arg) and add 1
+            baz = foo + 2                   # use result of func(arg) and add 2
+
+    """
+    __slots__ = ['_func', '_args', '_kwargs', '_cached_value']
+
+    def __init__(self, func, *args, **kwargs):
+        # bypass own __setattr__
+        object.__setattr__(self, '_func', func)
+        object.__setattr__(self, '_args', args)
+        object.__setattr__(self, '_kwargs', kwargs)
+
+    @property
+    def _value(self):
+        if self._func is not None:
+            value = self._func(*self._args, **self._kwargs)
+            object.__setattr__(self, '_func', None)
+            object.__setattr__(self, '_args', None)
+            object.__setattr__(self, '_kwargs', None)
+            object.__setattr__(self, '_cached_value', value)
+        return self._cached_value
+
+    def __getattr__(self, name): return getattr(self._value, name)
+    def __setattr__(self, name, value): return setattr(self._value, name, value)
+    def __delattr__(self, name): return delattr(self._value, name)
+
+    def __repr__(self):
+        return repr(self._value) if self._func is None else object.__repr__(self)
+    def __str__(self): return str(self._value)
+    def __bytes__(self): return bytes(self._value)
+    def __format__(self, format_spec): return format(self._value, format_spec)
+
+    def __lt__(self, other): return self._value < other
+    def __le__(self, other): return self._value <= other
+    def __eq__(self, other): return self._value == other
+    def __ne__(self, other): return self._value != other
+    def __gt__(self, other): return self._value > other
+    def __ge__(self, other): return self._value >= other
+
+    def __hash__(self): return hash(self._value)
+    def __bool__(self): return bool(self._value)
+
+    def __call__(self, *args, **kwargs): return self._value(*args, **kwargs)
+
+    def __len__(self): return len(self._value)
+    def __getitem__(self, key): return self._value[key]
+    def __missing__(self, key): return self._value.__missing__(key)
+    def __setitem__(self, key, value): self._value[key] = value
+    def __delitem__(self, key): del self._value[key]
+    def __iter__(self): return iter(self._value)
+    def __reversed__(self): return reversed(self._value)
+    def __contains__(self, key): return key in self._value
+
+    def __add__(self, other): return self._value.__add__(other)
+    def __sub__(self, other): return self._value.__sub__(other)
+    def __mul__(self, other): return self._value.__mul__(other)
+    def __matmul__(self, other): return self._value.__matmul__(other)
+    def __truediv__(self, other): return self._value.__truediv__(other)
+    def __floordiv__(self, other): return self._value.__floordiv__(other)
+    def __mod__(self, other): return self._value.__mod__(other)
+    def __divmod__(self, other): return self._value.__divmod__(other)
+    def __pow__(self, other): return self._value.__pow__(other)
+    def __lshift__(self, other): return self._value.__lshift__(other)
+    def __rshift__(self, other): return self._value.__rshift__(other)
+    def __and__(self, other): return self._value.__and__(other)
+    def __xor__(self, other): return self._value.__xor__(other)
+    def __or__(self, other): return self._value.__or__(other)
+
+    def __radd__(self, other): return self._value.__radd__(other)
+    def __rsub__(self, other): return self._value.__rsub__(other)
+    def __rmul__(self, other): return self._value.__rmul__(other)
+    def __rmatmul__(self, other): return self._value.__rmatmul__(other)
+    def __rtruediv__(self, other): return self._value.__rtruediv__(other)
+    def __rfloordiv__(self, other): return self._value.__rfloordiv__(other)
+    def __rmod__(self, other): return self._value.__rmod__(other)
+    def __rdivmod__(self, other): return self._value.__rdivmod__(other)
+    def __rpow__(self, other): return self._value.__rpow__(other)
+    def __rlshift__(self, other): return self._value.__rlshift__(other)
+    def __rrshift__(self, other): return self._value.__rrshift__(other)
+    def __rand__(self, other): return self._value.__rand__(other)
+    def __rxor__(self, other): return self._value.__rxor__(other)
+    def __ror__(self, other): return self._value.__ror__(other)
+
+    def __iadd__(self, other): return self._value.__iadd__(other)
+    def __isub__(self, other): return self._value.__isub__(other)
+    def __imul__(self, other): return self._value.__imul__(other)
+    def __imatmul__(self, other): return self._value.__imatmul__(other)
+    def __itruediv__(self, other): return self._value.__itruediv__(other)
+    def __ifloordiv__(self, other): return self._value.__ifloordiv__(other)
+    def __imod__(self, other): return self._value.__imod__(other)
+    def __ipow__(self, other): return self._value.__ipow__(other)
+    def __ilshift__(self, other): return self._value.__ilshift__(other)
+    def __irshift__(self, other): return self._value.__irshift__(other)
+    def __iand__(self, other): return self._value.__iand__(other)
+    def __ixor__(self, other): return self._value.__ixor__(other)
+    def __ior__(self, other): return self._value.__ior__(other)
+
+    def __neg__(self): return self._value.__neg__()
+    def __pos__(self): return self._value.__pos__()
+    def __abs__(self): return self._value.__abs__()
+    def __invert__(self): return self._value.__invert__()
+
+    def __complex__(self): return complex(self._value)
+    def __int__(self): return int(self._value)
+    def __float__(self): return float(self._value)
+
+    def __index__(self): return self._value.__index__()
+
+    def __round__(self): return self._value.__round__()
+    def __trunc__(self): return self._value.__trunc__()
+    def __floor__(self): return self._value.__floor__()
+    def __ceil__(self): return self._value.__ceil__()
+
+    def __enter__(self): return self._value.__enter__()
+    def __exit__(self, exc_type, exc_value, traceback):
+        return self._value.__exit__(exc_type, exc_value, traceback)
+
+    def __await__(self): return self._value.__await__()
+    def __aiter__(self): return self._value.__aiter__()
+    def __anext__(self): return self._value.__anext__()
+    def __aenter__(self): return self._value.__aenter__()
+    def __aexit__(self, exc_type, exc_value, traceback):
+        return self._value.__aexit__(exc_type, exc_value, traceback)
+
+
+# patch serialization of lazy
+def default(self, o):
+    if isinstance(o, lazy):
+        return o._value
+    return json_encoder_default(self, o)
+
+
+json_encoder_default = JSONEncoder.default
+JSONEncoder.default = default
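Note: ``lazy`` defers the wrapped call until first use and then proxies
essentially every special method to the memoized value. Sketch:

    from flectra.tools.func import lazy

    def expensive():
        print('computed!')
        return 41

    foo = lazy(expensive)   # nothing computed yet
    foo + 1                 # prints 'computed!', evaluates to 42
    foo + 2                 # cached: no second print, evaluates to 43

    import json
    json.dumps({'x': lazy(lambda: 7)})   # '{"x": 7}' via the JSONEncoder patch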
flectra/tools/image.py

@@ -14,11 +14,20 @@ from flectra.tools import pycompat
 Image.preinit()
 Image._initialized = 2
 
+# Maps only the 6 first bits of the base64 data, accurate enough
+# for our purpose and faster than decoding the full blob first
+FILETYPE_BASE64_MAGICWORD = {
+    b'/': 'jpg',
+    b'R': 'gif',
+    b'i': 'png',
+    b'P': 'svg+xml',
+}
+
 # ----------------------------------------
 # Image resizing
 # ----------------------------------------
 
-def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', filetype=None, avoid_if_small=False):
+def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', filetype=None, avoid_if_small=False, upper_limit=False):
     """ Function to resize an image. The image will be resized to the given
         size, while keeping the aspect ratios, and holes in the image will be
         filled with transparent background. The image will not be stretched if
@@ -41,7 +50,7 @@ def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', file
     :param base64_source: base64-encoded version of the source
         image; if False, returns False
     :param size: 2-tuple(width, height). A None value for any of width or
-        height mean an automatically computed value based respectivelly
+        height mean an automatically computed value based respectively
         on height or width of the source image.
     :param encoding: the output encoding
     :param filetype: the output filetype, by default the source image's
@@ -51,7 +60,10 @@ def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', file
     """
     if not base64_source:
        return False
-    if size == (None, None):
+    # Return unmodified content if no resize or we detect first 6 bits of '<'
+    # (0x3C) for SVG documents - This will bypass XML files as well, but it's
+    # harmless for these purposes
+    if size == (None, None) or base64_source[:1] == b'P':
         return base64_source
     image_stream = io.BytesIO(codecs.decode(base64_source, encoding))
     image = Image.open(image_stream)
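The ``b'P'`` check works because base64 encodes 6 bits per output character, so the first character of the payload is fully determined by the first 6 bits of the source: ``'<'`` (0x3C) always encodes to ``'P'``, just as the JPEG, GIF, and PNG magic bytes yield the prefixes in FILETYPE_BASE64_MAGICWORD. A quick sanity check, assuming standard base64:

    import base64

    # the first 6 bits of each magic number determine the first base64 character
    print(base64.b64encode(b'<svg xmlns="...">')[:1])       # b'P' -> svg+xml
    print(base64.b64encode(b'\xff\xd8\xff\xe0jpeg...')[:1])  # b'/' -> jpg
    print(base64.b64encode(b'GIF89a...')[:1])                # b'R' -> gif
    print(base64.b64encode(b'\x89PNG\r\n\x1a\n')[:1])        # b'i' -> png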
@@ -63,18 +75,32 @@ def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', file
     }.get(filetype, filetype)
 
     asked_width, asked_height = size
+    if upper_limit:
+        if asked_width:
+            if asked_width >= image.size[0]:
+                asked_width = image.size[0]
+        if asked_height:
+            if asked_height >= image.size[1]:
+                asked_height = image.size[1]
+
+        if image.size[0] >= image.size[1]:
+            asked_height = None
+        else:
+            asked_width = None
+        if asked_width is None and asked_height is None:
+            return base64_source
+
     if asked_width is None:
         asked_width = int(image.size[0] * (float(asked_height) / image.size[1]))
     if asked_height is None:
         asked_height = int(image.size[1] * (float(asked_width) / image.size[0]))
     size = asked_width, asked_height
 
     # check image size: do not create a thumbnail if avoiding smaller images
     if avoid_if_small and image.size[0] <= size[0] and image.size[1] <= size[1]:
         return base64_source
 
     if image.size != size:
-        image = image_resize_and_sharpen(image, size)
+        image = image_resize_and_sharpen(image, size, upper_limit=upper_limit)
     if image.mode not in ["1", "L", "P", "RGB", "RGBA"] or (filetype == 'JPEG' and image.mode == 'RGBA'):
         image = image.convert("RGB")
 
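With ``upper_limit`` set, the requested dimensions are clamped to the source so the image is never upscaled, then one axis is dropped so the aspect ratio drives the other. A hedged illustration of just the clamping arithmetic, as plain Python with no PIL dependency (``clamp_size`` is a hypothetical name):

    def clamp_size(src_w, src_h, asked_w, asked_h):
        # mirrors the upper_limit branch: clamp, keep only the dominant
        # axis, and derive the other from the aspect ratio
        if asked_w:
            asked_w = min(asked_w, src_w)
        if asked_h:
            asked_h = min(asked_h, src_h)
        if src_w >= src_h:
            asked_h = None
        else:
            asked_w = None
        if asked_w is None and asked_h is None:
            return src_w, src_h
        if asked_w is None:
            asked_w = int(src_w * float(asked_h) / src_h)
        if asked_h is None:
            asked_h = int(src_h * float(asked_w) / src_w)
        return asked_w, asked_h

    print(clamp_size(2000, 1000, 1024, 1024))  # (1024, 512): width-driven landscape
    print(clamp_size(500, 400, 1024, 1024))    # (500, 400): never upscaled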
@@ -82,7 +108,7 @@ def image_resize_image(base64_source, size=(1024, 1024), encoding='base64', file
     image.save(background_stream, filetype)
     return codecs.encode(background_stream.getvalue(), encoding)
 
-def image_resize_and_sharpen(image, size, preserve_aspect_ratio=False, factor=2.0):
+def image_resize_and_sharpen(image, size, preserve_aspect_ratio=False, factor=2.0, upper_limit=False):
     """
     Create a thumbnail by resizing while keeping ratio.
     A sharpen filter is applied for a better looking result.
@@ -101,8 +127,12 @@ def image_resize_and_sharpen(image, size, preserve_aspect_ratio=False, factor=2.
     sharpener = ImageEnhance.Sharpness(image)
     resized_image = sharpener.enhance(factor)
     # create a transparent image for background and paste the image on it
-    image = Image.new('RGBA', size, (255, 255, 255, 0))
+    if upper_limit:
+        image = Image.new('RGBA', (size[0], size[1]-3), (255, 255, 255, 0))  # FIXME temporary fix for trimming the ghost border.
+    else:
+        image = Image.new('RGBA', size, (255, 255, 255, 0))
     image.paste(resized_image, ((size[0] - resized_image.size[0]) // 2, (size[1] - resized_image.size[1]) // 2))
 
     if image.mode != origin_mode:
         image = image.convert(origin_mode)
     return image
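The padding step pastes the resized image onto a fully transparent RGBA canvas, centered with simple integer arithmetic; any uncovered area stays transparent, which is what the docstring means by "holes". A minimal PIL sketch of that centering, assuming Pillow is installed and using stand-in dimensions:

    from PIL import Image

    canvas = Image.new('RGBA', (1024, 1024), (255, 255, 255, 0))  # transparent
    thumb = Image.new('RGBA', (1024, 512), (200, 0, 0, 255))      # stand-in content
    offset = ((canvas.size[0] - thumb.size[0]) // 2,
              (canvas.size[1] - thumb.size[1]) // 2)
    canvas.paste(thumb, offset)  # content centered, top/bottom bands stay transparent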
@@ -159,7 +189,7 @@ def image_resize_image_small(base64_source, size=(64, 64), encoding='base64', fi
 # ----------------------------------------
 # Crop Image
 # ----------------------------------------
-def crop_image(data, type='top', ratio=False, size=None, image_format="PNG"):
+def crop_image(data, type='top', ratio=False, size=None, image_format=None):
     """ Used for cropping image and create thumbnail
     :param data: base64 data of image.
     :param type: Used for cropping position possible
@@ -188,6 +218,7 @@ def crop_image(data, type='top', ratio=False, size=None, image_format="PNG"):
             new_h = h
             new_w = (h * w_ratio) // h_ratio
 
+    image_format = image_format or image_stream.format or 'JPEG'
     if type == "top":
         cropped_image = image_stream.crop((0, 0, new_w, new_h))
         cropped_image.save(output_stream, format=image_format)
@@ -201,6 +232,8 @@ def crop_image(data, type='top', ratio=False, size=None, image_format="PNG"):
         raise ValueError('ERROR: invalid value for crop_type')
     if size:
         thumbnail = Image.open(io.BytesIO(output_stream.getvalue()))
+        output_stream.truncate(0)
+        output_stream.seek(0)
         thumbnail.thumbnail(size, Image.ANTIALIAS)
         thumbnail.save(output_stream, image_format)
     return base64.b64encode(output_stream.getvalue())
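The ``truncate(0)``/``seek(0)`` pair matters because the stream is reused: without it, the second ``save()`` would write after the first crop's bytes, corrupting the returned payload. A small demonstration of the failure mode, independent of PIL:

    import io

    buf = io.BytesIO()
    buf.write(b'FIRST-PAYLOAD')
    # naive reuse: position is at the end, old bytes remain
    buf.write(b'second')
    print(buf.getvalue())  # b'FIRST-PAYLOADsecond' -- corrupted output

    buf.truncate(0)        # discard old contents...
    buf.seek(0)            # ...and rewind, since truncate keeps the position
    buf.write(b'second')
    print(buf.getvalue())  # b'second' -- clean reuse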
@@ -234,7 +267,7 @@ def image_colorize(original, randomize=True, color=(255, 255, 255)):
 
 def image_get_resized_images(base64_source, return_big=False, return_medium=True, return_small=True,
                              big_name='image', medium_name='image_medium', small_name='image_small',
-                             avoid_resize_big=True, avoid_resize_medium=False, avoid_resize_small=False):
+                             avoid_resize_big=True, avoid_resize_medium=False, avoid_resize_small=False, sizes={}):
     """ Standard tool function that returns a dictionary containing the
     big, medium and small versions of the source image. This function
     is meant to be used for the methods of functional fields for
@@ -245,7 +278,7 @@ def image_get_resized_images(base64_source, return_big=False, return_medium=True
     only image_medium and image_small values, to update those fields.
 
     :param base64_source: base64-encoded version of the source
-        image; if False, all returnes values will be False
+        image; if False, all returned values will be False
     :param return_{..}: if set, computes and return the related resizing
         of the image
     :param {..}_name: key of the resized image in the return dictionary;
@@ -255,36 +288,48 @@ def image_get_resized_images(base64_source, return_big=False, return_medium=True
         previous parameters.
     """
     return_dict = dict()
+    size_big = sizes.get(big_name, (1024, 1024))
+    size_medium = sizes.get(medium_name, (128, 128))
+    size_small = sizes.get(small_name, (64, 64))
     if isinstance(base64_source, pycompat.text_type):
         base64_source = base64_source.encode('ascii')
     if return_big:
-        return_dict[big_name] = image_resize_image_big(base64_source, avoid_if_small=avoid_resize_big)
+        return_dict[big_name] = image_resize_image_big(base64_source, avoid_if_small=avoid_resize_big, size=size_big)
     if return_medium:
-        return_dict[medium_name] = image_resize_image_medium(base64_source, avoid_if_small=avoid_resize_medium)
+        return_dict[medium_name] = image_resize_image_medium(base64_source, avoid_if_small=avoid_resize_medium, size=size_medium)
     if return_small:
-        return_dict[small_name] = image_resize_image_small(base64_source, avoid_if_small=avoid_resize_small)
+        return_dict[small_name] = image_resize_image_small(base64_source, avoid_if_small=avoid_resize_small, size=size_small)
     return return_dict
 
-def image_resize_images(vals, big_name='image', medium_name='image_medium', small_name='image_small'):
+def image_resize_images(vals, big_name='image', medium_name='image_medium', small_name='image_small', sizes={}):
     """ Update ``vals`` with image fields resized as expected. """
     if vals.get(big_name):
         vals.update(image_get_resized_images(vals[big_name],
             return_big=True, return_medium=True, return_small=True,
             big_name=big_name, medium_name=medium_name, small_name=small_name,
-            avoid_resize_big=True, avoid_resize_medium=False, avoid_resize_small=False))
+            avoid_resize_big=True, avoid_resize_medium=False, avoid_resize_small=False, sizes=sizes))
     elif vals.get(medium_name):
         vals.update(image_get_resized_images(vals[medium_name],
             return_big=True, return_medium=True, return_small=True,
             big_name=big_name, medium_name=medium_name, small_name=small_name,
-            avoid_resize_big=True, avoid_resize_medium=True, avoid_resize_small=False))
+            avoid_resize_big=True, avoid_resize_medium=True, avoid_resize_small=False, sizes=sizes))
     elif vals.get(small_name):
         vals.update(image_get_resized_images(vals[small_name],
             return_big=True, return_medium=True, return_small=True,
             big_name=big_name, medium_name=medium_name, small_name=small_name,
-            avoid_resize_big=True, avoid_resize_medium=True, avoid_resize_small=True))
+            avoid_resize_big=True, avoid_resize_medium=True, avoid_resize_small=True, sizes=sizes))
     elif big_name in vals or medium_name in vals or small_name in vals:
         vals[big_name] = vals[medium_name] = vals[small_name] = False
 
+
+def image_data_uri(base64_source):
+    """This returns data URL scheme according RFC 2397
+    (https://tools.ietf.org/html/rfc2397) for all kind of supported images
+    (PNG, GIF, JPG and SVG), defaulting on PNG type if not mimetype detected.
+    """
+    return 'data:image/%s;base64,%s' % (
+        FILETYPE_BASE64_MAGICWORD.get(base64_source[:1], 'png'),
+        base64_source.decode(),
+    )
+
+
 if __name__=="__main__":
     import sys
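Together, the ``sizes`` override and ``image_data_uri`` make per-model thumbnail dimensions and inline rendering straightforward. A hedged usage sketch (the module path and the input file are assumptions, not part of this diff):

    import base64
    from flectra.tools import image as tools_image  # assumed import path

    with open('image.png', 'rb') as f:  # hypothetical input file
        b64 = base64.b64encode(f.read())

    vals = {'image': b64}
    # override the default 128x128 medium thumbnail for this record
    tools_image.image_resize_images(vals, sizes={'image_medium': (256, 256)})
    src = tools_image.image_data_uri(vals['image_medium'])  # 'data:image/png;base64,...'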
@@ -44,13 +44,14 @@ class _Cleaner(clean.Cleaner):
 
     _style_whitelist = [
         'font-size', 'font-family', 'font-weight', 'background-color', 'color', 'text-align',
-        'line-height', 'letter-spacing', 'text-transform', 'text-decoration',
-        'float', 'vertical-align',
+        'line-height', 'letter-spacing', 'text-transform', 'text-decoration', 'opacity',
+        'float', 'vertical-align', 'display',
         'padding', 'padding-top', 'padding-left', 'padding-bottom', 'padding-right',
         'margin', 'margin-top', 'margin-left', 'margin-bottom', 'margin-right',
+        'white-space',
         # box model
-        'border', 'border-color', 'border-radius', 'border-style', 'border-width',
-        'height', 'margin', 'padding', 'width', 'max-width', 'min-width',
+        'border', 'border-color', 'border-radius', 'border-style', 'border-width', 'border-top',
+        'height', 'width', 'max-width', 'min-width', 'min-height',
         # tables
         'border-collapse', 'border-spacing', 'caption-side', 'empty-cells', 'table-layout']
 
@@ -416,9 +417,6 @@ email_re = re.compile(r"""([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,63})""",
 # matches a string containing only one email
 single_email_re = re.compile(r"""^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,63}$""", re.VERBOSE)
 
-# update command in emails body
-command_re = re.compile("^Set-([a-z]+) *: *(.+)$", re.I + re.UNICODE)
-
 # Updated in 7.0 to match the model name as well
 # Typical form of references is <timestamp-flectra-record_id-model_name@domain>
 # group(1) = the record ID ; group(2) = the model (if any) ; group(3) = the domain
@@ -504,6 +502,10 @@ def email_split_and_format(text):
             if addr[1]
             if '@' in addr[1]]
 
+def email_escape_char(email_address):
+    """ Escape problematic characters in the given email address string"""
+    return email_address.replace('\\', '\\\\').replace('%', '\\%').replace('_', '\\_')
+
 def email_references(references):
     ref_match, model, thread_id, hostname, is_private = False, False, False, False, False
     if references:
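The escaped characters (backslash, ``%``, ``_``) are the wildcards of SQL LIKE/ILIKE patterns, so the helper lets an address be matched literally in such a pattern. A quick check of the behaviour, reusing the function as added above:

    def email_escape_char(email_address):
        """ Escape problematic characters in the given email address string"""
        return email_address.replace('\\', '\\\\').replace('%', '\\%').replace('_', '\\_')

    print(email_escape_char('jean_dupont%test@example.com'))
    # jean\_dupont\%test@example.com -- safe inside a LIKE pattern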
@@ -102,6 +102,13 @@ def _check_olecf(data):
         return 'application/vnd.ms-powerpoint'
     return False
 
+
+def _check_svg(data):
+    """This simply checks the existence of the opening and ending SVG tags"""
+    if b'<svg' in data and b'/svg>' in data:
+        return 'image/svg+xml'
+
+
 # for "master" formats with many subformats, discriminants is a list of
 # functions, tried in order and the first non-falsy value returned is the
 # selected mime type. If all functions return falsy values, the master
@@ -115,6 +122,9 @@ _mime_mappings = (
     _Entry('image/png', [b'\x89PNG\r\n\x1A\n'], []),
     _Entry('image/gif', [b'GIF87a', b'GIF89a'], []),
     _Entry('image/bmp', [b'BM'], []),
+    _Entry('image/svg+xml', [b'<'], [
+        _check_svg,
+    ]),
     # OLECF files in general (Word, Excel, PPT, default to word because why not?)
     _Entry('application/msword', [b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1', b'\x0D\x44\x4F\x43'], [
         _check_olecf
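The master-prefix/discriminant scheme first matches the raw byte prefix (here ``b'<'``, which any XML-ish document shares), then lets the discriminant functions refine the answer. A minimal sketch of that dispatch, with simplified stand-in names; the real module's loop differs in detail:

    from collections import namedtuple

    _Entry = namedtuple('_Entry', ['mimetype', 'signatures', 'discriminants'])

    def _check_svg(data):
        # refine the generic '<' prefix: only report SVG when both tags appear
        if b'<svg' in data and b'/svg>' in data:
            return 'image/svg+xml'

    entry = _Entry('image/svg+xml', [b'<'], [_check_svg])

    def guess(data):
        if any(data.startswith(sig) for sig in entry.signatures):
            for check in entry.discriminants:
                result = check(data)
                if result:
                    return result
            return entry.mimetype  # master type when no discriminant matches
        return 'application/octet-stream'

    print(guess(b'<svg xmlns="http://www.w3.org/2000/svg"></svg>'))  # image/svg+xml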
@@ -22,10 +22,11 @@ import sys
 import threading
 import time
 import types
+import unicodedata
 import werkzeug.utils
 import zipfile
-from collections import defaultdict, Iterable, Mapping, MutableSet, OrderedDict
-from itertools import islice, groupby, repeat
+from collections import defaultdict, Iterable, Mapping, MutableMapping, MutableSet, OrderedDict
+from itertools import islice, groupby as itergroupby, repeat
 from lxml import etree
 
 from .which import which
@@ -252,7 +253,7 @@ def _fileopen(path, mode, basedir, pathinfo, basename=None):
         pass
     # Not found
     if name.endswith('.rml'):
-        raise IOError('Report %r doesn\'t exist or deleted' % basename)
+        raise IOError('Report %r does not exist or has been deleted' % basename)
     raise IOError('File not found: %s' % basename)
 
 
@@ -260,7 +261,7 @@ def _fileopen(path, mode, basedir, pathinfo, basename=None):
 # iterables
 #----------------------------------------------------------
 def flatten(list):
-    """Flatten a list of elements into a uniqu list
+    """Flatten a list of elements into a unique list
     Author: Christophe Simonis (christophe@tinyerp.com)
 
     Examples::
@@ -349,7 +350,7 @@ def topological_sort(elems):
 try:
     import xlwt
 
-    # add some sanitizations to respect the excel sheet name restrictions
+    # add some sanitization to respect the excel sheet name restrictions
     # as the sheet name is often translatable, can not control the input
     class PatchedWorkbook(xlwt.Workbook):
         def add_sheet(self, name, cell_overwrite_ok=False):
@@ -368,7 +369,7 @@ except ImportError:
 try:
     import xlsxwriter
 
-    # add some sanitizations to respect the excel sheet name restrictions
+    # add some sanitization to respect the excel sheet name restrictions
     # as the sheet name is often translatable, can not control the input
     class PatchedXlsxWorkbook(xlsxwriter.Workbook):
 
@@ -732,6 +733,19 @@ def attrgetter(*items):
         return tuple(resolve_attr(obj, attr) for attr in items)
     return g
 
+# ---------------------------------------------
+# String management
+# ---------------------------------------------
+
+# Inspired by http://stackoverflow.com/questions/517923
+def remove_accents(input_str):
+    """Suboptimal-but-better-than-nothing way to replace accented
+    latin letters by an ASCII equivalent. Will obviously change the
+    meaning of input_str and work only for some cases"""
+    input_str = ustr(input_str)
+    nkfd_form = unicodedata.normalize('NFKD', input_str)
+    return u''.join([c for c in nkfd_form if not unicodedata.combining(c)])
+
 class unquote(str):
     """A subclass of str that implements repr() without enclosing quotation marks
     or escaping, keeping the original string untouched. The name come from Lisp's unquote.
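NFKD normalization splits an accented letter into its base letter plus combining marks, which the comprehension then drops. A quick illustration of the same approach, with ``ustr`` replaced by plain ``str`` for self-containment:

    import unicodedata

    def remove_accents(input_str):
        # decompose, then drop the combining accent codepoints
        nkfd_form = unicodedata.normalize('NFKD', str(input_str))
        return u''.join(c for c in nkfd_form if not unicodedata.combining(c))

    print(remove_accents(u'Héloïse Ångström'))  # 'Heloise Angstrom'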
@@ -855,7 +869,7 @@ def stripped_sys_argv(*strip_args):
     assert all(config.parser.has_option(s) for s in strip_args)
     takes_value = dict((s, config.parser.get_option(s).takes_value()) for s in strip_args)
 
-    longs, shorts = list(tuple(y) for _, y in groupby(strip_args, lambda x: x.startswith('--')))
+    longs, shorts = list(tuple(y) for _, y in itergroupby(strip_args, lambda x: x.startswith('--')))
     longs_eq = tuple(l + '=' for l in longs if takes_value[l])
 
     args = sys.argv[:]
@@ -886,7 +900,7 @@ class ConstantMapping(Mapping):
 
     def __iter__(self):
         """
-        same as len, defaultdict udpates its iterable keyset with each key
+        same as len, defaultdict updates its iterable keyset with each key
         requested, is there a point for this?
         """
         return iter([])
@@ -946,6 +960,10 @@ def freehash(arg):
     else:
         return id(arg)
 
+def clean_context(context):
+    """ This function takes a dictionary and removes each entry with a key starting with 'default_' """
+    return {k: v for k, v in context.items() if not k.startswith('default_')}
+
 class frozendict(dict):
     """ An implementation of an immutable dictionary. """
     def __delitem__(self, key):
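``clean_context`` strips UI defaults (``default_*`` keys) out of a context before it is propagated further. A short usage sketch with made-up context keys:

    def clean_context(context):
        # drop every entry whose key starts with 'default_'
        return {k: v for k, v in context.items() if not k.startswith('default_')}

    ctx = {'lang': 'en_US', 'default_partner_id': 7, 'tz': 'UTC'}
    print(clean_context(ctx))  # {'lang': 'en_US', 'tz': 'UTC'}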
@@ -983,6 +1001,49 @@ class Collector(Mapping):
     def __len__(self):
         return len(self._map)
 
+
+@pycompat.implements_to_string
+class StackMap(MutableMapping):
+    """ A stack of mappings behaving as a single mapping, and used to implement
+        nested scopes. The lookups search the stack from top to bottom, and
+        returns the first value found. Mutable operations modify the topmost
+        mapping only.
+    """
+    __slots__ = ['_maps']
+
+    def __init__(self, m=None):
+        self._maps = [] if m is None else [m]
+
+    def __getitem__(self, key):
+        for mapping in reversed(self._maps):
+            try:
+                return mapping[key]
+            except KeyError:
+                pass
+        raise KeyError(key)
+
+    def __setitem__(self, key, val):
+        self._maps[-1][key] = val
+
+    def __delitem__(self, key):
+        del self._maps[-1][key]
+
+    def __iter__(self):
+        return iter({key for mapping in self._maps for key in mapping})
+
+    def __len__(self):
+        return sum(1 for key in self)
+
+    def __str__(self):
+        return u"<StackMap %s>" % self._maps
+
+    def pushmap(self, m=None):
+        self._maps.append({} if m is None else m)
+
+    def popmap(self):
+        return self._maps.pop()
+
+
 class OrderedSet(MutableSet):
     """ A set collection that remembers the elements first insertion order. """
     __slots__ = ['_map']
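StackMap gives dict semantics over a stack of scopes: reads walk the stack top-down, writes touch only the topmost mapping. A usage sketch, assuming the StackMap class added above is importable:

    scopes = StackMap({'x': 1, 'y': 2})  # outer scope
    scopes.pushmap()                     # enter a nested scope
    scopes['x'] = 10                     # shadows the outer 'x'
    print(scopes['x'], scopes['y'])      # 10 2 -- lookup falls through for 'y'
    scopes.popmap()                      # leave the nested scope
    print(scopes['x'])                   # 1 -- outer value restored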
@@ -1005,6 +1066,19 @@ class LastOrderedSet(OrderedSet):
         OrderedSet.discard(self, elem)
         OrderedSet.add(self, elem)
 
+def groupby(iterable, key=None):
+    """ Return a collection of pairs ``(key, elements)`` from ``iterable``. The
+        ``key`` is a function computing a key value for each element. This
+        function is similar to ``itertools.groupby``, but aggregates all
+        elements under the same key, not only consecutive elements.
+    """
+    if key is None:
+        key = lambda arg: arg
+    groups = defaultdict(list)
+    for elem in iterable:
+        groups[key(elem)].append(elem)
+    return groups.items()
+
 def unique(it):
     """ "Uniquifier" for the provided iterable: will output each element of
     the iterable once.
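Unlike ``itertools.groupby``, this version does not require the input to be sorted: every element lands in its key's bucket regardless of position, which is why ``groupby`` was aliased to ``itergroupby`` above. A quick comparison of the two behaviours:

    from collections import defaultdict
    from itertools import groupby as itergroupby

    def groupby(iterable, key=None):
        if key is None:
            key = lambda arg: arg
        groups = defaultdict(list)
        for elem in iterable:
            groups[key(elem)].append(elem)
        return groups.items()

    data = ['ant', 'bee', 'asp', 'bat']
    print(sorted((k, v) for k, v in groupby(data, key=lambda w: w[0])))
    # [('a', ['ant', 'asp']), ('b', ['bee', 'bat'])] -- all elements aggregated

    print([(k, list(g)) for k, g in itergroupby(data, key=lambda w: w[0])])
    # [('a', ['ant']), ('b', ['bee']), ('a', ['asp']), ('b', ['bat'])] -- consecutive runs only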
@@ -1151,3 +1225,23 @@ pickle.load = _pickle_load
 pickle.loads = lambda text, encoding='ASCII': _pickle_load(io.BytesIO(text), encoding=encoding)
 pickle.dump = pickle_.dump
 pickle.dumps = pickle_.dumps
+
+
+def wrap_module(module, attr_list):
+    """Helper for wrapping a package/module to expose selected attributes
+
+    :param Module module: the actual package/module to wrap, as returned by ``import <module>``
+    :param iterable attr_list: a global list of attributes to expose, usually the top-level
+        attributes and their own main attributes. No support for hiding attributes in case
+        of name collision at different levels.
+    """
+    attr_list = set(attr_list)
+    class WrappedModule(object):
+        def __getattr__(self, attrib):
+            if attrib in attr_list:
+                target = getattr(module, attrib)
+                if isinstance(target, types.ModuleType):
+                    return wrap_module(target, attr_list)
+                return target
+            raise AttributeError(attrib)
+    # module and attr_list are in the closure
+    return WrappedModule()
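``wrap_module`` builds an attribute-filtering proxy, which is useful for exposing only a safe subset of a module, for instance to sandboxed code. A sketch wrapping the standard ``json`` module, reusing the helper as added above:

    import json
    import types

    def wrap_module(module, attr_list):
        attr_list = set(attr_list)
        class WrappedModule(object):
            def __getattr__(self, attrib):
                if attrib in attr_list:
                    target = getattr(module, attrib)
                    if isinstance(target, types.ModuleType):
                        return wrap_module(target, attr_list)
                    return target
                raise AttributeError(attrib)
        return WrappedModule()

    safe_json = wrap_module(json, ['dumps', 'loads'])
    print(safe_json.dumps({'a': 1}))  # works: whitelisted attribute
    try:
        safe_json.dump
    except AttributeError:
        print('dump is hidden')       # not whitelisted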
@@ -33,7 +33,7 @@ def listdir(dir, recursive=False):
 def walksymlinks(top, topdown=True, onerror=None):
     """
     same as os.walk but follow symlinks
-    attention: all symlinks are walked before all normals directories
+    attention: all symlinks are walked before all normal directories
     """
     for dirpath, dirnames, filenames in os.walk(top, topdown, onerror):
         if topdown:
@@ -43,7 +43,7 @@ def parse_version(s):
     The algorithm assumes that strings like "-" and any alpha string that
     alphabetically follows "final" represents a "patch level". So, "2.4-1"
     is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
-    considered newer than "2.4-1", whic in turn is newer than "2.4".
+    considered newer than "2.4-1", which in turn is newer than "2.4".
 
     Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
     come before "final" alphabetically) are assumed to be pre-release versions,
@ -107,8 +107,8 @@ _SAFE_OPCODES = _EXPR_OPCODES.union(set(opmap[x] for x in [
|
|||||||
'JUMP_FORWARD', 'JUMP_IF_TRUE', 'JUMP_IF_FALSE', 'JUMP_ABSOLUTE',
|
'JUMP_FORWARD', 'JUMP_IF_TRUE', 'JUMP_IF_FALSE', 'JUMP_ABSOLUTE',
|
||||||
# New in Python 2.7 - http://bugs.python.org/issue4715 :
|
# New in Python 2.7 - http://bugs.python.org/issue4715 :
|
||||||
'JUMP_IF_FALSE_OR_POP', 'JUMP_IF_TRUE_OR_POP', 'POP_JUMP_IF_FALSE',
|
'JUMP_IF_FALSE_OR_POP', 'JUMP_IF_TRUE_OR_POP', 'POP_JUMP_IF_FALSE',
|
||||||
'POP_JUMP_IF_TRUE', 'SETUP_EXCEPT', 'END_FINALLY', 'RAISE_VARARGS',
|
'POP_JUMP_IF_TRUE', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'END_FINALLY',
|
||||||
'LOAD_NAME', 'STORE_NAME', 'DELETE_NAME', 'LOAD_ATTR',
|
'RAISE_VARARGS', 'LOAD_NAME', 'STORE_NAME', 'DELETE_NAME', 'LOAD_ATTR',
|
||||||
'LOAD_FAST', 'STORE_FAST', 'DELETE_FAST', 'UNPACK_SEQUENCE',
|
'LOAD_FAST', 'STORE_FAST', 'DELETE_FAST', 'UNPACK_SEQUENCE',
|
||||||
'LOAD_GLOBAL', # Only allows access to restricted globals
|
'LOAD_GLOBAL', # Only allows access to restricted globals
|
||||||
] if x in opmap))
|
] if x in opmap))
|
||||||
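Whitelisting SETUP_FINALLY lets sandboxed code compile try/finally blocks, since CPython emits that opcode for them; previously only try/except (SETUP_EXCEPT) passed the bytecode check. A hedged illustration of what the opcode scan sees, noting that the exact opcodes emitted vary by Python version (SETUP_FINALLY disappears in 3.11):

    import dis

    code = compile("""
    try:
        x = 1
    finally:
        x = 2
    """.replace('\n    ', '\n'), '<sandbox>', 'exec')

    # on the Python versions this backport targets, SETUP_FINALLY appears in
    # the compiled opcodes, so it must be whitelisted for the check to pass
    print(any(ins.opname == 'SETUP_FINALLY' for ins in dis.get_instructions(code)))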