[ADD] upstream patching

Author: Parthiv, committed 2018-07-19 16:22:28 +05:30
parent 67a50b29b4
commit ec59f69127
16 changed files with 114 additions and 33 deletions


@@ -10,7 +10,7 @@
'description':
"""
Flectra dashboard
==============
==================
* Quick access to install apps
* Quick users add
* Access all planners at one place

debian/postrm

@@ -10,9 +10,6 @@ case "${1}" in
remove)
deluser --quiet --system $FLECTRA_USER || true
delgroup --quiet --system --only-if-empty $FLECTRA_GROUP || true
if [ -d "$FLECTRA_LIB_DIR" ]; then
rm -rf $FLECTRA_LIB_DIR
fi
;;
purge)


@@ -117,11 +117,23 @@ def autodirective_bound(app, modules):
# strip 'js:auto'
objname = self.arguments[0].strip()
if not modules:
read_js(app, modules)
path = self.env.temp_data.get('autojs:prefix', []) + [objname]
item = modules[path[0]]
# build complete path to object
path = self.env.temp_data.get('autojs:prefix', []) + objname.split('.')
# look for module/object split
for i in range(1, len(path)):
modname, objpath = '.'.join(path[:-i]), path[-i:]
module = modules.get(modname)
if module:
break
else:
raise Exception("Found no valid module in " + '.'.join(path))
item = module
# deref' namespaces until we reach the object we're looking for
for k in path[1:]:
for k in objpath:
item = item.get_property(k)
docclass = documenters[self.name]
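
Note: the hunk above replaces the old single-segment lookup (modules[path[0]]) with a longest-prefix search over the dotted path, so a name like "web.core.bus" can resolve to module "web.core" plus object path ["bus"]. A standalone sketch of that resolution strategy, using a plain dict in place of the extension's modules mapping (names here are illustrative only):

    def resolve(modules, path):
        # Try progressively shorter module prefixes: for a.b.c check module
        # 'a.b' (object ['c']), then 'a' (object ['b', 'c']).
        for i in range(1, len(path)):
            modname, objpath = '.'.join(path[:-i]), path[-i:]
            module = modules.get(modname)
            if module:
                return module, objpath
        raise Exception("Found no valid module in " + '.'.join(path))

    print(resolve({'web.core': 'MOD'}, ['web', 'core', 'bus']))  # ('MOD', ['bus'])
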
@@ -210,8 +222,12 @@ class NSDocumenter(Documenter):
def make_content(self, all_members):
doc = self.item
ret = nodes.section()
if doc.doc:
self.directive.state.nested_parse(to_list(doc.doc), 0, ret)
self.directive.state.nested_parse(self.directive.content, 0, ret)
ret += self.document_properties(all_members)
return ret.children
@@ -307,12 +323,12 @@ class ModuleDocumenter(NSDocumenter):
with addto(fields, nodes.field()) as field:
self.make_dependencies(field, doc)
self.directive.state.nested_parse(self.directive.content, 0, content)
if doc.doc:
# FIXME: source offset
self.directive.state.nested_parse(to_list(doc.doc, source=doc['sourcefile']), 0, content)
self.directive.state.nested_parse(self.directive.content, 0, content)
content += self.document_properties(all_members)
return content
@@ -395,6 +411,8 @@ class ClassDocumenter(NSDocumenter):
if doc.doc:
self.directive.state.nested_parse(to_list(doc.doc), 0, ret)
self.directive.state.nested_parse(self.directive.content, 0, ret)
ret += self.document_properties(all_members)
ret += self.document_subtypes(subtypes)
@@ -466,9 +484,12 @@ class InstanceDocumenter(Documenter):
def make_content(self, all_members):
ret = nodes.section()
if self.item.doc:
self.directive.state.nested_parse(to_list(self.item.doc), 0, ret)
return ret.children
self.directive.state.nested_parse(self.directive.content, 0, ret)
return ret.children
class FunctionDocumenter(Documenter):
@@ -487,9 +508,12 @@ class FunctionDocumenter(Documenter):
def make_content(self, all_members):
ret = nodes.section()
doc = self.item
if doc.doc:
self.directive.state.nested_parse(to_list(doc.doc), 0, ret)
self.directive.state.nested_parse(self.directive.content, 0, ret)
check_parameters(self, doc)
params, subtypes = extract_subtypes(self.item.name, self.item)
@@ -677,6 +701,9 @@ class PropertyDocumenter(Documenter):
def make_content(self, all_members):
doc = self.item
ret = nodes.section()
self.directive.state.nested_parse(self.directive.content, 0, ret)
if doc.doc:
self.directive.state.nested_parse(to_list(doc.doc), 0, ret)
return ret.children


@@ -35,6 +35,16 @@ def test_parser():
types.Literal('null'),
])])
])
assert types.parse('Function<Array<Object[]>>') == types.Alt([
types.Type('Function', [types.Alt([
types.Type('Array', [types.Alt([
types.Type('Array', [
types.Type('Object', [])
])
])])
])])
])
def test_tokens():
toks = list(types.tokenize('A'))
@@ -64,6 +74,19 @@ def test_tokens():
(types.OP, '>')
]
toks = list(types.tokenize('Function<Array<Object[]>>'))
assert toks == [
(types.NAME, 'Function'),
(types.OP, '<'),
(types.NAME, 'Array'),
(types.OP, '<'),
(types.NAME, 'Object'),
(types.OP, '['),
(types.OP, ']'),
(types.OP, '>'),
(types.OP, '>')
]
def test_peekable():
p = types.Peekable(range(5))


@@ -29,6 +29,9 @@ def tokenize(typespec):
elif toktype == token.OP:
if string in '|<>[].,':
yield (OP, string)
elif string == '>>':
yield (OP, '>')
yield (OP, '>')
elif string == '*': # maybe?
yield (NAME, 'any')
elif string in '()':
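
Note: the extra '>>' branch above (and the new parser/tokenizer test cases) are needed because Python's own tokenizer emits '>>' as a single right-shift operator, so the two closing brackets of a nested generic such as Function<Array<Object[]>> arrive fused and have to be split back into two '>' tokens. A quick standalone check, not part of the patch:

    import io
    import tokenize

    toks = tokenize.generate_tokens(io.StringIO('Function<Array<Object[]>>').readline)
    print([t.string for t in toks if t.type in (tokenize.NAME, tokenize.OP)])
    # ['Function', '<', 'Array', '<', 'Object', '[', ']', '>>']
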


@@ -4,6 +4,7 @@
import logging
from flectra import api, models
from flectra.exceptions import AccessDenied
_logger = logging.getLogger(__name__)
@@ -36,6 +37,8 @@ class AutoVacuum(models.AbstractModel):
@api.model
def power_on(self, *args, **kwargs):
if not self.env.user._is_admin():
raise AccessDenied()
self.env['ir.attachment']._file_gc()
self._gc_transient_models()
self._gc_user_logs()


@@ -101,9 +101,12 @@ class AssetsBundle(object):
elif f['atype'] == 'text/javascript':
self.javascripts.append(JavascriptAsset(self, url=f['url'], filename=f['filename'], inline=f['content']))
# depreciated and will remove after v1
def to_html(self, sep=None, css=True, js=True, debug=False, async=False, url_for=(lambda url: url)):
nodes = self.to_node(css=css, js=js, debug=debug, async=async)
# depreciated and will remove after v11
def to_html(self, sep=None, css=True, js=True, debug=False, async_load=False, url_for=(lambda url: url), **kw):
if 'async' in kw:
_logger.warning("Using deprecated argument 'async' in to_html call, use 'async_load' instead.")
async_load = kw['async']
nodes = self.to_node(css=css, js=js, debug=debug, async_load=async_load)
if sep is None:
sep = u'\n '
@@ -121,10 +124,13 @@ class AssetsBundle(object):
return sep + sep.join(response)
def to_node(self, css=True, js=True, debug=False, async=False):
def to_node(self, css=True, js=True, debug=False, async_load=False, **kw):
"""
:returns [(tagName, attributes, content)] if the tag is auto close
"""
if 'async' in kw:
_logger.warning("Using deprecated argument 'async' in to_node call, use 'async_load' instead.")
async_load = kw['async']
response = []
if debug == 'assets':
if css and self.stylesheets:
@@ -157,7 +163,7 @@ class AssetsBundle(object):
response.append(JavascriptAsset(self, inline=self.dialog_message(msg)).to_node())
if js and self.javascripts:
attr = OrderedDict([
["async", "async" if async else None],
["async", "async" if async_load else None],
["type", "text/javascript"],
["src", self.js().url],
])
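
Note: the renames above exist because async became a reserved keyword in Python 3.7, so to_html()/to_node() can no longer declare it as a parameter; the **kw fallback keeps old call sites (and dynamically built kwargs) working with a deprecation warning. A standalone sketch of the compatibility shim, simplified and not the real AssetsBundle:

    import logging

    _logger = logging.getLogger(__name__)

    def to_html(css=True, js=True, debug=False, async_load=False, **kw):
        if 'async' in kw:
            # Legacy spelling: on Python >= 3.7 it can only arrive via **kwargs.
            _logger.warning("Using deprecated argument 'async', use 'async_load' instead.")
            async_load = kw['async']
        return async_load

    print(to_html(async_load=True))    # True
    print(to_html(**{'async': True}))  # True, with a deprecation warning logged
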


@@ -278,24 +278,28 @@ class IrQWeb(models.AbstractModel, QWeb):
# compatibility to remove after v11 - DEPRECATED
@tools.conditional(
'xml' not in tools.config['dev_mode'],
tools.ormcache_context('xmlid', 'options.get("lang", "en_US")', 'css', 'js', 'debug', 'async', keys=("website_id",)),
tools.ormcache_context('xmlid', 'options.get("lang", "en_US")', 'css', 'js', 'debug', 'kw.get("async")', 'async_load', keys=("website_id",)),
)
def _get_asset(self, xmlid, options, css=True, js=True, debug=False, async=False, values=None):
def _get_asset(self, xmlid, options, css=True, js=True, debug=False, async_load=False, values=None, **kw):
if 'async' in kw:
async_load = kw['async']
files, remains = self._get_asset_content(xmlid, options)
asset = self.get_asset_bundle(xmlid, files, remains, env=self.env)
return asset.to_html(css=css, js=js, debug=debug, async=async, url_for=(values or {}).get('url_for', lambda url: url))
return asset.to_html(css=css, js=js, debug=debug, async_load=async_load, url_for=(values or {}).get('url_for', lambda url: url))
@tools.conditional(
# in non-xml-debug mode we want assets to be cached forever, and the admin can force a cache clear
# by restarting the server after updating the source code (or using the "Clear server cache" in debug tools)
'xml' not in tools.config['dev_mode'],
tools.ormcache_context('xmlid', 'options.get("lang", "en_US")', 'css', 'js', 'debug', 'async', keys=("website_id",)),
tools.ormcache_context('xmlid', 'options.get("lang", "en_US")', 'css', 'js', 'debug', 'kw.get("async")', 'async_load', keys=("website_id",)),
)
def _get_asset_nodes(self, xmlid, options, css=True, js=True, debug=False, async=False, values=None):
def _get_asset_nodes(self, xmlid, options, css=True, js=True, debug=False, async_load=False, values=None, **kw):
if 'async' in kw:
async_load = kw['async']
files, remains = self._get_asset_content(xmlid, options)
asset = self.get_asset_bundle(xmlid, files, env=self.env)
remains = [node for node in remains if (css and node[0] == 'link') or (js and node[0] != 'link')]
return remains + asset.to_node(css=css, js=js, debug=debug, async=async)
return remains + asset.to_node(css=css, js=js, debug=debug, async_load=async_load)
@tools.ormcache_context('xmlid', 'options.get("lang", "en_US")', keys=("website_id",))
def _get_asset_content(self, xmlid, options):


@@ -477,6 +477,7 @@ class IrTranslation(models.Model):
discarded += trans
else:
trans.write({'src': matches[0], 'state': trans.state})
trans_src.append(matches[0]) # avoid reuse of term
else:
outdated += trans


@@ -157,6 +157,7 @@
<record model="ir.module.category" id="module_category_theme">
<field name="name">Theme</field>
<field name="exclusive" eval="1"/>
<field name="sequence">50</field>
</record>


@@ -324,7 +324,9 @@ class Partner(models.Model):
@api.multi
def copy(self, default=None):
self.ensure_one()
default = dict(default or {}, name=_('%s (copy)') % self.name)
chosen_name = default.get('name') if default else ''
new_name = chosen_name or _('%s (copy)') % self.name
default = dict(default or {}, name=new_name)
return super(Partner, self).copy(default)
@api.onchange('parent_id')
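
Note: copy() used to force the new record's name to "%s (copy)", silently discarding any name passed in default; the change keeps a caller-provided name and only falls back to the "(copy)" suffix. The same pattern is applied to res.groups in the next file. A standalone sketch of the fallback, as a plain function with no ORM involved:

    def copy_defaults(current_name, default=None):
        # Prefer an explicit name from the caller, otherwise suffix the original.
        chosen_name = default.get('name') if default else ''
        new_name = chosen_name or '%s (copy)' % current_name
        return dict(default or {}, name=new_name)

    print(copy_defaults('Azure Interior'))                     # {'name': 'Azure Interior (copy)'}
    print(copy_defaults('Azure Interior', {'name': 'Clone'}))  # {'name': 'Clone'}
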


@@ -145,7 +145,9 @@ class Groups(models.Model):
@api.multi
def copy(self, default=None):
self.ensure_one()
default = dict(default or {}, name=_('%s (copy)') % self.name)
chosen_name = default.get('name') if default else ''
default_name = chosen_name or _('%s (copy)') % self.name
default = dict(default or {}, name=default_name)
return super(Groups, self).copy(default)
@api.multi


@@ -123,7 +123,17 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
continue
_logger.debug('loading module %s (%d/%d)', module_name, index, module_count)
migrations.migrate_module(package, 'pre')
needs_update = (
hasattr(package, "init")
or hasattr(package, "update")
or package.state in ("to install", "to upgrade")
)
if needs_update:
if package.name != 'base':
registry.setup_models(cr)
migrations.migrate_module(package, 'pre')
load_openerp_module(package.name)
new_install = package.state == 'to install'
@@ -136,8 +146,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
model_names = registry.load(cr, package)
loaded_modules.append(package.name)
if (hasattr(package, 'init') or hasattr(package, 'update')
or package.state in ('to install', 'to upgrade')):
if needs_update:
models_updated |= set(model_names)
models_to_check -= set(model_names)
registry.setup_models(cr)
@@ -157,7 +166,7 @@ def load_module_graph(cr, graph, status=None, perform_checks=True,
if hasattr(package, 'init') or package.state == 'to install':
mode = 'init'
if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'):
if needs_update:
env = api.Environment(cr, SUPERUSER_ID, {})
# Can't put this line out of the loop: ir.module.module will be
# registered by init_models() above.
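
Note: the three hunks above hoist the repeated "does this module need (re)loading" test into a single needs_update flag, and additionally gate the 'pre' migration step (plus a registry.setup_models() call for non-base modules) on it. A standalone sketch of the predicate, with package stubbed; the real object is a graph node wrapping ir.module.module:

    def needs_update(package):
        return (
            hasattr(package, "init")
            or hasattr(package, "update")
            or package.state in ("to install", "to upgrade")
        )

    class Package:
        def __init__(self, state, **flags):
            self.state = state
            for name in flags:
                setattr(self, name, True)

    print(needs_update(Package("installed")))               # False
    print(needs_update(Package("to upgrade")))              # True
    print(needs_update(Package("installed", update=True)))  # True
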


@@ -130,7 +130,7 @@ class Registry(Mapping):
self.loaded = False # whether all modules are loaded
self.ready = False # whether everything is set up
# Inter-process signaling (used only when flectra.multi_process is True):
# Inter-process signaling:
# The `base_registry_signaling` sequence indicates the whole registry
# must be reloaded.
# The `base_cache_signaling sequence` indicates all caches must be
@@ -358,7 +358,7 @@ class Registry(Mapping):
def setup_signaling(self):
""" Setup the inter-process signaling on this registry. """
if not flectra.multi_process:
if self.in_test_mode():
return
with self.cursor() as cr:
@@ -384,7 +384,7 @@ class Registry(Mapping):
""" Check whether the registry has changed, and performs all necessary
operations to update the registry. Return an up-to-date registry.
"""
if not flectra.multi_process:
if self.in_test_mode():
return self
with closing(self.cursor()) as cr:
@@ -410,7 +410,7 @@ class Registry(Mapping):
def signal_changes(self):
""" Notifies other processes if registry or cache has been invalidated. """
if flectra.multi_process and self.registry_invalidated:
if self.registry_invalidated and not self.in_test_mode():
_logger.info("Registry changed, signaling through the database")
with closing(self.cursor()) as cr:
cr.execute("select nextval('base_registry_signaling')")
@@ -418,7 +418,7 @@ class Registry(Mapping):
# no need to notify cache invalidation in case of registry invalidation,
# because reloading the registry implies starting with an empty cache
elif flectra.multi_process and self.cache_invalidated:
elif self.cache_invalidated and not self.in_test_mode():
_logger.info("At least one model cache has been invalidated, signaling through the database.")
with closing(self.cursor()) as cr:
cr.execute("select nextval('base_cache_signaling')")
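
Note: these registry hunks drop the flectra.multi_process guard: signaling through the base_registry_signaling / base_cache_signaling sequences (and the corresponding checks) now always runs, and is skipped only while the registry is in test mode. A minimal sketch of the new policy, with an assumed helper name rather than the real Registry API:

    def plan_signal(registry_invalidated, cache_invalidated, in_test_mode):
        # Mirrors signal_changes(): registry invalidation takes precedence,
        # cache invalidation is signaled on its own, tests never signal.
        if in_test_mode:
            return None
        if registry_invalidated:
            return "select nextval('base_registry_signaling')"
        if cache_invalidated:
            return "select nextval('base_cache_signaling')"
        return None

    print(plan_signal(True, True, False))   # bump base_registry_signaling only
    print(plan_signal(False, True, True))   # None: no signaling in test mode
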


@@ -18,7 +18,8 @@ def compute_session_token(session, env):
def check_session(session, env):
self = env['res.users'].browse(session.uid)
if flectra.tools.misc.consteq(self._compute_session_token(session.sid), session.session_token):
expected = self._compute_session_token(session.sid)
if expected and flectra.tools.misc.consteq(expected, session.session_token):
return True
self._invalidate_session_cache()
return False
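
Note: the guard above refuses to authenticate when the expected token is falsy (for example when the user record or its password hash is gone), because a constant-time comparison of two empty strings would otherwise succeed. Assuming consteq behaves like hmac.compare_digest, a standalone illustration (not part of the patch):

    import hmac

    def check_session(expected, provided):
        # Never accept a session when there is no expected token to compare against.
        return bool(expected) and hmac.compare_digest(expected, provided)

    print(hmac.compare_digest('', ''))  # True: why the extra "expected and ..." guard matters
    print(check_session('', ''))        # False
    print(check_session('abc', 'abc'))  # True
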


@@ -101,6 +101,8 @@ _SAFE_OPCODES = _EXPR_OPCODES.union(set(opmap[x] for x in [
'CALL_FUNCTION_EX',
# Already in P2 but apparently the first one is used more aggressively in P3
'CALL_FUNCTION_KW', 'CALL_FUNCTION_VAR', 'CALL_FUNCTION_VAR_KW',
# Added in P3.7 https://bugs.python.org/issue26110
'CALL_METHOD', 'LOAD_METHOD',
'GET_ITER', 'FOR_ITER', 'YIELD_VALUE',
'JUMP_FORWARD', 'JUMP_IF_TRUE', 'JUMP_IF_FALSE', 'JUMP_ABSOLUTE',
# New in Python 2.7 - http://bugs.python.org/issue4715 :
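
Note: CPython 3.7 compiles attribute-style calls to the new LOAD_METHOD/CALL_METHOD opcode pair (bpo-26110), so those opcodes must be whitelisted or any obj.method() inside a safe_eval'd expression gets rejected. A quick standalone way to see the bytecode on Python 3.7 through 3.10 (not part of the patch):

    import dis

    # Shows LOAD_METHOD / CALL_METHOD instead of the older LOAD_ATTR / CALL_FUNCTION.
    dis.dis(compile("'a'.upper()", '<sandbox>', 'eval'))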