added initial implementation of an include handler
Splitting configuration files into multiple files and implementing an
include mechanism makes handling configurations more flexible and enables
the following scenarios:

 - including a kas configuration file from an external meta layer into
   your own project
 - splitting many similar configurations into multiple files that share
   a couple of common files

To include files, it is necessary to add an 'includes' entry to the kas
configuration. To include files from the same repository, do it like
this:

-----
includes:
  - ../base/base_include.yml
-----

The path is relative to the current configuration file. If it starts
with a path separator ("/"), it is absolute.

To include files from a different repository, do it like this:

-----
includes:
  - file: bsps/my_machine_include.yml
    repo: collected_machines

repos:
  collected_machines:
    url: https://url.to.git.repo/
    refspec: master
-----

You have to make sure that the repository definition is available in
your current file, or in any include that is available at this point.
YAML ("*.yml") and JSON ("*.json") files are supported.

This patch also includes some changes to the configuration file
structure. Here is an overview:

The value of the 'repos' key is a dictionary that maps a repo
identifier to a dictionary with up to 5 entries:

 - url: The URL of the repository. If it is missing, no git operations
   are performed.
 - refspec: The git refspec of the repository that is checked out. If
   it is missing, the latest commit of the default branch is used.
 - name: Defines under which name the repository is stored. If it is
   missing, the repository identifier is used.
 - path: The path where the repository is stored. If no url is given
   and path is missing, the repository references the repository the
   configuration file is stored in. If no url is given and a path is
   specified, the repository references a directory where layers might
   be stored. If a url is specified, path can be used to overwrite the
   default checkout directory (kas_work_dir + repo.name).
 - layers: A dictionary of layers that should be included in the
   bblayers.conf. The keys are paths relative to the repository, and a
   layer is excluded if its value is one of "excluded", "disabled",
   "false", "0", "n", or "no". Boolean values are also accepted. Any
   other value, including None, means that the layer is included in
   the bblayers.conf. If 'layers' is missing or empty, the repository
   itself is included in the bblayers.conf; if it is specified, the
   repository itself is not included.

A combined example follows below.

Signed-off-by: Claudius Heine <ch@denx.de>
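For illustration, here is a hypothetical configuration combining the keys
described above (the repository name, URL, paths, and layer names are all
made up):

-----
machine: mymachine
distro: poky
target: core-image-minimal

includes:
  - ../base/base_include.yml

repos:
  meta-custom:
    url: https://url.to.git.repo/meta-custom
    refspec: master
    path: layers/meta-custom
    layers:
      meta-custom-bsp:
      meta-custom-extra: excluded
-----

Here 'meta-custom-bsp' (empty value, i.e. None) is added to the
bblayers.conf, 'meta-custom-extra' is excluded, and the repository itself
is not added because 'layers' is specified.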
parent c950de46f9
commit 34dd07e76e

 kas/config.py | 339
@@ -27,8 +27,6 @@ import os
 import sys
 import logging
 import errno
-import json
-import yaml
 
 try:
     from distro import id as get_distro_id
@@ -59,6 +57,7 @@ class Config:
     def __init__(self):
         self.__kas_work_dir = os.environ.get('KAS_WORK_DIR', os.getcwd())
         self.environ = {}
+        self._config = {}
 
     @property
     def build_dir(self):
@@ -100,6 +99,84 @@
 
         return os.environ.get('KAS_REPO_REF_DIR', None)
 
+    def get_proxy_config(self):
+        """
+        Returns the proxy settings
+        """
+        return self._config.get('proxy_config', {
+            'http_proxy': os.environ.get('http_proxy', ''),
+            'https_proxy': os.environ.get('https_proxy', ''),
+            'no_proxy': os.environ.get('no_proxy', '')
+        })
+
+    def get_repos(self):
+        """
+        Returns the list of repos.
+        """
+        # pylint: disable=no-self-use
+
+        return []
+
+    def pre_hook(self, fname):
+        """
+        Returns a function that is executed before every command or None.
+        """
+        # pylint: disable=unused-argument
+
+        pass
+
+    def post_hook(self, fname):
+        """
+        Returns a function that is executed after every command or None.
+        """
+        # pylint: disable=unused-argument
+
+        pass
+
+    def get_hook(self, fname):
+        """
+        Returns a function that is executed instead of the command or None.
+        """
+        # pylint: disable=unused-argument
+
+        pass
+
+    def get_bitbake_target(self):
+        """
+        Return the bitbake target
+        """
+        return self._config.get('target', 'core-image-minimal')
+
+    def get_bblayers_conf_header(self):
+        """
+        Returns the bblayers.conf header
+        """
+        return '\n'.join(self._config.get('bblayers_conf_header', {}).values())
+
+    def get_local_conf_header(self):
+        """
+        Returns the local.conf header
+        """
+        return '\n'.join(self._config.get('local_conf_header', {}).values())
+
+    def get_machine(self):
+        """
+        Returns the machine
+        """
+        return self._config.get('machine', 'qemu')
+
+    def get_distro(self):
+        """
+        Returns the distro
+        """
+        return self._config.get('distro', 'poky')
+
+    def get_gitlabci_config(self):
+        """
+        Returns the GitlabCI configuration
+        """
+        return self._config.get('gitlabci_config', '')
+
 
 class ConfigPython(Config):
     """
@@ -137,27 +214,18 @@ class ConfigPython(Config):
         return output
 
     def pre_hook(self, fname):
-        """
-        Returns a function that is executed before every command or None.
-        """
         try:
             self._config[fname + '_prepend'](self)
         except KeyError:
             pass
 
     def post_hook(self, fname):
-        """
-        Returns a function that is executed after every command or None.
-        """
         try:
             self._config[fname + '_append'](self)
         except KeyError:
             pass
 
     def get_hook(self, fname):
-        """
-        Returns a function that is executed instead of the command or None.
-        """
         try:
             return self._config[fname]
         except KeyError:
@@ -171,15 +239,9 @@
         self.repos = self._config['get_repos'](self, target)
 
-    def get_proxy_config(self):
-        """
-        Returns the proxy settings
-        """
-        return self._config['get_proxy_config']()
-
     def get_repos(self):
         """
         Returns the list of repos
         """
         return iter(self.repos)
 
     def get_target(self):
@@ -245,178 +307,119 @@
 
 class ConfigStatic(Config):
     """
-    An abstract class for static configuration files
+    Implements the static kas configuration based on config files.
     """
 
     def __init__(self, filename, _):
+        from .includehandler import GlobalIncludes, IncludeException
         super().__init__()
-        self.filename = os.path.abspath(filename)
-        self._config = {}
-
-    def pre_hook(self, _):
-        """
-        Not used
-        """
-        pass
-
-    def post_hook(self, _):
-        """
-        Not used
-        """
-        pass
-
-    def get_hook(self, _):
-        """
-        Not used
-        """
-        pass
-
-    def get_proxy_config(self):
-        """
-        Returns the proxy settings
-        """
-        try:
-            return self._config['proxy_config']
-        except KeyError:
-            return {'http_proxy': os.environ.get('http_proxy', ''),
-                    'https_proxy': os.environ.get('https_proxy', ''),
-                    'no_proxy': os.environ.get('no_proxy', '')}
+        self.setup_environ()
+        self.filename = os.path.abspath(filename)
+        self.handler = GlobalIncludes(self.filename)
+        complete = False
+        repos = {}
+        missing_repos_old = []
+        while not complete:
+            complete = True
+            self._config, missing_repos = self.handler.get_config(repos=repos)
+            if missing_repos_old and missing_repos == missing_repos_old:
+                raise IncludeException('Could not fetch all repos needed by '
+                                       'includes.')
+            missing_repos_old = missing_repos
+            if missing_repos:
+                complete = False
+                self._fetch_missing_repos(missing_repos)
+                repo_dict = self.get_repo_dict()
+                repos = {r: repo_dict[r].path for r in repo_dict}
 
     def get_repos(self):
         """
-        Returns the list of repos
+        Returns the list of repos.
         """
-        repos = []
-        for repo in self._config['repos']:
-            try:
-                layers = repo['layers']
-            except KeyError:
-                layers = None
+        return list(self.get_repo_dict().values())
 
-            url = repo['url']
-            if url == '':
-                # in-tree configuration
-                (_, output) = run_cmd(['/usr/bin/git',
-                                       'rev-parse',
-                                       '--show-toplevel'],
-                                      cwd=os.path.dirname(self.filename),
-                                      env=self.environ)
-                url = output.strip()
+    def get_repo_dict(self):
+        """
+        Returns a dictionary containing the repositories with
+        their name (as it is defined in the config file) as key
+        and the `Repo` instances as value.
+        """
+        repo_config_dict = self._config.get('repos', {})
+        repo_dict = {}
+        for repo in repo_config_dict:
+
+            repo_config_dict[repo] = repo_config_dict[repo] or {}
+            layers_dict = repo_config_dict[repo].get('layers', {})
+            layers = list(filter(lambda x, laydict=layers_dict:
+                                 str(laydict[x]).lower() not in
+                                 ['disabled', 'excluded', 'n', 'no', '0',
+                                  'false'],
+                                 layers_dict))
+            url = repo_config_dict[repo].get('url', None)
+            name = repo_config_dict[repo].get('name', repo)
+            refspec = repo_config_dict[repo].get('refspec', None)
+            path = repo_config_dict[repo].get('path', None)
+
+            if url is None:
+                # No git operation on repository
+                if path is None:
+                    # In-tree configuration
+                    path = os.path.dirname(self.filename)
+                    (_, output) = run_cmd(['/usr/bin/git',
+                                           'rev-parse',
+                                           '--show-toplevel'],
+                                          cwd=path,
+                                          env=self.environ)
+                    path = output.strip()
+
+                url = path
                 rep = Repo(url=url,
-                           path=url,
+                           path=path,
                            layers=layers)
                 rep.disable_git_operations()
             else:
-                name = os.path.basename(os.path.splitext(url)[0])
+                path = path or os.path.join(self.kas_work_dir, name)
                 rep = Repo(url=url,
-                           path=os.path.join(self.kas_work_dir, name),
-                           refspec=repo['refspec'],
+                           path=path,
+                           refspec=refspec,
                            layers=layers)
-            repos.append(rep)
+            repo_dict[repo] = rep
+        return repo_dict
 
-        return repos
-
-    def get_bitbake_target(self):
+    def _fetch_missing_repos(self, missing_repos):
         """
-        Return the bitbake target
+        Fetches all repos from the missing_repos list.
         """
-        try:
-            return self._config['target']
-        except KeyError:
-            return 'core-image-minimal'
+        from .libcmds import (Macro, ReposFetch, ReposCheckout)
 
-    def get_bblayers_conf_header(self):
-        """
-        Returns the bblayers.conf header
-        """
-        try:
-            return self._config['bblayers_conf_header']
-        except KeyError:
-            return ''
+        class MissingRepoConfig(Config):
+            """
+            Custom config class, because we only want to
+            fetch the missing repositories needed for a
+            complete configuration not all of them.
+            """
+            def __init__(self, outerself):
+                super().__init__()
+                self.outerself = outerself
+                self.filename = outerself.filename
+                self.environ = outerself.environ
+                self.__kas_work_dir = outerself.kas_work_dir
 
-    def get_local_conf_header(self):
-        """
-        Returns the local.conf header
-        """
-        try:
-            return self._config['local_conf_header']
-        except KeyError:
-            return ''
+            def get_repo_ref_dir(self):
+                return self.outerself.get_repo_ref_dir()
 
-    def get_machine(self):
-        """
-        Returns the machine
-        """
-        try:
-            return self._config['machine']
-        except KeyError:
-            return 'qemu'
+            def get_proxy_config(self):
+                return self.outerself.get_proxy_config()
 
-    def get_distro(self):
-        """
-        Returns the distro
-        """
-        try:
-            return self._config['distro']
-        except KeyError:
-            return 'poky'
+            def get_repos(self):
+                return list(map(lambda x: self.outerself.get_repo_dict()[x],
+                                missing_repos))
 
-    def get_gitlabci_config(self):
-        """
-        Returns the GitlabCI configuration
-        """
-        try:
-            return self._config['gitlabci_config']
-        except KeyError:
-            return ''
-
-
-class ConfigJson(ConfigStatic):
-    """
-    Implements the configuration based on JSON files
-    """
-
-    def __init__(self, filename, target):
-        super().__init__(filename, target)
-        self.filename = os.path.abspath(filename)
-        try:
-            with open(self.filename, 'r') as fds:
-                self._config = json.load(fds)
-        except json.decoder.JSONDecodeError as msg:
-            logging.error('Could not load JSON config: %s', msg)
-            sys.exit(1)
-        self.setup_environ()
-
-    def get_bblayers_conf_header(self):
-        header_list = super().get_bblayers_conf_header()
-        conf = ''
-        for line in header_list:
-            conf += str(line) + '\n'
-        return conf
-
-    def get_local_conf_header(self):
-        header_list = super().get_local_conf_header()
-        conf = ''
-        for line in header_list:
-            conf += str(line) + '\n'
-        return conf
-
-
-class ConfigYaml(ConfigStatic):
-    """
-    Implements the configuration based on Yaml files
-    """
-
-    def __init__(self, filename, target):
-        super().__init__(filename, target)
-        self.filename = os.path.abspath(filename)
-        try:
-            with open(self.filename, 'r') as fds:
-                self._config = yaml.load(fds)
-        except yaml.loader.ParserError as msg:
-            logging.error('Could not load YAML config: %s', msg)
-            sys.exit(1)
-        self.setup_environ()
+        macro = Macro()
+        macro.add(ReposFetch())
+        macro.add(ReposCheckout())
+        macro.run(MissingRepoConfig(self))
 
 
 def load_config(filename, target):
@@ -428,10 +431,8 @@ def load_config(filename, target):
     (_, ext) = os.path.splitext(filename)
     if ext == '.py':
         cfg = ConfigPython(filename, target)
-    elif ext == '.json':
-        cfg = ConfigJson(filename, target)
-    elif ext == '.yml':
-        cfg = ConfigYaml(filename, target)
+    elif ext in ['.json', '.yml']:
+        cfg = ConfigStatic(filename, target)
     else:
         logging.error('Config file extension not recognized')
         sys.exit(1)
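An aside on the layer filter introduced in get_repo_dict above: its effect
can be sketched in standalone Python (the layer names here are hypothetical,
not from the patch):

-----
# Sketch of the layer-selection rule: a value of "excluded", "disabled",
# "false", "0", "n" or "no" (case-insensitive, booleans included) drops a
# layer; any other value, including None, keeps it.
layers_dict = {
    'meta-layer-a': None,        # kept (None is not an exclusion value)
    'meta-layer-b': 'disabled',  # dropped
    'meta-layer-c': False,       # dropped (str(False).lower() == 'false')
    'meta-layer-d': 'yes',       # kept
}

layers = [layer for layer, value in layers_dict.items()
          if str(value).lower() not in
          ['disabled', 'excluded', 'n', 'no', '0', 'false']]

print(layers)  # ['meta-layer-a', 'meta-layer-d']
-----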
 kas/includehandler.py | 217 (new file)

@@ -0,0 +1,217 @@
# kas - setup tool for bitbake based projects
#
# Copyright (c) Siemens AG, 2017
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""
This module implements how includes of configuration files are handled in
kas.
"""

import os
import sys
import collections
import functools
import logging

__license__ = 'MIT'
__copyright__ = 'Copyright (c) Siemens AG, 2017'


class IncludeException(Exception):
    """
    Class for exceptions that appear in the include mechanism.
    """
    pass


class IncludeHandler(object):
    """
    Abstract class that defines the interface of an include handler.
    """

    def __init__(self, top_file):
        self.top_file = top_file

    def get_config(self, repos=None):
        """
        Parameters:
            repos -- A dictionary that maps repo name to directory path

        Returns:
            (config, repos)
            config -- A dictionary containing the configuration
            repos -- A list of missing repo names that are needed \
                     to create a complete configuration
        """
        # pylint: disable=no-self-use,unused-argument

        logging.error('get_config is not implemented')
        sys.exit(1)
class GlobalIncludes(IncludeHandler):
    """
    Implements a handler where every configuration file should
    contain a dictionary as the base type with an 'includes'
    key containing a list of includes.

    The includes can be specified in two ways: as a string
    containing the relative path from the current file, or as a
    dictionary. The dictionary should have a 'file' key containing
    the relative path to the include file and optionally a 'repo'
    key containing the key of the repository. If the 'repo' key is
    missing, the value of the 'file' key is treated the same as if
    just a string was defined, meaning the path is relative to the
    current config file; otherwise it is relative to the repository
    path.

    The includes are read and merged depth first from top to bottom.
    """
    def get_config(self, repos=None):
        repos = repos or {}

        def _internal_file_parser(filename):
            (_, ext) = os.path.splitext(filename)
            if ext == '.json':
                import json
                with open(filename, 'rb') as fds:
                    return json.load(fds)
            elif ext == '.yml':
                import yaml
                with open(filename, 'rb') as fds:
                    return yaml.safe_load(fds)
            logging.error('Config file extension not recognized: %s', ext)
            sys.exit(1)
        def _internal_include_handler(filename):
            """
            Recursively load include files and find missing repos.

            Includes are done in the following way:

            topfile.yml:
            -------
            includes:
              - include1.yml
              - file: include2.yml
              - repo: repo1
                file: include-repo1.yml
              - repo: repo2
                file: include-repo2.yml
              - include3.yml
            -------

            Includes are merged in this order:
            ['include1.yml', 'include2.yml', 'include-repo1.yml',
             'include-repo2.yml', 'include3.yml', 'topfile.yml']
            On conflict, later includes overwrite previous ones and
            the current file overwrites every include. (Evaluation is
            depth first and from top to bottom.)
            """
            missing_repos = []
            configs = []
            current_config = _internal_file_parser(filename)
            if not isinstance(current_config, collections.Mapping):
                raise IncludeException('Configuration file does not contain a '
                                       'dictionary as base type')
            for include in current_config.get('includes', []):
                if isinstance(include, str):
                    includefile = ''
                    if include.startswith(os.path.sep):
                        includefile = include
                    else:
                        includefile = os.path.abspath(
                            os.path.join(
                                os.path.dirname(filename),
                                include))
                    (cfg, rep) = _internal_include_handler(includefile)
                    configs.extend(cfg)
                    missing_repos.extend(rep)
                elif isinstance(include, collections.Mapping):
                    includerepo = include.get('repo', None)
                    if includerepo is not None:
                        includedir = repos.get(includerepo, None)
                    else:
                        raise IncludeException(
                            '"repo" is not specified: {}'
                            .format(include))
                    if includedir is not None:
                        try:
                            includefile = include['file']
                        except KeyError:
                            raise IncludeException(
                                '"file" is not specified: {}'
                                .format(include))
                        (cfg, rep) = _internal_include_handler(
                            os.path.abspath(
                                os.path.join(
                                    includedir,
                                    includefile)))
                        configs.extend(cfg)
                        missing_repos.extend(rep)
                    else:
                        missing_repos.append(includerepo)
            configs.append((filename, current_config))
            return (configs, missing_repos)
        def _internal_dict_merge(dest, upd, recursive_merge=True):
            """
            Merges upd recursively into a copy of dest as OrderedDict

            If recursive_merge=False, will use the classic dict.update,
            or fall back on a manual merge (helpful for non-dict types
            like FunctionWrapper)
            """
            if (not isinstance(dest, collections.Mapping)) \
                    or (not isinstance(upd, collections.Mapping)):
                raise IncludeException('Cannot merge using non-dict')
            dest = collections.OrderedDict(dest)
            updkeys = list(upd.keys())
            if not set(list(dest.keys())) & set(updkeys):
                recursive_merge = False
            if recursive_merge:
                for key in updkeys:
                    val = upd[key]
                    try:
                        dest_subkey = dest.get(key, None)
                    except AttributeError:
                        dest_subkey = None
                    if isinstance(dest_subkey, collections.Mapping) \
                            and isinstance(val, collections.Mapping):
                        ret = _internal_dict_merge(dest_subkey, val)
                        dest[key] = ret
                    else:
                        dest[key] = upd[key]
                return dest
            else:
                try:
                    for k in upd:
                        dest[k] = upd[k]
                except AttributeError:
                    # this mapping is not a dict
                    for k in upd:
                        dest[k] = upd[k]
                return dest

        configs, missing_repos = _internal_include_handler(self.top_file)
        config = functools.reduce(_internal_dict_merge,
                                  map(lambda x: x[1], configs))
        return config, missing_repos
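The merge semantics of _internal_dict_merge can be illustrated with a
simplified standalone sketch (it drops the OrderedDict handling and the
non-recursive fallback of the original; the config values are made up):

-----
import functools

def merge(dest, upd):
    # Recursively merge upd into a copy of dest; later values win.
    result = dict(dest)
    for key, val in upd.items():
        if isinstance(result.get(key), dict) and isinstance(val, dict):
            result[key] = merge(result[key], val)
        else:
            result[key] = val
    return result

# Configs ordered as the include handler returns them: includes first,
# the top file last, so the top file overwrites on conflict.
include1 = {'machine': 'qemu', 'repos': {'meta-a': {'refspec': 'master'}}}
topfile = {'machine': 'mymachine', 'repos': {'meta-b': {}}}

config = functools.reduce(merge, [include1, topfile])
print(config)
# {'machine': 'mymachine',
#  'repos': {'meta-a': {'refspec': 'master'}, 'meta-b': {}}}
-----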