2017-06-14 13:36:37 +02:00
|
|
|
# kas - setup tool for bitbake based projects
|
|
|
|
#
|
|
|
|
# Copyright (c) Siemens AG, 2017
|
|
|
|
#
|
|
|
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
# of this software and associated documentation files (the "Software"), to deal
|
|
|
|
# in the Software without restriction, including without limitation the rights
|
|
|
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
# copies of the Software, and to permit persons to whom the Software is
|
|
|
|
# furnished to do so, subject to the following conditions:
|
|
|
|
#
|
|
|
|
# The above copyright notice and this permission notice shall be
|
|
|
|
# included in all copies or substantial portions of the Software.
|
|
|
|
#
|
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
# SOFTWARE.
|
2017-06-21 13:32:56 +02:00
|
|
|
"""
|
|
|
|
This module contains the core implementation of kas.
|
|
|
|
"""
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
import re
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import logging
|
|
|
|
import tempfile
|
|
|
|
import asyncio
|
|
|
|
from subprocess import Popen, PIPE
|
|
|
|
|
|
|
|
__license__ = 'MIT'
|
|
|
|
__copyright__ = 'Copyright (c) Siemens AG, 2017'
|
|
|
|
|
|
|
|
|
|
|
|
class LogOutput:
    """
    Collects the output of executed applications, optionally mirroring
    each line to the logger as it arrives ('live' mode).
    """
    def __init__(self, live):
        self.live = live
        self.stdout = []
        self.stderr = []

    def log_stdout(self, line):
        """
        This method is called when a line over stdout is received.
        """
        stripped = line.strip()
        if self.live:
            logging.info(stripped)
        self.stdout.append(line)

    def log_stderr(self, line):
        """
        This method is called when a line over stderr is received.
        """
        stripped = line.strip()
        if self.live:
            logging.error(stripped)
        self.stderr.append(line)
|
|
|
|
|
|
|
|
|
|
|
|
@asyncio.coroutine
def _read_stream(stream, callback):
    """
    This asynchronous method reads from the output stream of the
    application and transfers each line to the callback function.

    Reading stops on EOF (an empty line from readline()). Lines that
    cannot be decoded as UTF-8 are logged and skipped.
    """
    while True:
        line = yield from stream.readline()
        try:
            line = line.decode('utf-8')
        except UnicodeDecodeError as err:
            logging.warning('Could not decode line from stream, ignore it: %s',
                            err)
            # Bug fix: actually ignore the undecodable line instead of
            # passing raw bytes on to the callback (which expects str).
            continue
        if line:
            callback(line)
        else:
            break
|
|
|
|
|
2017-06-21 13:32:55 +02:00
|
|
|
|
2017-06-14 13:36:37 +02:00
|
|
|
@asyncio.coroutine
def run_cmd_async(cmd, cwd, env=None, fail=True, shell=False, liveupdate=True):
    """
    Run a command asynchronously.

    Arguments:
        cmd        -- command string (shell=True) or argument list
        cwd        -- working directory for the command
        env        -- environment dict for the child (default: empty)
        fail       -- log an error summary when the command fails
        shell      -- run via the shell instead of exec
        liveupdate -- forward output lines to the logger as they arrive

    Returns a (returncode, stdout-string) tuple.
    """
    # pylint: disable=too-many-arguments

    env = env or {}
    cmdstr = cmd
    if not shell:
        cmdstr = ' '.join(cmd)
    logging.info('%s$ %s', cwd, cmdstr)

    logo = LogOutput(liveupdate)

    if shell:
        # Note: no universal_newlines here — asyncio subprocesses have no
        # text mode (create_subprocess_shell rejects the argument) and the
        # streams are decoded manually in _read_stream.
        process = yield from asyncio.create_subprocess_shell(
            cmd,
            env=env,
            cwd=cwd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
    else:
        process = yield from asyncio.create_subprocess_exec(
            *cmd,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)

    # Drain both pipes concurrently so the child cannot block on a full
    # pipe buffer, then reap it.
    yield from asyncio.wait([
        _read_stream(process.stdout, logo.log_stdout),
        _read_stream(process.stderr, logo.log_stderr)
    ])
    ret = yield from process.wait()

    if ret and fail:
        msg = 'Command "{cwd}$ {cmd}" failed'.format(cwd=cwd, cmd=cmdstr)
        if logo.stderr:
            msg += '\n--- Error summary ---\n'
            for line in logo.stderr:
                msg += line
        logging.error(msg)

    return (ret, ''.join(logo.stdout))
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
2017-06-21 13:32:56 +02:00
|
|
|
def run_cmd(cmd, cwd, env=None, fail=True, shell=False, liveupdate=True):
    """
    Runs a command synchronously.

    Drives run_cmd_async() to completion on the event loop and exits the
    process with the command's return code when it fails and fail is set.
    """
    # pylint: disable=too-many-arguments

    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(
        run_cmd_async(cmd, cwd, env, fail, shell, liveupdate))
    (retc, output) = result
    if retc and fail:
        sys.exit(retc)
    return (retc, output)
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def find_program(paths, name):
    """
    Find a file within the paths array and returns its path.

    'paths' is an os.pathsep-separated search string; the first matching
    regular file wins. Returns None when no directory contains the file.
    """
    candidates = (os.path.join(directory, name)
                  for directory in paths.split(os.pathsep))
    for candidate in candidates:
        if os.path.isfile(candidate):
            return candidate
    return None
|
|
|
|
|
|
|
|
|
2017-06-28 12:43:57 +02:00
|
|
|
@asyncio.coroutine
def _repo_fetch_async(config, repo):
    """
    Start asynchronous repository fetch.

    Returns 0 on success (or when git operations are disabled for the
    repo) and the git return code of a failed clone otherwise. A failed
    'git fetch' is only logged as a warning, never treated as fatal.
    """
    # Repo is managed outside of kas; leave it alone.
    if repo.git_operation_disabled:
        return 0

    if not os.path.exists(repo.path):
        # First fetch: clone the repository. When a reference directory
        # is configured and contains this repo, clone with --reference to
        # reuse its objects and save bandwidth.
        os.makedirs(os.path.dirname(repo.path), exist_ok=True)
        gitsrcdir = os.path.join(config.get_repo_ref_dir() or '',
                                 repo.qualified_name)
        logging.debug('Looking for repo ref dir in %s', gitsrcdir)

        cmd = ['/usr/bin/git', 'clone', '-q', repo.url, repo.path]
        if config.get_repo_ref_dir() and os.path.exists(gitsrcdir):
            cmd.extend(['--reference', gitsrcdir])
        (retc, _) = yield from run_cmd_async(cmd,
                                             env=config.environ,
                                             cwd=config.kas_work_dir)
        if retc == 0:
            logging.info('Repository %s cloned', repo.name)
        return retc

    # Does refspec exist in the current repository?
    # 'git cat-file -t' succeeds iff the object is already available
    # locally; fail=False because a miss is the expected negative case.
    (retc, output) = yield from run_cmd_async(['/usr/bin/git',
                                               'cat-file', '-t',
                                               repo.refspec],
                                              env=config.environ,
                                              cwd=repo.path,
                                              fail=False,
                                              liveupdate=False)
    if retc == 0:
        logging.info('Repository %s already contains %s as %s',
                     repo.name, repo.refspec, output.strip())
        return retc

    # No it is missing, try to fetch
    (retc, output) = yield from run_cmd_async(['/usr/bin/git',
                                               'fetch', '--all'],
                                              env=config.environ,
                                              cwd=repo.path,
                                              fail=False)
    if retc:
        logging.warning('Could not update repository %s: %s',
                        repo.name, output)
    else:
        logging.info('Repository %s updated', repo.name)
    # Fetch failures are deliberately non-fatal: a stale local copy may
    # still contain the requested refspec.
    return 0
|
2017-06-28 12:43:57 +02:00
|
|
|
|
|
|
|
|
|
|
|
def repos_fetch(config, repos):
    """
    Fetches the list of repositories to the kas_work_dir.

    All repositories are fetched concurrently on the event loop; if any
    fetch task reports a non-zero result the process exits with it.
    """
    tasks = []
    for repo in repos:
        if not hasattr(asyncio, 'ensure_future'):
            # Fallback for Python < 3.4.4. Since Python 3.7 'async' is a
            # reserved keyword, so asyncio.async(...) is a SyntaxError and
            # the attribute must be resolved via getattr to keep this
            # module importable on modern interpreters.
            # pylint: disable=no-member,deprecated-method
            task = getattr(asyncio, 'async')(_repo_fetch_async(config, repo))
        else:
            task = asyncio.ensure_future(_repo_fetch_async(config, repo))
        tasks.append(task)

    # asyncio.wait() raises ValueError on an empty task set.
    if not tasks:
        return

    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.wait(tasks))

    for task in tasks:
        if task.result():
            sys.exit(task.result())
|
2017-06-22 12:28:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
def repo_checkout(config, repo):
    """
    Checks out the correct revision of the repo.

    Skips repos with disabled git operations, dirty work trees, and
    repos whose HEAD already matches the configured refspec.
    """
    if repo.git_operation_disabled:
        return

    # Check if repos is dirty
    (_, output) = run_cmd(['/usr/bin/git', 'diff', '--shortstat'],
                          env=config.environ, cwd=repo.path,
                          fail=False)
    if output:
        # Never clobber local modifications.
        logging.warning('Repo %s is dirty. no checkout', repo.name)
        return

    # Check if current HEAD is what in the config file is defined.
    (_, output) = run_cmd(['/usr/bin/git', 'rev-parse',
                           '--verify', 'HEAD'],
                          env=config.environ, cwd=repo.path)

    if output.strip() == repo.refspec:
        logging.info('Repo %s has already checked out correct '
                     'refspec. nothing to do', repo.name)
        return

    # Pass config.environ like every other git invocation so proxy/ssh
    # settings apply here too (was missing before).
    run_cmd(['/usr/bin/git', 'checkout', '-q', repo.refspec],
            env=config.environ, cwd=repo.path)
|
|
|
|
|
|
|
|
|
2017-06-22 10:29:10 +02:00
|
|
|
def get_build_environ(config, build_dir):
    """
    Create the build environment variables.

    Locates the oe-/isar-init-build-env script in one of the configured
    repositories, sources it in a helper shell with a minimal PATH,
    captures the resulting environment and augments it with a whitelist
    of variables passed through from the calling environment.
    """
    # pylint: disable=too-many-locals
    # nasty side effect function: running oe/isar-init-build-env also
    # creates the conf directory

    permutations = \
        [(repo, script) for repo in config.get_repos()
         for script in ['oe-init-build-env', 'isar-init-build-env']]
    for (repo, script) in permutations:
        if os.path.exists(repo.path + '/' + script):
            init_path = repo.path
            init_script = script
            break
    else:
        logging.error('Did not find any init-build-env script')
        sys.exit(1)

    # mkstemp() instead of the race-prone, deprecated mktemp(): the file
    # is created atomically and we get a handle to write the helper.
    (script_fd, get_bb_env_file) = tempfile.mkstemp()
    with os.fdopen(script_fd, 'w') as fds:
        script = """#!/bin/bash
        source %s $1 > /dev/null 2>&1
        env
        """ % init_script
        fds.write(script)
    os.chmod(get_bb_env_file, 0o775)

    # Minimal PATH so only what the init script sets ends up captured.
    env = {}
    env['PATH'] = '/bin:/usr/bin'

    (_, output) = run_cmd([get_bb_env_file, build_dir],
                          cwd=init_path, env=env, liveupdate=False)

    os.remove(get_bb_env_file)

    # Parse the 'env' output (KEY=VALUE per line) back into a dict;
    # lines without '=' are skipped.
    env = {}
    for line in output.splitlines():
        try:
            (key, val) = line.split('=', 1)
            env[key] = val
        except ValueError:
            pass

    env_vars = ['SSTATE_DIR', 'DL_DIR', 'TMPDIR']
    if 'BB_ENV_EXTRAWHITE' in env:
        # Append with a separating blank so the last existing entry is
        # not glued to 'SSTATE_DIR' (previous code omitted the space).
        extra_white = env['BB_ENV_EXTRAWHITE'] + ' ' + ' '.join(env_vars)
        env.update({'BB_ENV_EXTRAWHITE': extra_white})

    env_vars.extend(['SSH_AGENT_PID', 'SSH_AUTH_SOCK',
                     'SHELL', 'TERM',
                     'GIT_PROXY_COMMAND', 'NO_PROXY'])

    # Forward whitelisted variables from the calling environment.
    for env_var in env_vars:
        if env_var in os.environ:
            env[env_var] = os.environ[env_var]

    return env
|
|
|
|
|
|
|
|
|
|
|
|
def ssh_add_key(env, key):
    """
    Add ssh key to the ssh-agent.

    The key material is fed to 'ssh-add' via stdin so it never touches
    the filesystem.
    """
    process = Popen(['/usr/bin/ssh-add', '-'], stdin=PIPE, stdout=None,
                    stderr=PIPE, env=env)
    (_, error) = process.communicate(input=str.encode(key))
    # Report any failure, even when ssh-add printed nothing on stderr
    # (the previous 'returncode and error' check silently swallowed
    # failures without stderr output).
    if process.returncode:
        logging.error('failed to add ssh key: %s', error)
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def ssh_cleanup_agent(config):
    """
    Removes the identities and stop the ssh-agent instance
    """
    # drop all loaded identities first
    retc = Popen(['/usr/bin/ssh-add', '-D'], env=config.environ).wait()
    if retc != 0:
        logging.error('failed to delete SSH identities')

    # then terminate the agent itself
    retc = Popen(['/usr/bin/ssh-agent', '-k'], env=config.environ).wait()
    if retc != 0:
        logging.error('failed to stop SSH agent')
|
|
|
|
|
|
|
|
|
2017-06-21 13:32:56 +02:00
|
|
|
def ssh_setup_agent(config, envkeys=None):
    """
    Starts the ssh-agent

    Loads every private key named by the environment variables in
    'envkeys' (default: SSH_PRIVATE_KEY) into the freshly started agent.
    """
    envkeys = envkeys or ['SSH_PRIVATE_KEY']
    # parse the 'VAR=value;' lines printed by 'ssh-agent -s' into the
    # config environment so later git/ssh calls can reach the agent
    for line in os.popen('/usr/bin/ssh-agent -s').readlines():
        match = re.search(r"(\S+)\=(\S+)\;", line)
        if match:
            config.environ[match.group(1)] = match.group(2)

    for envkey in envkeys:
        key = os.environ.get(envkey)
        if not key:
            logging.warning('%s is missing', envkey)
        else:
            ssh_add_key(config.environ, key)
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
2017-06-21 13:32:56 +02:00
|
|
|
def ssh_no_host_key_check(_):
    """
    Disables ssh host key check

    Writes a ~/.ssh/config that turns StrictHostKeyChecking off for all
    hosts, creating ~/.ssh first if needed.
    """
    ssh_dir = os.path.expanduser('~') + '/.ssh'
    if not os.path.exists(ssh_dir):
        os.mkdir(ssh_dir)
    with open(ssh_dir + '/config', 'w') as fds:
        fds.write('Host *\n\tStrictHostKeyChecking no\n\n')
|
2017-06-28 14:48:41 +02:00
|
|
|
|
|
|
|
|
|
|
|
def kasplugin(plugin_class):
    """
    A decorator that registers kas plugins.

    The decorated class is collected in the 'kasplugin.plugins' list and
    returned unchanged. (Previously the decorator returned None, which
    rebound the decorated class name to None.)
    """
    if not hasattr(kasplugin, 'plugins'):
        setattr(kasplugin, 'plugins', [])
    getattr(kasplugin, 'plugins').append(plugin_class)
    return plugin_class
|