2017-06-14 13:36:37 +02:00
|
|
|
# kas - setup tool for bitbake based projects
|
|
|
|
#
|
|
|
|
# Copyright (c) Siemens AG, 2017
|
|
|
|
#
|
|
|
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
# of this software and associated documentation files (the "Software"), to deal
|
|
|
|
# in the Software without restriction, including without limitation the rights
|
|
|
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
# copies of the Software, and to permit persons to whom the Software is
|
|
|
|
# furnished to do so, subject to the following conditions:
|
|
|
|
#
|
|
|
|
# The above copyright notice and this permission notice shall be
|
|
|
|
# included in all copies or substantial portions of the Software.
|
|
|
|
#
|
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
# SOFTWARE.
|
2017-06-21 13:32:56 +02:00
|
|
|
"""
|
|
|
|
This module contains the core implementation of kas.
|
|
|
|
"""
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
import re
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import logging
|
|
|
|
import tempfile
|
|
|
|
import asyncio
|
|
|
|
from subprocess import Popen, PIPE
|
|
|
|
|
|
|
|
# Module metadata picked up by packaging and introspection tools.
__license__ = 'MIT'
__copyright__ = 'Copyright (c) Siemens AG, 2017'
|
|
|
|
|
|
|
|
|
|
|
|
class LogOutput:
    """
    Buffers the stdout/stderr lines of executed applications and, in
    live mode, mirrors each line to the logging framework as it arrives.
    """

    def __init__(self, live):
        # live: when True, lines are also emitted via logging immediately
        self.live = live
        self.stdout = []
        self.stderr = []

    def _record(self, line, buf, emit):
        # Mirror the stripped line through `emit` in live mode, then
        # always keep the raw line in the corresponding buffer.
        if self.live:
            emit(line.strip())
        buf.append(line)

    def log_stdout(self, line):
        """
        This method is called when a line over stdout is received.
        """
        self._record(line, self.stdout, logging.info)

    def log_stderr(self, line):
        """
        This method is called when a line over stderr is received.
        """
        self._record(line, self.stderr, logging.error)
|
|
|
|
|
|
|
|
|
|
|
|
@asyncio.coroutine
def _read_stream(stream, callback):
    """
    Asynchronously read lines from the output stream of the application
    and hand each successfully decoded line to the callback function.

    Stops at EOF (an empty read). Lines that are not valid UTF-8 are
    logged and skipped.
    """
    while True:
        line = yield from stream.readline()
        if not line:
            # empty read means the stream hit EOF
            break
        try:
            line = line.decode('utf-8')
        except UnicodeDecodeError as err:
            # Actually skip the line: the original fell through and
            # passed raw bytes to the callback, which fills the str
            # buffers with bytes objects.
            logging.warning('Could not decode line from stream, ignore it: %s',
                            err)
            continue
        callback(line)
|
|
|
|
|
2017-06-21 13:32:55 +02:00
|
|
|
|
2017-06-14 13:36:37 +02:00
|
|
|
@asyncio.coroutine
def _stream_subprocess(cmd, cwd, env, shell, stdout_cb, stderr_cb):
    """
    Start the subprocess, attach handlers to its stdout/stderr streams
    and wait until the process has exited.

    Returns the process return code.
    """
    # pylint: disable=too-many-arguments

    if shell:
        # NOTE: universal_newlines is not supported by asyncio's
        # subprocess API (it raises ValueError); decoding happens line
        # by line in _read_stream instead, matching the exec branch.
        process = yield from asyncio.create_subprocess_shell(
            cmd,
            env=env,
            cwd=cwd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
    else:
        process = yield from asyncio.create_subprocess_exec(
            *cmd,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)

    # Drain both streams concurrently so neither pipe can fill up and
    # block the child.
    yield from asyncio.wait([
        _read_stream(process.stdout, stdout_cb),
        _read_stream(process.stderr, stderr_cb)
    ])
    ret = yield from process.wait()
    return ret
|
|
|
|
|
|
|
|
|
2017-06-21 13:32:56 +02:00
|
|
|
def run_cmd(cmd, cwd, env=None, fail=True, shell=False, liveupdate=True):
    """
    Run a command in `cwd`, capture its output and return a tuple of
    (return code, joined stdout string).

    When `fail` is set, a non-zero return code logs the collected stderr
    and terminates via sys.exit.
    """
    # pylint: disable=too-many-arguments

    cmdstr = cmd if shell else ' '.join(cmd)
    logging.info('%s$ %s', cwd, cmdstr)

    logo = LogOutput(liveupdate)

    # A previous run_cmd invocation closes the loop, so create a fresh
    # one in that case.
    loop = asyncio.get_event_loop()
    if loop.is_closed():
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

    retc = loop.run_until_complete(
        _stream_subprocess(cmd, cwd, env or {}, shell,
                           logo.log_stdout, logo.log_stderr))
    loop.close()

    if retc and fail:
        msg = 'Command "{cwd}$ {cmd}" failed\n'.format(cwd=cwd, cmd=cmdstr)
        msg += ''.join(logo.stderr)
        logging.error(msg)
        sys.exit(retc)

    return (retc, ''.join(logo.stdout))
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def find_program(paths, name):
    """
    Search `name` in every directory of the os.pathsep-separated `paths`
    string and return the full path of the first regular file found,
    or None when no directory contains it.
    """
    candidates = (os.path.join(directory, name)
                  for directory in paths.split(os.pathsep))
    return next((prg for prg in candidates if os.path.isfile(prg)), None)
|
|
|
|
|
|
|
|
|
2017-06-22 12:28:32 +02:00
|
|
|
def repo_fetch(config, repo):
    """
    Fetches the repository to the kas_work_dir.

    Clones the repo when it does not exist locally yet (using a
    reference directory when one is configured and populated);
    otherwise only fetches when the configured refspec is not yet
    known to the local repository.
    """
    if repo.git_operation_disabled:
        return

    if not os.path.exists(repo.path):
        # Fresh clone: prefer a --reference clone when the ref dir for
        # this repository exists.
        os.makedirs(os.path.dirname(repo.path), exist_ok=True)
        gitsrcdir = os.path.join(config.get_repo_ref_dir() or '',
                                 repo.qualified_name)
        logging.debug('Looking for repo ref dir in %s', gitsrcdir)
        if config.get_repo_ref_dir() and os.path.exists(gitsrcdir):
            clone_cmd = ['/usr/bin/git',
                         'clone',
                         '--reference', gitsrcdir,
                         repo.url, repo.path]
        else:
            clone_cmd = ['/usr/bin/git', 'clone', '-q', repo.url,
                         repo.path]
        run_cmd(clone_cmd, env=config.environ, cwd=config.kas_work_dir)
        return

    # Is the refspec already present in the local repository?
    (retc, _) = run_cmd(['/usr/bin/git', 'cat-file',
                         '-t', repo.refspec], env=config.environ,
                        cwd=repo.path, fail=False)
    if retc == 0:
        return

    # The refspec is missing, try to fetch it.
    (retc, output) = run_cmd(['/usr/bin/git', 'fetch', '--all'],
                             env=config.environ,
                             cwd=repo.path, fail=False)
    if retc:
        logging.warning('Could not update repository %s: %s',
                        repo.name, output)
|
|
|
|
|
|
|
|
|
|
|
|
def repo_checkout(config, repo):
    """
    Checks out the correct revision of the repo.

    Skipped when git operations are disabled for the repo, when the
    work tree has local modifications, or when HEAD already points at
    the requested refspec.
    """
    if repo.git_operation_disabled:
        return

    # Check if the repo is dirty
    (_, output) = run_cmd(['/usr/bin/git', 'diff', '--shortstat'],
                          env=config.environ, cwd=repo.path,
                          fail=False)
    if output:
        logging.warning('Repo %s is dirty. no checkout', repo.name)
        return

    # Check if the current HEAD is what the config file defines.
    (_, output) = run_cmd(['/usr/bin/git', 'rev-parse',
                           '--verify', 'HEAD'],
                          env=config.environ, cwd=repo.path)

    if output.strip() == repo.refspec:
        logging.info('Repo %s has already checkout out correct '
                     'refspec. nothing to do', repo.name)
        return

    # Pass config.environ like every other git invocation in this
    # module does -- the previous version dropped it here, so git
    # checkout ran with an empty environment.
    run_cmd(['/usr/bin/git', 'checkout', '-q',
             '{refspec}'.format(refspec=repo.refspec)],
            cwd=repo.path, env=config.environ)
|
|
|
|
|
|
|
|
|
2017-06-22 10:29:10 +02:00
|
|
|
def get_build_environ(config, build_dir):
    """
    Create the build environment variables.

    Sources the init-build-env script found in one of the configured
    repos, captures the environment it exports and augments it with
    whitelisted bitbake and SSH related variables from os.environ.
    """
    # pylint: disable=too-many-locals
    # nasty side effect function: running oe/isar-init-build-env also
    # creates the conf directory

    permutations = \
        [(repo, script) for repo in config.get_repos()
         for script in ['oe-init-build-env', 'isar-init-build-env']]
    for (repo, script) in permutations:
        if os.path.exists(repo.path + '/' + script):
            init_path = repo.path
            init_script = script
            break
    else:
        logging.error('Did not find any init-build-env script')
        sys.exit(1)

    # Use mkstemp instead of the race-prone, deprecated mktemp: the
    # file is created atomically and we receive an open descriptor.
    (tmp_fd, get_bb_env_file) = tempfile.mkstemp()
    with os.fdopen(tmp_fd, 'w') as fds:
        script = """#!/bin/bash
source %s $1 > /dev/null 2>&1
env
""" % init_script
        fds.write(script)
    os.chmod(get_bb_env_file, 0o775)

    # Run the wrapper with a minimal environment so only variables set
    # by the init script are captured.
    env = {}
    env['PATH'] = '/bin:/usr/bin'

    (_, output) = run_cmd([get_bb_env_file, build_dir],
                          cwd=init_path, env=env, liveupdate=False)

    os.remove(get_bb_env_file)

    # Parse the `env` output into a dict; lines without '=' are skipped.
    env = {}
    for line in output.splitlines():
        try:
            (key, val) = line.split('=', 1)
            env[key] = val
        except ValueError:
            pass

    env_vars = ['SSTATE_DIR', 'DL_DIR', 'TMPDIR']
    if 'BB_ENV_EXTRAWHITE' in env:
        # Insert a separating space before appending: the previous
        # concatenation glued the first appended variable onto the last
        # entry of the existing whitelist.
        extra_white = env['BB_ENV_EXTRAWHITE'] + ' ' + ' '.join(env_vars)
        env.update({'BB_ENV_EXTRAWHITE': extra_white})

    env_vars.extend(['SSH_AGENT_PID', 'SSH_AUTH_SOCK',
                     'SHELL', 'TERM'])

    for env_var in env_vars:
        if env_var in os.environ:
            env[env_var] = os.environ[env_var]

    return env
|
|
|
|
|
|
|
|
|
|
|
|
def ssh_add_key(env, key):
    """
    Add ssh key to the ssh-agent

    The key material is piped to ssh-add on stdin so it never touches
    the filesystem.
    """
    add_cmd = ['/usr/bin/ssh-add', '-']
    proc = Popen(add_cmd, stdin=PIPE, stdout=None, stderr=PIPE, env=env)
    (_, error) = proc.communicate(input=str.encode(key))
    if proc.returncode and error:
        logging.error('failed to add ssh key: %s', error)
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def ssh_cleanup_agent(config):
    """
    Removes the identities and stop the ssh-agent instance
    """
    def _run_agent_tool(args, errmsg):
        # Run the tool and log errmsg on a non-zero exit code.
        proc = Popen(args, env=config.environ)
        proc.wait()
        if proc.returncode != 0:
            logging.error(errmsg)

    # remove the identities
    _run_agent_tool(['/usr/bin/ssh-add', '-D'],
                    'failed to delete SSH identities')

    # stop the ssh-agent
    _run_agent_tool(['/usr/bin/ssh-agent', '-k'],
                    'failed to stop SSH agent')
|
|
|
|
|
|
|
|
|
2017-06-21 13:32:56 +02:00
|
|
|
def ssh_setup_agent(config, envkeys=None):
    """
    Starts the ssh-agent

    The agent's exported variables are stored in config.environ, then
    every key named by the environment variables in `envkeys`
    (default: SSH_PRIVATE_KEY) is added to the agent.
    """
    envkeys = envkeys or ['SSH_PRIVATE_KEY']

    # Pick up SSH_AUTH_SOCK/SSH_AGENT_PID style assignments from the
    # agent's shell output.
    for line in os.popen('/usr/bin/ssh-agent -s').readlines():
        match = re.search(r"(\S+)\=(\S+)\;", line)
        if match:
            config.environ[match.group(1)] = match.group(2)

    for envkey in envkeys:
        key = os.environ.get(envkey)
        if not key:
            logging.warning('%s is missing', envkey)
        else:
            ssh_add_key(config.environ, key)
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
2017-06-21 13:32:56 +02:00
|
|
|
def ssh_no_host_key_check(_):
    """
    Disables ssh host key check

    NOTE(review): this opens ~/.ssh/config with 'w' and therefore
    truncates any pre-existing user ssh configuration -- confirm kas is
    only run in disposable environments.
    """
    ssh_dir = os.path.expanduser('~') + '/.ssh'
    if not os.path.exists(ssh_dir):
        os.mkdir(ssh_dir)
    with open(ssh_dir + '/config', 'w') as fds:
        fds.write('Host *\n\tStrictHostKeyChecking no\n\n')
|