# kas - setup tool for bitbake based projects
#
# Copyright (c) Siemens AG, 2017-2020
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
    This module contains the core implementation of kas.
"""

import re
import os
import sys
import logging
import tempfile
import asyncio
import errno
import pathlib
import signal
from subprocess import Popen, PIPE
from .context import get_context

__license__ = 'MIT'
__copyright__ = 'Copyright (c) Siemens AG, 2017-2018'


class LogOutput:
    """
        Handles the log output of executed applications
    """

    def __init__(self, live):
        self.live = live
        self.stdout = []
        self.stderr = []

    def log_stdout(self, line):
        """
            This method is called when a line is received over stdout.
        """
        if self.live:
            logging.info(line.strip())
        self.stdout.append(line)

    def log_stderr(self, line):
        """
            This method is called when a line is received over stderr.
        """
        if self.live:
            logging.error(line.strip())
        self.stderr.append(line)


async def _read_stream(stream, callback):
    """
        This asynchronous method reads from the output stream of the
        application and transfers each line to the callback function.
    """
    while True:
        line = await stream.readline()
        try:
            line = line.decode('utf-8')
        except UnicodeDecodeError as err:
            logging.warning('Could not decode line from stream, ignoring: %s',
                            err)
        if line:
            callback(line)
        else:
            break


async def run_cmd_async(cmd, cwd, env=None, fail=True, liveupdate=True):
    """
        Run a command asynchronously.
    """

    env = env or get_context().environ
    cmdstr = ' '.join(cmd)
    logging.info('%s$ %s', cwd, cmdstr)

    logo = LogOutput(liveupdate)

    try:
        orig_fd = signal.set_wakeup_fd(-1, warn_on_full_buffer=False)
        signal.set_wakeup_fd(orig_fd, warn_on_full_buffer=False)
    except TypeError:
        # Python < 3.7 - we tried our best
        pass

    try:
        process = await asyncio.create_subprocess_exec(
            *cmd,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
    except FileNotFoundError as ex:
        if fail:
            raise ex
        return (errno.ENOENT, str(ex))
    except PermissionError as ex:
        if fail:
            raise ex
        return (errno.EPERM, str(ex))

    tasks = [
        asyncio.ensure_future(_read_stream(process.stdout, logo.log_stdout)),
        asyncio.ensure_future(_read_stream(process.stderr, logo.log_stderr))
    ]
    await asyncio.wait(tasks)
    ret = await process.wait()

    if ret and fail:
        msg = 'Command "{cwd}$ {cmd}" failed'.format(cwd=cwd, cmd=cmdstr)
        if logo.stderr:
            msg += '\n--- Error summary ---\n'
            for line in logo.stderr:
                msg += line
        logging.error(msg)

    return (ret, ''.join(logo.stdout))


def run_cmd(cmd, cwd, env=None, fail=True, liveupdate=True):
    """
        Runs a command synchronously.
    """

    loop = asyncio.get_event_loop()
    (ret, output) = loop.run_until_complete(
        run_cmd_async(cmd, cwd, env, fail, liveupdate))
    if ret and fail:
        sys.exit(ret)
    return (ret, output)


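# Illustrative usage of run_cmd()/run_cmd_async() (a sketch, not code executed
# by kas itself; the command and directory are placeholders):
#
#   (ret, out) = run_cmd(['git', 'rev-parse', 'HEAD'], cwd='/work/myrepo',
#                        liveupdate=False)
#   # ret is the exit code, out the captured stdout; with the default
#   # fail=True, a non-zero exit code is logged and kas exits.
#
#   # From within an already running event loop, await the async variant:
#   #   (ret, out) = await run_cmd_async(['bitbake', '-e'], cwd=build_dir)

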
def find_program(paths, name):
    """
        Finds a file within the paths array and returns its path.
    """
    for path in paths.split(os.pathsep):
        prg = os.path.join(path, name)
        if os.path.isfile(prg):
            return prg
    return None


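# Example (illustrative): find_program() is a plain PATH-style lookup, e.g.
#
#   bitbake = find_program(os.environ['PATH'], 'bitbake')
#   # returns the full path of the first match, or None if nothing was found

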
def repos_fetch(repos):
    """
        Fetches the list of repositories to the kas_work_dir.
    """
    if len(repos) == 0:
        return

    tasks = []
    for repo in repos:
        tasks.append(asyncio.ensure_future(repo.fetch_async()))

    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.wait(tasks))

    for task in tasks:
        if task.result():
            sys.exit(task.result())


def repos_apply_patches(repos):
    """
        Applies the patches to the repositories.
    """
    if len(repos) == 0:
        return

    tasks = []
    for repo in repos:
        tasks.append(asyncio.ensure_future(repo.apply_patches_async()))

    loop = asyncio.get_event_loop()
    loop.run_until_complete(asyncio.wait(tasks))

    for task in tasks:
        if task.result():
            sys.exit(task.result())


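# Both helpers above expect repo objects exposing awaitable fetch_async() and
# apply_patches_async() methods (as kas' Repo class does) and are typically
# called back to back; a sketch:
#
#   repos = get_context().config.get_repos()
#   repos_fetch(repos)
#   repos_apply_patches(repos)
#   # each helper exits kas if any per-repo task reports an error

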
def get_build_environ(build_system):
    """
        Creates the build environment variables.
    """
    # nasty side effect function: running oe/isar-init-build-env also
    # creates the conf directory

    init_repo = None
    if build_system in ['openembedded', 'oe']:
        scripts = ['oe-init-build-env']
    elif build_system == 'isar':
        scripts = ['isar-init-build-env']
    else:
        scripts = ['oe-init-build-env', 'isar-init-build-env']
    permutations = \
        [(repo, script) for repo in get_context().config.get_repos()
         for script in scripts]
    for (repo, script) in permutations:
        if os.path.exists(repo.path + '/' + script):
            if init_repo:
                logging.error('Multiple init scripts found (%s vs. %s). ',
                              repo.name, init_repo.name)
                logging.error('Resolve ambiguity by removing one of the repos')
                sys.exit(1)
            init_repo = repo
            init_script = script
    if not init_repo:
        logging.error('Did not find any init-build-env script')
        sys.exit(1)

    with tempfile.TemporaryDirectory() as temp_dir:
        script = """#!/bin/bash
        set -e
        source %s $1 > /dev/null
        env
        """ % init_script

        get_bb_env_file = pathlib.Path(temp_dir) / "get_bb_env"
        get_bb_env_file.write_text(script)
        get_bb_env_file.chmod(0o775)

        env = {}
        env['PATH'] = '/usr/sbin:/usr/bin:/sbin:/bin'

        (_, output) = run_cmd([str(get_bb_env_file), get_context().build_dir],
                              cwd=init_repo.path, env=env, liveupdate=False)

    env = {}
    for line in output.splitlines():
        try:
            (key, val) = line.split('=', 1)
            env[key] = val
        except ValueError:
            pass

    conf_env = get_context().config.get_environment()

    env_vars = ['SSTATE_DIR', 'DL_DIR', 'TMPDIR']
    env_vars.extend(conf_env)

    env.update(conf_env)

    if 'BB_ENV_PASSTHROUGH_ADDITIONS' in env:
        passthrough_additions = env['BB_ENV_PASSTHROUGH_ADDITIONS'] + ' ' + \
            ' '.join(env_vars)
        env.update({'BB_ENV_PASSTHROUGH_ADDITIONS': passthrough_additions})
    elif 'BB_ENV_EXTRAWHITE' in env:
        extra_white = env['BB_ENV_EXTRAWHITE'] + ' ' + ' '.join(env_vars)
        env.update({'BB_ENV_EXTRAWHITE': extra_white})

    env_vars.extend(['SSH_AUTH_SOCK',
                     'SHELL', 'TERM',
                     'GIT_PROXY_COMMAND', 'NO_PROXY'])

    for env_var in env_vars:
        if env_var in os.environ:
            env[env_var] = os.environ[env_var]

    # filter out 'None' values
    env = {k: v for (k, v) in env.items() if v is not None}

    return env


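# Sketch of how the environment computed above is typically consumed (the
# bitbake target is a placeholder):
#
#   env = get_build_environ(build_system='openembedded')
#   run_cmd(['bitbake', 'core-image-minimal'],
#           cwd=get_context().build_dir, env=env)

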
def ssh_add_key_file(env, key_path):
    """
        Adds an ssh key file to the ssh-agent
    """
    with open(key_path) as f:
        key = f.read()
        ssh_add_key(env, key)


def ssh_add_key(env, key):
    """
        Adds an ssh key to the ssh-agent
    """
    # The ssh-agent needs the key to end with a newline, otherwise it
    # unhelpfully prompts for a password
    if not key.endswith('\n'):
        key += '\n'

    process = Popen(['ssh-add', '-'], stdin=PIPE, stdout=None,
                    stderr=PIPE, env=env)
    (_, error) = process.communicate(input=str.encode(key))
    if process.returncode and error:
        logging.error('failed to add ssh key: %s', error)


def ssh_cleanup_agent():
    """
        Removes the identities and stops the ssh-agent instance
    """
    env = get_context().environ
    # remove the identities
    process = Popen(['ssh-add', '-D'], env=env)
    process.wait()
    if process.returncode != 0:
        logging.error('failed to delete SSH identities')

    # stop the ssh-agent
    process = Popen(['ssh-agent', '-k'], env=env)
    process.wait()
    if process.returncode != 0:
        logging.error('failed to stop SSH agent')


def ssh_setup_agent(envkeys=None):
    """
        Starts the ssh-agent
    """
    env = get_context().environ
    envkeys = envkeys or ['SSH_PRIVATE_KEY', 'SSH_PRIVATE_KEY_FILE']
    output = os.popen('ssh-agent -s').readlines()
    for line in output:
        matches = re.search(r"(\S+)\=(\S+)\;", line)
        if matches:
            env[matches.group(1)] = matches.group(2)

    found = False
    for envkey in envkeys:
        if envkey == 'SSH_PRIVATE_KEY_FILE':
            key_path = os.environ.get(envkey)
            if key_path:
                found = True
                logging.info("adding SSH key")
                ssh_add_key_file(env, key_path)
        else:
            key = os.environ.get(envkey)
            if key:
                found = True
                logging.info("adding SSH key")
                ssh_add_key(env, key)

    if found is not True:
        warning = "None of the following environment keys were set: " + \
            ", ".join(envkeys)
        logging.warning(warning)


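# ssh_setup_agent() is driven purely by environment variables; a sketch of a
# CI-style invocation (paths and key material are placeholders):
#
#   SSH_PRIVATE_KEY="$(cat key)" kas build kas.yml
#   # or, pointing at a key file instead:
#   SSH_PRIVATE_KEY_FILE=/path/to/key kas build kas.yml
#
# The SSH_AUTH_SOCK/SSH_AGENT_PID values printed by 'ssh-agent -s' are parsed
# into the kas context environment so later fetches can reach the agent.

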
def ssh_no_host_key_check():
    """
        Disables ssh host key check
    """
    home = os.path.expanduser('~')
    ssh_dir = home + '/.ssh'
    if not os.path.exists(ssh_dir):
        os.mkdir(ssh_dir)
    ssh_config = ssh_dir + "/config"
    generated_content = 'Host *\n\tStrictHostKeyChecking no\n\n'
    try:
        with open(ssh_config, 'x') as fds:
            fds.write(generated_content)
    except FileExistsError:
        with open(ssh_config, 'r') as fds:
            content = fds.read()
        if content != generated_content:
            logging.warning("%s already exists, "
                            "not touching it to disable StrictHostKeyChecking",
                            ssh_config)


def setup_parser_common_args(parser):
    parser.add_argument('config',
                        help='Config file, using .config.yaml in KAS_WORK_DIR '
                             'if none is specified',
                        nargs='?')
    parser.add_argument('--skip',
                        help='Skip build steps',
                        default=[])
    parser.add_argument('--force-checkout', action='store_true',
                        help='Always checkout the desired refspec of each '
                             'repository, discarding any local changes')
    parser.add_argument('--update', action='store_true',
                        help='Pull new upstream changes to the desired '
                             'refspec even if it is already checked out '
                             'locally')


def setup_parser_preserve_env_arg(parser):
    parser.add_argument('-E', '--preserve-env',
                        help='Keep current user environment block',
                        action='store_true')


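# Sketch of how a kas sub-command would wire up the shared arguments above
# ('demo' and the parser object are illustrative, not an actual kas plugin):
#
#   import argparse
#   parser = argparse.ArgumentParser()
#   demo = parser.add_subparsers().add_parser('demo')
#   setup_parser_common_args(demo)
#   setup_parser_preserve_env_arg(demo)

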
def run_handle_preserve_env_arg(ctx, os, args, SetupHome):
    if args.preserve_env:
        # Warn if there are any settings that setup_home would apply
        # but are now ignored
        for var in SetupHome.ENV_VARS:
            if var in os.environ:
                logging.warning('Environment variable "%s" ignored '
                                'because user environment is being used',
                                var)

        if not os.isatty(sys.stdout.fileno()):
            logging.error("Error: --preserve-env can only be "
                          "run from a tty")
            sys.exit(1)

        ctx.environ = os.environ.copy()

        logging.warning("Preserving the current environment block may "
                        "have unintended side effects on the build.")