# kas - setup tool for bitbake based projects
#
# Copyright (c) Siemens AG, 2017-2019
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
|
|
|
|
This module contains the Repo class.
|
|
|
|
"""
|
2017-06-14 13:36:37 +02:00
|
|
|
|
2019-01-31 10:12:08 +01:00
|
|
|
import re
|
2017-06-14 13:36:37 +02:00
|
|
|
import os
|
2020-07-04 09:53:39 +02:00
|
|
|
import sys
|
2018-01-05 16:00:24 +01:00
|
|
|
import logging
|
2017-06-14 13:36:37 +02:00
|
|
|
from urllib.parse import urlparse
|
2018-08-24 19:18:02 +02:00
|
|
|
from .context import get_context
|
2018-01-05 16:00:24 +01:00
|
|
|
from .libkas import run_cmd_async, run_cmd
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
__license__ = 'MIT'
|
2018-09-05 11:13:03 +02:00
|
|
|
__copyright__ = 'Copyright (c) Siemens AG, 2017-2018'
|
2017-06-14 13:36:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Repo:
    """
        Represents a repository in the kas configuration.
    """

    def __init__(self, name, url, path, refspec, layers, patches,
                 disable_operations):
        self.name = name
        self.url = url
        self.path = path
        self.refspec = refspec
        self._layers = layers
        self._patches = patches
        self.operations_disabled = disable_operations

    def __getattr__(self, item):
        if item == 'layers':
            return [os.path.join(self.path, layer).rstrip(os.sep + '.')
                    for layer in self._layers]
        elif item == 'qualified_name':
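            # e.g. 'https://github.com/example/meta-example.git' becomes
            # 'github.com.example.meta-example.git' (illustrative URL)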
            url = urlparse(self.url)
            return ('{url.netloc}{url.path}'
                    .format(url=url)
                    .replace('@', '.')
                    .replace(':', '.')
                    .replace('/', '.')
                    .replace('*', '.'))
        elif item == 'effective_url':
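            # KAS_PREMIRRORS is expected to hold newline-separated
            # '<url-regex> <replacement>' pairs; an illustrative entry:
            #   https://git\.example\.com/ https://mirror.example.org/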
            mirrors = os.environ.get('KAS_PREMIRRORS', '')
            for mirror in mirrors.split('\n'):
                try:
                    expr, subst = mirror.split()
                    if re.match(expr, self.url):
                        return re.sub(expr, subst, self.url)
                except ValueError:
                    continue
            return self.url
        elif item == 'revision':
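            # Resolve a branch refspec to the commit it currently points to;
            # otherwise fall back to the literal refspec (e.g. a tag or an
            # already pinned commit id).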
            if not self.refspec:
                return None
            (_, output) = run_cmd(self.resolve_branch_cmd(),
                                  cwd=self.path, fail=False)
            if output:
                return output.strip()
            return self.refspec

        # Default behaviour
        raise AttributeError

    def __str__(self):
        return '%s:%s %s %s' % (self.url, self.refspec,
                                self.path, self._layers)

    @staticmethod
    def factory(name, repo_config, repo_defaults, repo_fallback_path):
        """
            Returns a Repo instance for the given configuration parameters.
        """
        layers_dict = repo_config.get('layers', {'': None})
        layers = list(filter(lambda x, laydict=layers_dict:
                             str(laydict[x]).lower() not in
                             ['disabled', 'excluded', 'n', 'no', '0', 'false'],
                             layers_dict))

        default_patch_repo = repo_defaults.get('patches', {}).get('repo', None)
        patches_dict = repo_config.get('patches', {})
        patches = []
        for p in sorted(patches_dict):
            if not patches_dict[p]:
                continue
            this_patch = {
                'id': p,
                'repo': patches_dict[p].get('repo', default_patch_repo),
                'path': patches_dict[p]['path'],
            }
            if this_patch['repo'] is None:
                logging.error('No repo specified for patch entry "%s" and no '
                              'default repo specified.', p)
                sys.exit(1)
            patches.append(this_patch)

        url = repo_config.get('url', None)
        name = repo_config.get('name', name)
        typ = repo_config.get('type', 'git')
        refspec = repo_config.get('refspec',
                                  repo_defaults.get('refspec', None))
        if refspec is None and url is not None:
            logging.error('No refspec specified for repository "%s". This is '
                          'only allowed for local repositories.', name)
            sys.exit(1)
        path = repo_config.get('path', None)
        disable_operations = False

        if path is None:
            if url is None:
                path = Repo.get_root_path(repo_fallback_path)
                logging.info('Using %s as root for repository %s', path,
                             name)
            else:
                path = os.path.join(get_context().kas_work_dir, name)
        elif not os.path.isabs(path):
            # Relative paths are assumed to start from work_dir
            path = os.path.join(get_context().kas_work_dir, path)

        if url is None:
            # No version control operation on repository
            url = path
            disable_operations = True

        if typ == 'git':
            return GitRepo(name, url, path, refspec, layers, patches,
                           disable_operations)
        if typ == 'hg':
            return MercurialRepo(name, url, path, refspec, layers, patches,
                                 disable_operations)
        raise NotImplementedError('Repo type "%s" not supported.' % typ)

    @staticmethod
    def get_root_path(path, fallback=True):
        """
            Checks if path is under version control and returns its root path.
        """
        (ret, output) = run_cmd(['git', 'rev-parse', '--show-toplevel'],
                                cwd=path, fail=False, liveupdate=False)
        if ret == 0:
            return output.strip()

        (ret, output) = run_cmd(['hg', 'root'],
                                cwd=path, fail=False, liveupdate=False)
        if ret == 0:
            return output.strip()

        return path if fallback else None


class RepoImpl(Repo):
    """
        Provides a generic implementation for a Repo.
    """

    async def fetch_async(self):
        """
            Starts asynchronous repository fetch.
        """
        if self.operations_disabled:
            return 0

        if not os.path.exists(self.path):
            os.makedirs(os.path.dirname(self.path), exist_ok=True)
            sdir = os.path.join(get_context().kas_repo_ref_dir or '',
                                self.qualified_name)
            logging.debug('Looking for repo ref dir in %s', sdir)

            (retc, _) = await run_cmd_async(
                self.clone_cmd(sdir),
                cwd=get_context().kas_work_dir)
            if retc == 0:
                logging.info('Repository %s cloned', self.name)
                if not self.refspec.startswith('refs/'):
                    return retc

        # Make sure the remote origin is set to the value
        # in the kas file to avoid surprises
        try:
            (retc, output) = await run_cmd_async(
                self.set_remote_url_cmd(),
                cwd=self.path,
                liveupdate=False)
            if retc != 0:
                return retc
        except NotImplementedError:
            logging.warning('Repo implementation does not support changing '
                            'the remote url.')

        # take what came out of clone and stick to that forever
        if self.refspec is None:
            return 0

        if not get_context().update:
            # Does refspec exist in the current repository?
            (retc, output) = await run_cmd_async(self.contains_refspec_cmd(),
                                                 cwd=self.path,
                                                 fail=False,
                                                 liveupdate=False)
            if retc == 0:
                logging.info('Repository %s already contains %s as %s',
                             self.name, self.refspec, output.strip())
                return retc

        # Try to fetch if refspec is missing or if --update argument was passed
        (retc, output) = await run_cmd_async(self.fetch_cmd(),
                                             cwd=self.path,
                                             fail=False)
        if retc:
            logging.warning('Could not update repository %s: %s',
                            self.name, output)
        else:
            logging.info('Repository %s updated', self.name)
        return 0

    def checkout(self):
        """
            Checks out the correct revision of the repo.
        """
        if self.operations_disabled or self.refspec is None:
            return

        if not get_context().force_checkout:
            # Check if the repo is dirty
            (_, output) = run_cmd(self.is_dirty_cmd(),
                                  cwd=self.path,
                                  fail=False)
            if output:
                logging.warning('Repo %s is dirty - no checkout', self.name)
                return
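
        # If the refspec resolves to a remote branch, check out a local
        # tracking branch; otherwise check out the refspec directly
        # (a detached HEAD in the git case).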
        (_, output) = run_cmd(self.resolve_branch_cmd(),
                              cwd=self.path, fail=False)
        if output:
            desired_ref = output.strip()
            branch = True
        else:
            desired_ref = self.refspec
            branch = False

        run_cmd(self.checkout_cmd(desired_ref, branch), cwd=self.path)

    async def apply_patches_async(self):
        """
            Applies patches to a repository asynchronously.
        """
        if self.operations_disabled or not self._patches:
            return 0

        (retc, _) = await run_cmd_async(self.prepare_patches_cmd(),
                                        cwd=self.path)
        if retc:
            return retc

        my_patches = []

        for patch in self._patches:
            other_repo = get_context().config.repo_dict.get(patch['repo'],
                                                            None)

            if not other_repo:
                logging.error('Could not find referenced repo. '
                              '(missing repo: %s, repo: %s, '
                              'patch entry: %s)',
                              patch['repo'],
                              self.name,
                              patch['id'])
                return 1

            path = os.path.join(other_repo.path, patch['path'])
            cmd = []

            if os.path.isfile(path):
                my_patches.append((path, patch['id']))
            elif os.path.isdir(path) \
                    and os.path.isfile(os.path.join(path, 'series')):
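                # A 'series' file is expected to list one patch file name per
                # line (quilt-style); lines starting with '#' are skipped and
                # trailing ' #' comments are stripped. Illustrative content:
                #   0001-fix-build.patch
                #   0002-add-feature.patch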
                with open(os.path.join(path, 'series')) as f:
                    for line in f:
                        if line.startswith('#'):
                            continue
                        p = os.path.join(path, line.split(' #')[0].rstrip())
                        if os.path.isfile(p):
                            my_patches.append((p, patch['id']))
                        else:
                            raise FileNotFoundError(p)
            else:
                logging.error('Could not find patch. '
                              '(patch path: %s, repo: %s, patch entry: %s)',
                              path,
                              self.name,
                              patch['id'])
                return 1

        for (path, patch_id) in my_patches:
            cmd = self.apply_patches_file_cmd(path)
            (retc, output) = await run_cmd_async(cmd, cwd=self.path)
            if retc:
                logging.error('Could not apply patch. Please fix repos and '
                              'patches. (patch path: %s, repo: %s, patch '
                              'entry: %s, vcs output: %s)',
                              path, self.name, patch_id, output)
                return 1
            else:
                logging.info('Patch applied. '
                             '(patch path: %s, repo: %s, patch entry: %s)',
                             path, self.name, patch_id)

        cmd = self.add_cmd()
        (retc, output) = await run_cmd_async(cmd, cwd=self.path)
        if retc:
            logging.error('Could not add patched files. '
                          '(repo: %s, vcs output: %s)',
                          self.name, output)
            return 1

        cmd = self.commit_cmd()
        (retc, output) = await run_cmd_async(cmd, cwd=self.path)
        if retc:
            logging.error('Could not commit patch changes. '
                          '(repo: %s, vcs output: %s)',
                          self.name, output)
            return 1

        return 0


class GitRepo(RepoImpl):
    """
        Provides the git functionality for a Repo.
    """

    def remove_ref_prefix(self, refspec):
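        # Strip a leading 'refs/' if present, e.g. 'refs/heads/main' becomes
        # 'heads/main'; refspecs without the prefix are returned unchanged.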
        ref_prefix = 'refs/'
        return refspec[refspec.startswith(ref_prefix) and len(ref_prefix):]

    def add_cmd(self):
        return ['git', 'add', '-A']

    def clone_cmd(self, gitsrcdir):
        cmd = ['git', 'clone', '-q', self.effective_url, self.path]
        if get_context().kas_repo_ref_dir and os.path.exists(gitsrcdir):
            cmd.extend(['--reference', gitsrcdir])
        return cmd

    def commit_cmd(self):
        return ['git', 'commit', '-a', '--author', 'kas <kas@example.com>',
                '-m', 'msg']

    def contains_refspec_cmd(self):
        return ['git', 'cat-file', '-t', self.remove_ref_prefix(self.refspec)]

    def fetch_cmd(self):
        cmd = ['git', 'fetch', '-q']
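        # Full refs (e.g. a Gerrit-style 'refs/changes/12/3412/2') are not
        # fetched by a plain 'git fetch', so map them onto a remote-tracking
        # ref explicitly; the example above expands to
        # '+refs/changes/12/3412/2:refs/remotes/origin/changes/12/3412/2'.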
        if self.refspec.startswith('refs/'):
            cmd.extend(['origin',
                        '+' + self.refspec
                        + ':refs/remotes/origin/'
                        + self.remove_ref_prefix(self.refspec)])

        return cmd

    def is_dirty_cmd(self):
        return ['git', 'status', '-s']

    def resolve_branch_cmd(self):
        return ['git', 'rev-parse', '--verify', '-q',
                'origin/{refspec}'.
                format(refspec=self.remove_ref_prefix(self.refspec))]

    def checkout_cmd(self, desired_ref, branch):
        cmd = ['git', 'checkout', '-q', self.remove_ref_prefix(desired_ref)]
        if branch:
            cmd.extend(['-B', self.remove_ref_prefix(self.refspec)])
        if get_context().force_checkout:
            cmd.append('--force')
        return cmd

    def prepare_patches_cmd(self):
        return ['git', 'checkout', '-q', '-B',
                'patched-{refspec}'.
                format(refspec=self.remove_ref_prefix(self.refspec))]

    def apply_patches_file_cmd(self, path):
        # Patches that add, remove or modify other patch files legitimately
        # contain trailing whitespace; pass --whitespace=nowarn so git-apply
        # does not emit warnings on stderr that kas would report as errors.
        return ['git', 'apply', '--whitespace=nowarn', path]

    def set_remote_url_cmd(self):
        return ['git', 'remote', 'set-url', 'origin', self.effective_url]


class MercurialRepo(RepoImpl):
    """
        Provides the hg functionality for a Repo.
    """

    def add_cmd(self):
        return ['hg', 'add']

    def clone_cmd(self, srcdir):
        return ['hg', 'clone', self.effective_url, self.path]

    def commit_cmd(self):
        return ['hg', 'commit', '--user', 'kas <kas@example.com>', '-m', 'msg']

    def contains_refspec_cmd(self):
        return ['hg', 'log', '-r', self.refspec]

    def fetch_cmd(self):
        return ['hg', 'pull']

    def is_dirty_cmd(self):
        return ['hg', 'diff']

    def resolve_branch_cmd(self):
        # We never need to care about creating tracking branches in mercurial
        return ['false']

    def checkout_cmd(self, desired_ref, branch):
        cmd = ['hg', 'checkout', desired_ref]
        if get_context().force_checkout:
            cmd.append('--clean')
        return cmd

    def prepare_patches_cmd(self):
        return ['hg', 'branch', '-f',
                'patched-{refspec}'.format(refspec=self.refspec)]

    def apply_patches_file_cmd(self, path):
        return ['hg', 'import', '--no-commit', path]

    def set_remote_url_cmd(self):
        raise NotImplementedError()