libkas: Rework repo_fetch to parallelized repos_fetch
Building on top of run_cmd_async, this reworks repo_fetch into a repository list fetcher, repos_fetch, that runs those operations in parallel. The two users, ReposFetch and ConfigStatic, are converted to exploit this parallelization.

Signed-off-by: Jan Kiszka <jan.kiszka@siemens.com>
commit 0bdd7a8d52 (parent 19ee6edcb1)
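The pattern behind this rework, reduced to a standalone sketch: build one coroutine per repository on top of asyncio's subprocess support and let a single event loop drive all of them concurrently. The sketch uses the same legacy, pre-async/await coroutine style as the patch (Python 3.4/3.5 era); fetch_one, fetch_all and the git ls-remote call are illustrative stand-ins, not kas APIs.

import asyncio
import subprocess


@asyncio.coroutine
def fetch_one(url):
    # Stand-in for _repo_fetch_async(): run one git command without
    # blocking the event loop.
    proc = yield from asyncio.create_subprocess_exec(
        'git', 'ls-remote', '--heads', url,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL)
    retc = yield from proc.wait()
    return url, retc


def fetch_all(urls):
    # Stand-in for repos_fetch(): schedule one coroutine per repository
    # and block until all of them have completed.
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(asyncio.wait([fetch_one(u) for u in urls]))


if __name__ == '__main__':
    done, _pending = fetch_all(['https://github.com/siemens/kas.git',
                                'https://git.yoctoproject.org/poky'])
    for task in done:
        print(task.result())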
@@ -43,7 +43,7 @@ except ImportError:
         return platform.dist()[0]
 
 from .repos import Repo
-from .libkas import run_cmd, repo_fetch, repo_checkout
+from .libkas import run_cmd, repos_fetch, repo_checkout
 
 __license__ = 'MIT'
 __copyright__ = 'Copyright (c) Siemens AG, 2017'
@@ -331,8 +331,9 @@ class ConfigStatic(Config):
         missing_repos = [repo_dict[repo_name]
                          for repo_name in missing_repo_names]
 
+        repos_fetch(self, missing_repos)
+
         for repo in missing_repos:
-            repo_fetch(self, repo)
             repo_checkout(self, repo)
 
         repo_paths = {r: repo_dict[r].path for r in repo_dict}
@@ -28,7 +28,7 @@ import logging
 import shutil
 import os
 from .libkas import (ssh_cleanup_agent, ssh_setup_agent, ssh_no_host_key_check,
-                     get_build_environ, repo_fetch, repo_checkout)
+                     get_build_environ, repos_fetch, repo_checkout)
 
 __license__ = 'MIT'
 __copyright__ = 'Copyright (c) Siemens AG, 2017'
@@ -209,8 +209,7 @@ class ReposFetch(Command):
         return 'repos_fetch'
 
     def execute(self, config):
-        for repo in config.get_repos():
-            repo_fetch(config, repo)
+        repos_fetch(config, config.get_repos())
 
 
 class ReposCheckout(Command):
@@ -149,9 +149,10 @@ def find_program(paths, name):
     return None
 
 
-def repo_fetch(config, repo):
+@asyncio.coroutine
+def _repo_fetch_async(config, repo):
     """
-        Fetches the repository to the kas_work_dir.
+        Start asynchronous repository fetch.
     """
     if repo.git_operation_disabled:
         return
@@ -165,25 +166,47 @@ def repo_fetch(config, repo):
         cmd = ['/usr/bin/git', 'clone', '-q', repo.url, repo.path]
         if config.get_repo_ref_dir() and os.path.exists(gitsrcdir):
             cmd.extend(['--reference', gitsrcdir])
-        run_cmd(cmd, env=config.environ, cwd=config.kas_work_dir)
+        yield from run_cmd_async(cmd,
+                                 env=config.environ,
+                                 cwd=config.kas_work_dir)
         logging.info('Repository %s cloned', repo.name)
         return
 
     # Does refspec exist in the current repository?
-    (retc, output) = run_cmd(['/usr/bin/git', 'cat-file',
-                              '-t', repo.refspec], env=config.environ,
-                             cwd=repo.path, fail=False, liveupdate=False)
+    (retc, output) = yield from run_cmd_async(['/usr/bin/git',
+                                               'cat-file', '-t',
+                                               repo.refspec],
+                                              env=config.environ,
+                                              cwd=repo.path,
+                                              fail=False,
+                                              liveupdate=False)
     if retc == 0:
         logging.info('Repository %s already contains %s as %s',
                      repo.name, repo.refspec, output.strip())
         return
 
     # No it is missing, try to fetch
-    (retc, output) = run_cmd(['/usr/bin/git', 'fetch', '--all'],
-                             env=config.environ,
-                             cwd=repo.path, fail=False)
+    (retc, output) = yield from run_cmd_async(['/usr/bin/git',
+                                               'fetch', '--all'],
+                                              env=config.environ,
+                                              cwd=repo.path,
+                                              fail=False)
     if retc:
         logging.warning('Could not update repository %s: %s',
                         repo.name, output)
     logging.info('Repository %s updated', repo.name)
+
+
+def repos_fetch(config, repos):
+    """
+        Fetches the list of repositories to the kas_work_dir.
+    """
+    cmds = []
+    for repo in repos:
+        cmds.append(_repo_fetch_async(config, repo))
+
+    loop = asyncio.get_event_loop()
+    return loop.run_until_complete(asyncio.wait(cmds))
 
 
 def repo_checkout(config, repo):