Commit 656b20e4 authored by Jakub Ružička's avatar Jakub Ružička
Browse files

refactor: new -O/--result-dir option

The following apkg commands now support the -O/--result-dir option to
override the directory where apkg puts resulting files:

* get-archive
* make-archive
* srcpkg
* build

When a result is cached elsewhere, it is automatically copied to
--result-dir.

This change required refactoring across apkg commands:

* all commands and scripts now return a list of all output files
* all output files are cached
* commands and their options were made consistent with each other and with --help
* new apkg.lib.common module to contain common/shared code

Related: #9
parent c4a651d4
Pipeline #77139 passed with stages
in 2 minutes and 30 seconds
......@@ -50,18 +50,24 @@ class ProjectCache:
self.load()
self.loaded = True
def update(self, cache_name, key, path):
assert key
def update(self, cache_name, key, paths):
"""
update cache entry
"""
log.verbose("cache update for %s: %s -> %s",
cache_name, key, path)
cache_name, key, paths[0])
assert key
self._ensure_load()
if cache_name not in self.cache:
self.cache[cache_name] = {}
entry = self.path2entry(path)
self.cache[cache_name][key] = entry
entries = list(map(path2entry, paths))
self.cache[cache_name][key] = entries
self.save()
def get(self, cache_name, key):
"""
get cache entry or None
"""
log.verbose("cache query for %s: %s",
cache_name, key)
......@@ -77,58 +83,23 @@ class ProjectCache:
return False
return True
def entry2path_valid(e):
return entry2path(e, validate_fun=validate)
assert key
self._ensure_load()
entry = self.cache.get(cache_name, {}).get(key)
if not entry:
entries = self.cache.get(cache_name, {}).get(key)
if not entries:
return None
path = self.entry2path(entry, validate_fun=validate)
return path
paths = list(map(entry2path_valid, entries))
return paths
def delete(self, cache_name, key):
self.cache[cache_name].pop(key, None)
self.save()
def path2entry(self, path):
"""
convert path or a list of paths to corresponding cache entry
return (fn, checksum) or a list of that on multiple paths
"""
is_list = True
if not isinstance(path, list):
path = [path]
is_list = False
e = list(map(
lambda x: (str(x), file_checksum(x)),
path))
if is_list:
return e
return e[0]
def entry2path(self, entry, validate_fun=None):
"""
convert cache entry to file path or list of paths
if validate is True, make sure file has correct checksum
and flush invalid cache entry if it doesn't
delete cache entry
"""
is_list = True
if not isinstance(entry[0], list):
entry = [entry]
is_list = False
paths = []
for fn, checksum in entry:
p = Path(fn)
if validate_fun:
if not validate_fun(p, checksum):
return None
paths.append(p)
if is_list:
return paths
return paths[0]
self.cache[cache_name].pop(key, None)
self.save()
def enabled(self, use_cache=True):
"""
......@@ -147,3 +118,27 @@ class ProjectCache:
else:
log.verbose("cache DISABLED")
return False
def path2entry(path):
    """
    convert a single path to its corresponding cache entry

    return a (filename, checksum) pair, where filename is the
    stringified path and checksum is computed by file_checksum
    """
    return str(path), file_checksum(path)
def entry2path(entry, validate_fun=None):
    """
    convert a cache entry to its corresponding path

    entry is a (filename, checksum) pair as produced by path2entry.

    if validate_fun is specified, it's used to confirm the file has
    a valid checksum (and to flush the invalid cache entry if it
    doesn't); return None when validation fails
    """
    fn, checksum = entry
    p = Path(fn)
    if validate_fun:
        # on checksum mismatch the entry is unusable - signal with None
        if not validate_fun(p, checksum):
            return None
    return p
......@@ -3,6 +3,7 @@ build packages
Usage: apkg build [-u] [-s <srcpkg> | -a <ar>]
[-v <ver>] [-r <rls>] [-d <distro>]
[-O <dir>]
[-i] [-I] [--no-cache]
Options:
......@@ -14,6 +15,8 @@ Options:
-r <rls>, --release <rls> set package release
-d <distro>, --distro <distro> set target distro
default: current distro
-O <dir>, --result-dir <dir> put results into specified dir
default: pkg/pkgs/DISTRO/NVR
-i, --install-dep install build dependencies
-I, --isolated use isolated builder (pbuilder/mock) if supported
default: use direct build
......@@ -23,20 +26,21 @@ Options:
from docopt import docopt
from apkg.lib import build
from apkg.lib import common
def run_command(cargs):
args = docopt(__doc__, argv=cargs)
pkgs = build.build_package(
srcpkg=args['--srcpkg'],
archive=args['--archive'],
upstream=args['--upstream'],
version=args['--version'],
release=args['--release'],
distro=args['--distro'],
install_dep=args['--install-dep'],
isolated=args['--isolated'],
use_cache=not args['--no-cache'])
for pkg in pkgs:
print("%s" % pkg)
return pkgs
results = build.build_package(
srcpkg=args['--srcpkg'],
archive=args['--archive'],
upstream=args['--upstream'],
version=args['--version'],
release=args['--release'],
distro=args['--distro'],
result_dir=args['--result-dir'],
install_dep=args['--install-dep'],
isolated=args['--isolated'],
use_cache=not args['--no-cache'])
common.print_results(results)
return results
......@@ -11,22 +11,26 @@ When no --version is specified, apkg tries to detect latest version:
1) using upstream.version_script if set
2) from HTML listing if upstream.archive_url is set
Usage: apkg get-archive [-v <ver>] [--no-cache]
Usage: apkg get-archive [-v <ver>] [-O <dir>] [--no-cache]
Options:
-v <ver>, --version <ver> version of archive to download
--no-cache disable cache
-v <ver>, --version <ver> version of archive to download
-O <dir>, --result-dir <dir> put results into specified dir
default: pkg/archive/upstream/
--no-cache disable cache
""" # noqa
from docopt import docopt
from apkg.lib import ar
from apkg.lib import common
def run_command(cargs):
args = docopt(__doc__, argv=cargs)
out_path = ar.get_archive(
results = ar.get_archive(
version=args['--version'],
result_dir=args['--result-dir'],
use_cache=not args['--no-cache'])
print(out_path)
return out_path
common.print_results(results)
return results
......@@ -2,22 +2,26 @@
create dev archive from current project state
using script specified by project.make_archive_script config option
Usage: apkg make-archive [-v <ver>] [--no-cache]
Usage: apkg make-archive [-v <ver>] [-O <dir>] [--no-cache]
Options:
-v <ver>, --version <ver> rename archive to match specified version if needed
--no-cache disable cache
-v <ver>, --version <ver> rename archive to match specified version if needed
-O <dir>, --result-dir <dir> put results into specified dir
default: pkg/archive/dev/
--no-cache disable cache
""" # noqa
from docopt import docopt
from apkg.lib import ar
from apkg.lib import common
def run_command(cargs):
args = docopt(__doc__, argv=cargs)
out_path = ar.make_archive(
results = ar.make_archive(
version=args['--version'],
result_dir=args['--result-dir'],
use_cache=not args['--no-cache'])
print(out_path)
return out_path
common.print_results(results)
return results
......@@ -3,6 +3,7 @@ create source package (files to build package from)
Usage: apkg srcpkg [-u] [-a <ar>]
[-v <ver>] [-r <rls>] [-d <distro>]
[-O <dir>]
[--no-cache] [--render-template]
Options:
......@@ -13,6 +14,8 @@ Options:
-r <rls>, --release <rls> set package release
-d <distro>, --distro <distro> set target distro
default: current distro
-O <dir>, --result-dir <dir> put results into specified dir
default: pkg/srcpkg/DISTRO/NVR
--no-cache disable cache
--render-template only render source package template
""" # noqa
......@@ -20,17 +23,19 @@ Options:
from docopt import docopt
from apkg.lib import srcpkg
from apkg.lib import common
def run_command(cargs):
args = docopt(__doc__, argv=cargs)
out_srcpkg = srcpkg.make_srcpkg(
upstream=args['--upstream'],
archive=args['--archive'],
version=args['--version'],
release=args['--release'],
distro=args['--distro'],
use_cache=not args['--no-cache'],
render_template=args['--render-template'])
print(out_srcpkg)
return out_srcpkg
results = srcpkg.make_srcpkg(
upstream=args['--upstream'],
archive=args['--archive'],
version=args['--version'],
release=args['--release'],
distro=args['--distro'],
result_dir=args['--result-dir'],
use_cache=not args['--no-cache'],
render_template=args['--render-template'])
common.print_results(results)
return results
......@@ -7,6 +7,7 @@ import shutil
import requests
from apkg import exception
from apkg.lib import common
from apkg.log import getLogger
from apkg.compat import py35path
from apkg.parse import split_archive_fn, parse_version
......@@ -17,23 +18,32 @@ from apkg.util.run import run
log = getLogger(__name__)
def make_archive(version=None, project=None, use_cache=True):
def make_archive(
version=None,
result_dir=None,
use_cache=True,
project=None):
"""
create archive from current project state
"""
log.bold("creating dev archive")
proj = project or Project()
use_cache = proj.cache.enabled(use_cache)
if use_cache:
archive_path = proj.cache.get('archive/dev', proj.checksum)
if archive_path:
log.success("reuse cached archive: %s", archive_path)
return archive_path
cache_name = 'archive/dev'
cache_key = proj.checksum
cached = common.get_cached_paths(
proj, cache_name, cache_key, result_dir)
if cached:
log.success("reuse cached archive: %s", cached[0])
return cached
script = proj.config_get('project.make_archive_script')
if not script:
msg = ("make-archive requires project.make_archive_script option to\n"
"be set in project config to a script that creates project\n"
"archive and prints path to it on last stdout line.\n\n"
"archive and prints its path to stdout.\n\n"
"Please update project config with required information:\n\n"
"%s" % proj.config_path)
raise exception.MissingRequiredConfigOption(msg=msg)
......@@ -50,6 +60,10 @@ def make_archive(version=None, project=None, use_cache=True):
raise exception.UnexpectedCommandOutput(msg=msg)
log.info("archive created: %s" % in_archive_path)
if result_dir:
ar_base_path = Path(result_dir)
else:
ar_base_path = proj.dev_archive_path
archive_fn = in_archive_path.name
if version:
# specific version requested - rename if needed
......@@ -58,18 +72,23 @@ def make_archive(version=None, project=None, use_cache=True):
archive_fn = name + sep + version + ext
msg = "archive renamed to match requested version: %s"
log.info(msg, archive_fn)
archive_path = proj.dev_archive_path / archive_fn
archive_path = ar_base_path / archive_fn
log.info("copying archive to: %s" % archive_path)
os.makedirs(py35path(proj.dev_archive_path), exist_ok=True)
os.makedirs(py35path(ar_base_path), exist_ok=True)
shutil.copy(py35path(in_archive_path), py35path(archive_path))
log.success("made archive: %s", archive_path)
results = [archive_path]
if use_cache:
proj.cache.update(
'archive/dev', proj.checksum, str(archive_path))
return archive_path
cache_name, cache_key, results)
return results
def get_archive(version=None, project=None, use_cache=True):
def get_archive(
version=None,
result_dir=None,
use_cache=True,
project=None):
"""
download archive for current project
"""
......@@ -78,14 +97,17 @@ def get_archive(version=None, project=None, use_cache=True):
version = proj.upstream_version
if not version:
raise exception.UnableToDetectUpstreamVersion()
use_cache = proj.cache.enabled(use_cache)
archive_url = proj.upstream_archive_url(version)
use_cache = proj.cache.enabled(use_cache)
if use_cache:
archive_path = proj.cache.get('archive/upstream', archive_url)
if archive_path:
log.success("reuse cached archive: %s", archive_path)
return archive_path
cache_name = 'archive/upstream'
cache_key = archive_url
cached = common.get_cached_paths(
proj, cache_name, cache_key, result_dir)
if cached:
log.success("reuse cached archive: %s", cached[0])
return cached
log.info('downloading archive: %s', archive_url)
r = requests.get(archive_url, allow_redirects=True)
......@@ -97,35 +119,41 @@ def get_archive(version=None, project=None, use_cache=True):
raise exception.FileDownloadFailed(
msg=msg % (content_type, archive_url))
_, _, archive_name = archive_url.rpartition('/')
archive_path = proj.upstream_archive_path / archive_name
if result_dir:
ar_base_path = Path(result_dir)
else:
ar_base_path = proj.upstream_archive_path
_, _, archive_fn = archive_url.rpartition('/')
archive_path = ar_base_path / archive_fn
log.info('saving archive to: %s', archive_path)
os.makedirs(py35path(proj.upstream_archive_path), exist_ok=True)
os.makedirs(py35path(ar_base_path), exist_ok=True)
archive_path.open('wb').write(r.content)
log.success('downloaded archive: %s', archive_path)
if use_cache:
proj.cache.update(
'archive/upstream', archive_url, str(archive_path))
results = [archive_path]
signature_url = proj.upstream_signature_url(version)
if not signature_url:
if signature_url:
# singature check
log.info('downloading signature: %s', signature_url)
r = requests.get(signature_url, allow_redirects=True)
if not r.ok:
raise exception.FileDownloadFailed(
code=r.status_code, url=signature_url)
_, _, signature_name = signature_url.rpartition('/')
signature_path = ar_base_path / signature_name
log.info('saving signature to: %s', signature_path)
signature_path.open('wb').write(r.content)
log.success('downloaded signature: %s', signature_path)
results.append(signature_path)
else:
log.verbose("project.upstream_signature_url not set"
" - skipping signature download")
return archive_path
# singature check
log.info('downloading signature: %s', signature_url)
r = requests.get(signature_url, allow_redirects=True)
if not r.ok:
raise exception.FileDownloadFailed(
code=r.status_code, url=signature_url)
_, _, signature_name = signature_url.rpartition('/')
signature_path = proj.upstream_archive_path / signature_name
log.info('saving signature to: %s', signature_path)
signature_path.open('wb').write(r.content)
log.success('downloaded signature: %s', signature_path)
return archive_path
if use_cache:
proj.cache.update(
cache_name, cache_key, results)
return results
def find_archive(archive, upstream=False, project=None):
......
"""
apkg lib for handling package builds
"""
from pathlib import Path
import shutil
from apkg import adistro
from pathlib import Path
from apkg.cache import file_checksum
from apkg import exception
from apkg.lib import srcpkg as _srcpkg
from apkg.lib import common
from apkg.log import getLogger
from apkg.project import Project
from apkg.lib import srcpkg as _srcpkg
log = getLogger(__name__)
def build_package(
upstream=False,
srcpkg=None,
archive=None,
upstream=False,
version=None,
release=None,
distro=None,
result_dir=None,
install_dep=False,
isolated=False,
use_cache=True,
......@@ -29,7 +32,6 @@ def build_package(
proj = project or Project()
distro = adistro.distro_arg(distro)
use_cache = proj.cache.enabled(use_cache)
log.info("target distro: %s" % distro)
if srcpkg:
......@@ -41,23 +43,24 @@ def build_package(
log.info("using existing source package: %s" % srcpkg_path)
else:
# make source package
srcpkg_path = _srcpkg.make_srcpkg(
srcpkg_path = Path(_srcpkg.make_srcpkg(
archive=archive,
version=version,
release=release,
distro=distro,
upstream=upstream,
project=proj,
use_cache=use_cache)
use_cache=use_cache)[0])
if use_cache and not upstream:
cache_name = 'pkg/dev/%s' % distro
pkgs = proj.cache.get(cache_name, proj.checksum)
if pkgs:
log.success(
"reuse %d cached packages from: %s",
len(pkgs), pkgs[0].parent)
return pkgs
use_cache = proj.cache.enabled(use_cache)
if use_cache:
cache_name = 'pkg/%s' % distro
cache_key = file_checksum(srcpkg_path)
cached = common.get_cached_paths(
proj, cache_name, cache_key, result_dir)
if cached:
log.success("reuse %d cached packages", len(cached))
return cached
if install_dep:
# install build deps if requested
......@@ -72,34 +75,37 @@ def build_package(
# get needed paths
nvr = pkgstyle.get_srcpkg_nvr(srcpkg_path)
build_path = proj.package_build_path / distro / nvr
out_path = proj.package_out_path / distro / nvr
if result_dir:
result_path = Path(result_dir)
else:
result_path = proj.package_out_path / distro / nvr
log.info("source package NVR: %s", nvr)
log.info("build dir: %s", build_path)
log.info("result dir: %s", out_path)
log.info("result dir: %s", result_path)
# ensure build build doesn't exist
if build_path.exists():
log.info("removing existing build dir: %s" % build_path)
shutil.rmtree(build_path)
# ensure output dir doesn't exist
if out_path.exists():
log.info("removing existing result dir: %s" % out_path)
shutil.rmtree(out_path)
# ensure result dir doesn't exist unless specified
if not result_dir and result_path.exists():
log.info("removing existing result dir: %s" % result_path)
shutil.rmtree(result_path)
# build package using chosen distro packaging style
pkgs = pkgstyle.build_packages(
build_path, out_path, srcpkg_path,
build_path, result_path, srcpkg_path,
isolated=isolated,
)
if not pkgs:
msg = ("package build reported success but there are "
"no packages:\n\n%s" % out_path)
"no packages:\n\n%s" % result_path)
raise exception.UnexpectedCommandOutput(msg=msg)
log.success("built %s packages in: %s", len(pkgs), out_path)
log.success("built %s packages in: %s", len(pkgs), result_path)
if use_cache and not upstream:
fns = list(map(str, pkgs))
proj.cache.update(
cache_name, proj.checksum, fns)
cache_name, cache_key, fns)
return pkgs
......
import os
from pathlib import Path
import shutil
from apkg.log import getLogger
from apkg.compat import py35path
log = getLogger(__name__)
def copy_paths(paths, dst):
    """
    utility to copy a list of paths to dst

    paths already located directly in dst are kept as-is;
    return the list of resulting paths in dst
    """
    if not dst.exists():
        os.makedirs(py35path(dst), exist_ok=True)
    dst_resolved = dst.resolve()
    out_paths = []
    for src in paths:
        if src.parent.resolve() == dst_resolved:
            # already inside dst - nothing to copy
            out_paths.append(src)
            continue
        target = dst / src.name
        log.verbose("copying file: %s -> %s", src, target)
        shutil.copy(py35path(src), py35path(target))
        out_paths.append(target)
    return out_paths
def get_cached_paths(proj, cache_name, cache_key,