Commit 32e5d94e authored by Jakub Ružička

refactor: use YAML as srcpkg input

parent 8c400984
Pipeline #136452 passed with warnings
Showing with 282 additions and 145 deletions
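
At a glance, the change is to the I/O contract of the archive commands: make-archive and get-archive now describe their results as a YAML dict (archive, version, and optionally components, signature, archive_url) instead of a flat list of paths, and srcpkg learns to read that YAML back. A minimal sketch of what the new output looks like and how it parses; the file names below are made up for illustration:

import yaml

# hypothetical output of 'apkg make-archive -o yaml' after this commit
out = """
archive: pkg/archives/dev/foo-1.2.3.tar.gz
version: 1.2.3
components:
  files: pkg/archives/dev/files.tar.gz
"""
results = yaml.safe_load(out)
print(results['archive'])              # pkg/archives/dev/foo-1.2.3.tar.gz
print(results['components']['files'])  # pkg/archives/dev/files.tar.gz
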
......@@ -5,6 +5,7 @@ import requests
import click
from apkg import ex
from apkg.commands.make_archive import archive_dict2list
from apkg.util import common
from apkg.log import getLogger
from apkg.project import Project
......@@ -18,6 +19,9 @@ log = getLogger(__name__)
help='version of archive to download')
@click.option('-O', '--result-dir',
help="put results into specified dir")
@click.option('-o', '--out-format', default='yaml', show_default=True,
type=click.Choice(['yaml', 'list']),
help="set output format")
@click.option('--cache/--no-cache', default=True, show_default=True,
help="enable/distable cache")
@click.help_option('-h', '--help', help='show this help')
......@@ -25,8 +29,17 @@ def cli_get_archive(*args, **kwargs):
"""
download upstream archive for current project
"""
out_format = kwargs.pop('out_format')
results = get_archive(*args, **kwargs)
common.print_results(results)
if out_format == 'list':
rlist = archive_dict2list(results)
common.print_results(rlist)
else:
# YAML
common.print_results_dict(results)
return results
......@@ -61,7 +74,7 @@ def get_archive(
cache_key = 'archive/upstream/%s' % archive_url
cached = common.get_cached_paths(proj, cache_key, result_dir)
if cached:
log.success("reuse cached archive: %s", cached[0])
log.success("reuse cached archive: %s", cached['archive'])
return cached
log.info('downloading archive: %s', archive_url)
......@@ -94,7 +107,12 @@ def get_archive(
ar_base_path.mkdir(parents=True, exist_ok=True)
archive_path.open('wb').write(r.content)
log.success('downloaded archive: %s', archive_path)
results = [archive_path]
results = {
'archive': archive_path,
'archive_url': archive_url,
'version': str(version),
}
signature_url = proj.upstream_signature_url(version)
if signature_url:
......@@ -109,7 +127,8 @@ def get_archive(
log.info('saving signature to: %s', signature_path)
signature_path.open('wb').write(r.content)
log.success('downloaded signature: %s', signature_path)
results.append(signature_path)
results['signature'] = signature_path
results['signature_url'] = signature_url
else:
log.verbose("project.upstream_signature_url not set"
" - skipping signature download")
......
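
get_archive() now returns the dict above (archive, archive_url, version, and optionally signature, signature_url) instead of a list of paths; the new -o list option maps it back to the old flat form via archive_dict2list. A rough usage sketch of that helper (defined later in this commit), with illustrative paths:

from pathlib import Path
from apkg.commands.make_archive import archive_dict2list  # added by this commit

results = {
    'archive': Path('pkg/archives/upstream/apkg-v0.4.2.tar.gz'),
    'archive_url': 'https://example.org/apkg-v0.4.2.tar.gz',
    'version': '0.4.2',
}
# only the archive and component paths survive the conversion
print(archive_dict2list(results))
# [PosixPath('pkg/archives/upstream/apkg-v0.4.2.tar.gz')]
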
from pathlib import Path
import click
import yaml
from apkg import ex
from apkg.cli import cli
......@@ -14,23 +15,13 @@ import apkg.util.shutil35 as shutil
log = getLogger(__name__)
def copy_archive(source, destdir, name=None):
if name is None:
name = source.name
dest = destdir / name
if source != dest:
log.info("copying archive to: %s", dest)
destdir.mkdir(parents=True, exist_ok=True)
shutil.copy(source, dest)
return dest
@cli.command(name='make-archive', aliases=['ar'])
@click.option('-O', '--result-dir',
help="put results into specified dir")
@click.option('-o', '--out-format', default='yaml', show_default=True,
type=click.Choice(['yaml', 'list']),
help="set output format")
@click.option('--cache/--no-cache', default=True, show_default=True,
help="enable/distable cache")
@click.help_option('-h', '--help', help='show this help')
......@@ -38,8 +29,17 @@ def cli_make_archive(*args, **kwargs):
"""
create dev archive from current project state
"""
out_format = kwargs.pop('out_format')
results = make_archive(*args, **kwargs)
common.print_results_dict(results)
if out_format == 'list':
rlist = archive_dict2list(results)
common.print_results(rlist)
else:
# YAML
common.print_results_dict(results)
return results
......@@ -68,15 +68,15 @@ def make_archive(
if not script:
msg = ("make-archive requires project.make_archive_script option to\n"
"be set in project config to a script that creates project\n"
"archive and prints its path to stdout.\n\n"
"archive and prints its path to stdout in YAML like this:\n\n"
"archive: pkg/archive/dev/foo-1.2.3.tar.gz\n\n"
"Please update project config with required information:\n\n"
"%s" % proj.path.config)
raise ex.MissingRequiredConfigOption(msg=msg)
log.info("running make_archive_script: %s", script)
out = run(script, quiet=True)
lines = out.split('\n')
cmd_out = run(script, quiet=True)
out = str(cmd_out.stdout)
if result_dir:
ar_base_path = Path(result_dir)
......@@ -84,81 +84,110 @@ def make_archive(
ar_base_path = proj.path.dev_archive
results = {}
if proj.compat_level < 5:
# Use old undocumented behaviour when the last line of output was used
in_archive_path = Path(lines[-1])
archive_path = copy_archive(in_archive_path, ar_base_path)
results["archive"] = archive_path
log.success("made archive: %s", archive_path)
if proj.compat_level >= 5:
try:
results = yaml.safe_load(out)
except Exception as e:
msg = ("Failed to parse make_archive_script YAML output:\n\n"
"%s\n\nError:\n\n%s" % (out, e))
raise ex.UnexpectedCommandOutput(msg=msg)
if not isinstance(results, dict):
msg = ("Invalid make_archive_script YAML output format:\n\n"
"%s\n\nExpected format example:\n\n"
"archive: pkg/archives/dev/foo-1.2.3.tar.gz" % (out))
raise ex.UnexpectedCommandOutput(msg=msg)
else:
for line in lines:
if line.startswith('#'):
continue
typ, argument, *rest = line.split()
if rest:
msg = ("make_archive_script finished successfully but\n"
"output could not be parsed on line:\n\n"
"%s" % line)
raise ex.UnexpectedCommandOutput(msg=msg)
if typ == "version":
if "version" in results:
msg = ("make_archive_script returned more than one "
"'version'\n")
raise ex.UnexpectedCommandOutput(msg=msg)
results["version"] = argument
log.success("detected version: %s", argument)
continue
in_archive_path = Path(argument)
if not in_archive_path.exists():
msg = ("make_archive_script finished successfully but\n"
"the file listed doesn't exist:\n\n"
"%s" % in_archive_path)
raise ex.UnexpectedCommandOutput(msg=msg)
tag, *extra = typ.split(':')
if tag == "archive":
if "archive" in results:
msg = ("make_archive_script returned more than one "
"'archive'\n")
raise ex.UnexpectedCommandOutput(msg=msg)
archive_path = copy_archive(in_archive_path, ar_base_path)
results["archive"] = archive_path
log.success("made archive: %s", archive_path)
elif tag == "component":
if extra:
component_name = extra[0]
else:
component_name, _ = in_archive_path.name.split('.', 1)
components = results.setdefault("components", {})
if component_name in components:
msg = ("make_archive_script returned a duplicate\n"
"component:\n\n"
"%s" % component_name)
raise ex.UnexpectedCommandOutput(msg=msg)
archive_path = copy_archive(in_archive_path, ar_base_path)
components[component_name] = archive_path
log.success("made archive for component %r: %s",
component_name, archive_path)
else:
msg = ("make_archive_script returned an unknown tag\n"
"on line:\n\n"
"%s" % line)
if "version" not in results:
results["version"] = get_archive_version(results["archive"])
# use old undocumented behaviour when the last line of output was used
msg = ("using legacy make_archive_script format (last line)"
" due to compat level %s" % proj.compat_level)
log.warning(msg)
lines = out.split('\n')
archive_path = Path(lines[-1])
results["archive"] = archive_path
sanitize_archive_output(results)
archive_path = results.get('archive')
if not archive_path:
msg="make_archive_script didn't return archive:\n\n%s" % out
raise ex.UnexpectedCommandOutput(msg=msg)
# copy all to result dir (updates results in-place)
copy_archives(results, ar_base_path)
archive_path = results.get('archive')
log.success("made archive: %s", archive_path)
if use_cache:
proj.cache.update(cache_key, results)
return results
def sanitize_archive_output(output: dict):
# convert paths to pathlib.Path
archive = output.get('archive')
if archive:
if not isinstance(archive, Path):
archive = Path(archive)
output['archive'] = archive
if archive and not archive.exists():
raise ex.ArchiveNotFound(ar=archive)
components = output.get('components')
if components:
for comp_name, comp_path in list(components.items()):
if not isinstance(comp_path, Path):
comp_path = Path(comp_path)
components[comp_name] = comp_path
if not comp_path.exists():
raise ex.ArchiveNotFound(
msg="Archive component '%s' not found: %s" %
(comp_name, comp_path))
# provide explicit version when possible
if 'version' not in output and archive:
output['version'] = get_archive_version(archive)
# ensure version is always str (YAML can occasionally yield int or float)
version = output.get('version')
if version and not isinstance(version, str):
output['version'] = str(version)
def archive_dict2list(output: dict) -> list:
results = []
archive = output.get('archive')
if archive:
results.append(archive)
components = output.get('components')
if components:
results += components.values()
return results
def copy_archives(output: dict, destdir: Path):
"""
copy archive and all components to destdir
"""
archive = output.get('archive')
if archive:
output['archive'] = copy_archive(archive, destdir)
components = output.get('components')
if components:
for comp_name, comp_path in list(components.items()):
components[comp_name] = copy_archive(
comp_path, destdir, txt="'%s' component" % comp_name)
def copy_archive(source, destdir, name=None, txt='archive'):
if name is None:
name = source.name
dest = destdir / name
if source != dest:
log.info("copying %s to: %s", txt, dest)
destdir.mkdir(parents=True, exist_ok=True)
shutil.copy(source, dest)
return dest
APKG_CLI_COMMANDS = [cli_make_archive]
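
To recap the new compat level 5 flow in make_archive(): the script's stdout is parsed with yaml.safe_load, sanitize_archive_output() normalizes the dict (string paths become pathlib.Path, version is coerced to str and derived from the archive name when missing), and copy_archives() moves everything into the result dir. A stripped-down sketch of the normalization step with made-up paths; unlike the real function, it skips the existence checks:

from pathlib import Path

def _sanitize_sketch(output):
    # illustrative only -- the real sanitize_archive_output() also
    # verifies the files exist and falls back to get_archive_version()
    if 'archive' in output:
        output['archive'] = Path(output['archive'])
    for name, path in (output.get('components') or {}).items():
        output['components'][name] = Path(path)
    if 'version' in output and not isinstance(output['version'], str):
        output['version'] = str(output['version'])
    return output

print(_sanitize_sketch({'archive': 'pkg/archives/dev/foo-1.2.tar.gz', 'version': 1.2}))
# {'archive': PosixPath('pkg/archives/dev/foo-1.2.tar.gz'), 'version': '1.2'}  (on POSIX)
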
......@@ -7,9 +7,10 @@ from apkg.cache import file_checksum, path_checksum
from apkg import ex
from apkg.util import common
from apkg.commands.get_archive import get_archive
from apkg.commands.make_archive import make_archive
from apkg.commands.make_archive import make_archive, sanitize_archive_output
from apkg.log import getLogger
from apkg.project import Project
from apkg.util.archive import get_archive_version
import apkg.util.shutil35 as shutil
......@@ -38,6 +39,9 @@ log = getLogger(__name__)
help="enable/disable cache")
@click.option('-F', '--in-file', 'in_files', multiple=True,
help="specify input file(s), '-' to read from stdin")
@click.option('-f', '--in-format', default='auto', show_default=True,
type=click.Choice(['auto', 'yaml', 'list']),
help="set input format")
@click.help_option('-h', '--help',
help="show this help message")
def cli_srcpkg(*args, **kwargs):
......@@ -53,6 +57,7 @@ def srcpkg(
archive=False,
inputs=None,
in_files=None,
in_format=None,
upstream=False,
version=None,
release=None,
......@@ -96,14 +101,26 @@ def srcpkg(
if not release:
release = '1'
inputs = common.parse_inputs(inputs, in_files)
if not in_format or in_format == 'auto':
# default to list format when inputs arg is used, otherwise use yaml format
in_format = 'list' if inputs else 'yaml'
inputs = common.parse_inputs(inputs, in_files, in_format=in_format)
if in_format == 'list':
# convert inputs from list to yaml format
if inputs:
inputs = {'archive': inputs[0]}
else:
inputs = {}
sanitize_archive_output(inputs)
if not archive:
# archive not specified - use make_archive or get_archive
if inputs:
instr = ", ".join([str(i) for i in inputs])
raise ex.InvalidInput(
fail="unexpected input file(s): %s" % instr)
fail="unexpected input:\n\n%s" % common.yaml_dump(inputs))
if upstream:
inputs = get_archive(
......@@ -115,10 +132,20 @@ def srcpkg(
cache=cache,
project=proj)
ar_path = inputs["archive"]
version = inputs["version"]
ar_path = inputs.get('archive')
if not ar_path:
raise ex.InvalidInput(
msg="Missing required input: archive")
ar_path = Path(ar_path)
paths = [ar_path] + list(inputs.get("components", {}).values())
version = inputs.get('version')
if not version:
version = get_archive_version(ar_path)
components = inputs.get('components', {})
paths = [ar_path] + list(components.values())
common.ensure_inputs(paths)
if use_cache:
......@@ -217,4 +244,16 @@ def srcpkg(
return results
def sanitize_inputs(inputs):
# convert paths to pathlib.Path
archive = inputs.get('archive')
if archive and not isinstance(archive, Path):
inputs['archive'] = Path(archive)
components = inputs.get('components')
if components:
for key, val in list(components.items()):
if not isinstance(val, Path):
components[key] = Path(val)
APKG_CLI_COMMANDS = [cli_srcpkg]
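
The srcpkg input handling above boils down to a small format-resolution rule: explicit positional inputs keep the legacy list behaviour, while file/stdin input (-F) defaults to the new YAML format unless -f says otherwise. A sketch of that rule in isolation (the real logic lives inline in srcpkg() and common.parse_inputs()):

def resolve_in_format(in_format, positional_inputs):
    # mirrors the 'auto' handling added to srcpkg() above
    if not in_format or in_format == 'auto':
        return 'list' if positional_inputs else 'yaml'
    return in_format

print(resolve_in_format('auto', ['foo-1.2.3.tar.gz']))  # list
print(resolve_in_format('auto', []))                    # yaml
print(resolve_in_format('yaml', ['ignored']))           # yaml

In practice that should let apkg get-archive | apkg srcpkg -F - round-trip the YAML with no extra flags, while passing an archive path as a positional argument keeps working as before.
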
......@@ -106,7 +106,7 @@ class MissingRequiredModule(ApkgException):
class ArchiveNotFound(ApkgException):
msg_fmt = "{type} archive not found: {ar}"
msg_fmt = "Archive not found: {ar}"
returncode = 36
......
......@@ -6,6 +6,8 @@ import sys
import tempfile
from typing import Iterable, Mapping, Optional, Union
import yaml
from apkg import ex
from apkg.log import getLogger
import apkg.util.shutil35 as shutil
......@@ -70,43 +72,84 @@ def print_results(results):
def print_results_dict(results):
"""
print results dict received from apkg command (used in make-archive)
print results dict as YAML (used in make-archive and get-archive)
"""
for key, val in results.items():
print("%s: %s" % (key, val))
print(yaml_dump(results))
def yaml_path_representer(dumper, obj):
# to print pathlib.Path as str
return dumper.represent_scalar("tag:yaml.org,2002:str", str(obj))
class SafeDumper(yaml.dumper.SafeDumper):
# don't modify global PyYAML state
pass
yaml.add_representer(
# print pathlib.Path as str
type(Path()),
yaml_path_representer,
SafeDumper,
)
def yaml_dump(*args, **kwargs):
kwargs['Dumper'] = SafeDumper
return yaml.dump(*args, **kwargs).rstrip()
def parse_inputs(files, file_lists):
def parse_inputs(inputs, in_files, in_format='list'):
"""
utility to parse apkg input files and input file lists
into a single list of input files
utility parser of apkg command inputs
"""
if not files:
files = []
if not file_lists:
file_lists = []
all_inputs = list(inputs) if inputs else []
if in_files:
if len([fl for fl in in_files if fl == '-']) > 1:
fail = "requested to read stdin multiple times"
raise ex.InvalidInput(fail=fail)
all_files = [Path(f) for f in files]
for fl in in_files:
if fl == '-':
f = sys.stdin
else:
f = open(fl, 'r', encoding='utf-8')
all_inputs += [ln.rstrip() for ln in f.readlines()]
f.close()
if len([fl for fl in file_lists if fl == '-']) > 1:
fail = "requested to read stdin multiple times"
raise ex.InvalidInput(fail=fail)
if in_format == 'yaml':
result = parse_yaml_inputs(all_inputs)
else:
result = parse_list_inputs(all_inputs)
for fl in file_lists:
if fl == '-':
f = sys.stdin
else:
f = open(fl, 'r', encoding='utf-8')
all_files += [Path(ln.strip()) for ln in f.readlines()]
f.close()
return result
return all_files
def parse_list_inputs(inputs):
return [Path(i) for i in inputs]
def ensure_inputs(inputs):
def parse_yaml_inputs(inputs):
if not inputs:
return {}
txt = '\n'.join(inputs)
result = yaml.safe_load(txt)
return result
def ensure_inputs(inputs, n=0):
if not inputs:
raise ex.InvalidInput(
fail="no input file(s) specified")
fail="no input file specified")
if n:
n_in = len(inputs)
if n_in != n:
exp = 'single input file' if n == 1 else '%s input files' % n
ins = '\n'.join([str(p) for p in inputs])
raise ex.InvalidInput(
fail="expected %s, but got %s:\n\n%s" % (exp, n_in, ins))
for f in inputs:
if not f or not f.exists():
raise ex.InvalidInput(
......
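
The custom SafeDumper above exists so that pathlib.Path values in result dicts serialize as plain strings without mutating PyYAML's global dumper. A quick usage sketch of the new helpers (module path as shown in this diff):

from pathlib import Path
from apkg.util.common import yaml_dump, parse_yaml_inputs

print(yaml_dump({'archive': Path('pkg/archives/dev/foo-1.2.3.tar.gz')}))
# archive: pkg/archives/dev/foo-1.2.3.tar.gz

# input lines are joined back into one YAML document before parsing
print(parse_yaml_inputs(['archive: foo.tar.gz', 'version: 1.2.3']))
# {'archive': 'foo.tar.gz', 'version': '1.2.3'}
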
......@@ -10,9 +10,9 @@ RUN zypper dup -y
# packaging deps
RUN zypper install -y curl git rpm-build python3-pip python311-pip
# basic deps (default ancient python 3.6)
RUN zypper install -y python3-beautifulsoup4 python3-setuptools python3-Jinja2 python3-click python3-distro python3-blessings python3-requests python3-toml python3-cached-property
RUN zypper install -y python3-beautifulsoup4 python3-setuptools python3-Jinja2 python3-click python3-distro python3-blessings python3-requests python3-toml python3-cached-property python3-PyYAML
# basic deps (optional python 3.11)
RUN zypper install -y python311-beautifulsoup4 python311-setuptools python311-Jinja2 python311-click python311-distro python311-blessings python311-requests python311-toml python311-cached-property
RUN zypper install -y python311-beautifulsoup4 python311-setuptools python311-Jinja2 python311-click python311-distro python311-blessings python311-requests python311-toml python311-cached-property python311-PyYAML
# testing deps
RUN zypper install -y python3-pytest python311-pytest
# systemd (some tests list services using systemctl)
......
......@@ -17,4 +17,4 @@ python_module = "apkg.templatevars.debseries"
[apkg]
# recommended - apkg compat level
compat = 4
compat = 5
......@@ -25,6 +25,7 @@ python3Packages.buildPythonApplication rec {
distro # current distro detection
jinja2 # templating
packaging # version parsing
pyyaml # YAML for serialization
requests # HTTP for humans™
setuptools # required for build
toml # config files
......
......@@ -14,4 +14,4 @@ tar -czf "$ARPATH" --transform "s#^minimal-no-git#$NAMEVER#" \
-C .. minimal-no-git
# apkg expects stdout to list archive files
echo archive $ARPATH
echo "archive: '$ARPATH'"
......@@ -15,27 +15,26 @@ tar -czf "$ARPATH" --transform "s#^multiple-sources#$NAMEVER#" \
-C .. multiple-sources
# apkg expects stdout to describe archive files
echo archive "$ARPATH"
echo archive: "$ARPATH"
# we can also indicate the upstream version explicitly,
# if we don't, apkg will use archive's filename
#
# Silly example: we attached a '+repack' to the filename
# and don't want it in the package version
echo version "$VERSION"
echo version: $VERSION
# we can print whatever we like to stderr
echo "About to prepare the additional (component) archives:" >&2
echo "components:"
# Upstream can be split into several archives, we can use "component" to
# collect them and extract the others where needed
ARPATH="$OUTPATH/files.tar.gz"
echo -e "\t$ARPATH" >&2
tar -czf "$ARPATH" --strip-components=1 -C distro/components/files .
echo component "$ARPATH"
echo " files: '$ARPATH'"
# A component whose name doesn't match where it needs to be extracted
ARPATH="$OUTPATH/extra-v0.5.tar.gz"
echo -e "\t$ARPATH" >&2
tar -czf "$ARPATH" --strip-components=1 -C distro/components/extra .
echo component:extra "$ARPATH"
echo " extra: '$ARPATH'"
......@@ -14,4 +14,4 @@ tar -czf "$ARPATH" --transform "s#^template-variables#$NAMEVER#" \
-C .. template-variables
# apkg expects stdout to list archive files
echo archive $ARPATH
echo "archive: '$ARPATH'"
......@@ -33,6 +33,7 @@ dependencies = [
"distro",
"jinja2",
"packaging",
"pyyaml",
"requests",
]
dynamic = ["version"]
......
......@@ -6,6 +6,7 @@ click # nice CLI framework
distro # current distro detection
jinja2 # templating
packaging # version parsing
pyyaml # YAML for serialization
requests # HTTP for humans™
setuptools # required for build
toml # config files
toml # TOML for config files
......@@ -4,7 +4,7 @@ set -e
VERSION_TAG=$(git describe --tags --abbrev=0)
VERSION=${VERSION_TAG#v}
if ! git describe --tags --exact-match; then
if ! git describe --tags --exact-match 2> /dev/null; then
# devel version (not tagged)
GIT_HASH=$(git rev-parse --short=6 HEAD)
N_COMMITS=$(git rev-list $VERSION_TAG.. --count)
......@@ -27,7 +27,7 @@ if [[ $VERSION = *"dev"* ]]; then
# update devel version
sed -i "s/\(__version__ *= *'\)[^']\+'/\1$VERSION'/" apkg/__init__.py
git add apkg/__init__.py
if git commit -a -m "DROP: update __version__ = $VERSION"; then
if git commit -a -m "DROP: update __version__ = $VERSION" > /dev/null; then
# undo commit in the end
cleanup() {
git reset --hard HEAD^ >/dev/null
......@@ -43,5 +43,5 @@ fi
mkdir -p "$OUTPATH"
git archive --format tgz --output $ARPATH --prefix $NAMEVER/ HEAD
# apkg expects stdout to list archive files
echo $ARPATH
# apkg expects stdout to contain a YAML dict with archive set
echo "archive: '$ARPATH'"
......@@ -34,6 +34,7 @@ install_requires =
distro
jinja2
packaging
pyyaml
requests
[options.packages.find]
......
......@@ -3,6 +3,8 @@ import os
import pytest
import re
import yaml
from apkg.util import test
from apkg.util.run import cd
from apkg.util.git import git
......@@ -81,12 +83,13 @@ def test_apkg_make_archive_cache(repo_path, caplog):
def test_apkg_get_archive_manual(repo_path, capsys):
VERSION = '0.0.4'
VERSION = '0.4.2'
with cd(repo_path):
assert apkg('get-archive', '--version', VERSION) == 0
out, _ = capsys.readouterr()
# first stdout line should be downloaded archive
assert out.startswith("pkg/archives/upstream/apkg-v%s.tar.gz" % VERSION)
results = yaml.safe_load(out)
assert results['archive'] == "pkg/archives/upstream/apkg-v%s.tar.gz" % VERSION
assert results['version'] == VERSION
def test_apkg_get_archive_auto(repo_path, capsys):
......@@ -94,8 +97,9 @@ def test_apkg_get_archive_auto(repo_path, capsys):
with cd(repo_path):
assert apkg('get-archive') == 0
out, _ = capsys.readouterr()
# first stdout line should be downloaded archive
assert out.startswith("pkg/archives/upstream/apkg-")
results = yaml.safe_load(out)
version = results['version']
assert results['archive'] == "pkg/archives/upstream/apkg-v%s.tar.gz" % version
def test_apkg_srcpkg(repo_path, capsys):
......