#!/bin/sh

# contains functions used by more than one script

# shellcheck disable=SC2039

if [ -z "${base_dir}" ]; then
  # just to make shellcheck happy
  . 'conf/default.conf'
fi

# find_pkgbuilds package repository git_repository git_revision mod_git_revision
# find the PKGBUILD and modification of $package from $repository
# sets $PKGBUILD and $PKGBUILD_mod
find_pkgbuilds() {
  local package="$1"
  local repository="$2"
  local git_repository="$3"
  local git_revision="$4"
  local mod_git_revision="$5"

  local repo_path
  eval 'repo_path="${repo_paths__'"${git_repository}"'}"'

  # TODO: remove, when FS#54696 closed
  local _perl_list_moreutils_repair_flag
  if [ "${package}" = 'perl-list-moreutils' ]; then
    _perl_list_moreutils_repair_flag='-r'
  fi

  PKGBUILD=$(
    git -C "${repo_path}" archive "${git_revision}" -- "${package}/repos/" 2> /dev/null | \
      tar -t 2> /dev/null | \
      grep "$(printf '^%s-.*/PKGBUILD' "$(str_to_regex "${package}/repos/${repository}")")" | \
      grep -v -- '-i686/PKGBUILD$' | \
      grep -v -- '[-/]\(staging\|testing\|unstable\)-[^/]\+/PKGBUILD$' | \
      sort ${_perl_list_moreutils_repair_flag} | \
      tail -n1
  )
  PKGBUILD_mod=$(
    git -C "${repo_paths__archlinux32}" archive "${mod_git_revision}" 2> /dev/null | \
      tar -t "${repository}/${package}/PKGBUILD" 2> /dev/null
  ) || true

  if [ -z "${PKGBUILD}" ] && \
    [ -z "${PKGBUILD_mod}" ]; then
    >&2 printf 'Neither PKGBUILD nor modification of PKGBUILD found for package "%s" from %s (%s), revisions %s and %s.\n' \
      "${package}" \
      "${repository}" \
      "${git_repository}" \
      "${git_revision}" \
      "${mod_git_revision}"
    return 1
  fi
}

# find_repository_with_commit commit
# find the repository which has $commit
find_repository_with_commit() {
  local repository

  for repository in ${repo_names}; do
    # shellcheck disable=SC2016
    if [ "$(eval git -C "$(printf '"${repo_paths__%s}"' "${repository}")" cat-file -t '"$1"' 2> /dev/null)" = "commit" ]; then
      echo "${repository}"
      return 0
    fi
  done
  >&2 printf 'find_repository_with_commit: Cannot find repository with commit "%s"\n' "$1"
  exit 1
}

# find_git_repository_to_package_repository repository
# find the git repository which tracks the package repository $repository
find_git_repository_to_package_repository() {
  local repository
  local package_repository
  local repo_path

  package_repository="$1"

  if [ "$1" = 'build-support' ]; then
    echo 'packages'
    return 0
  fi

  for repository in ${repo_names}; do
    if [ "${repository}" = "archlinux32" ]; then
      continue
    fi
    eval 'repo_path="${repo_paths__'"${repository}"'}"'
    if git -C "${repo_path}" archive "$(cat "${work_dir}/${repository}.revision")" -- | \
      tar -t --wildcards '*/repos' | \
      grep '^\([^/]\+/\)\{3\}PKGBUILD$' | \
      cut -d/ -f3 | \
      sed 's|-[^-]\+$||' | \
      sort -u | \
      grep -qxF "${package_repository}"; then
      echo "${repository}"
      return 0
    fi
  done
  >&2 echo "can't find git repository with package repository '$1'"
  exit 1
}

# package_locked_or_blocked package git_revision mod_git_revision repository
# return if package - of given repository and revisions - is locked or blocked
package_locked_or_blocked() {
  [ -f "${work_dir}/package-states/$1.$2.$3.$4.locked" ] || \
    [ -f "${work_dir}/package-states/$1.$2.$3.$4.blocked" ]
}

# generate_package_metadata $package $git_revision $mod_git_revision $repository
# or
# generate_package_metadata $package.$git_revision.$mod_git_revision.$repository
# generate the meta data files of a package (dependencies, built packages, ...)
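# Illustrative calls (hypothetical package name and abbreviated git hashes);
# both forms are equivalent:
#   generate_package_metadata 'zsh' '0123abc' '4567def' 'extra'
#   generate_package_metadata 'zsh.0123abc.4567def.extra'
# On success the files
#   ${work_dir}/package-infos/$package.$git_revision.$mod_git_revision.$repository.{groups,packages,builds,build-depends,run-depends}
# exist; if all of them are already present, nothing is regenerated.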
generate_package_metadata() {
  local package="$1"
  local git_revision="$2"
  local mod_git_revision="$3"
  local repository="$4"
  local file_prefix
  local file
  local PKGBUILD

  if [ $# -eq 1 ]; then
    # second form
    repository="${package##*.}"
    package="${package%.*}"
    mod_git_revision="${package##*.}"
    package="${package%.*}"
    git_revision="${package##*.}"
    package="${package%.*}"
  fi

  file_prefix="${work_dir}/package-infos/${package}.${git_revision}.${mod_git_revision}.${repository}"

  if [ -e "${file_prefix}.builds" ] && \
    [ -e "${file_prefix}.build-depends" ] && \
    [ -e "${file_prefix}.run-depends" ] && \
    [ -e "${file_prefix}.groups" ] && \
    [ -e "${file_prefix}.packages" ]; then
    return 0
  fi

  if ! make_source_info "${package}" "${repository}" "${git_revision}" "${mod_git_revision}" "${file_prefix}.SRCINFO"; then
    >&2 printf '"make_source_info %s %s %s %s %s" failed.\n' "${package}" "${repository}" "${git_revision}" "${mod_git_revision}" "${file_prefix}.SRCINFO"
    exit 1
  fi
  if [ ! -s "${file_prefix}.SRCINFO" ]; then
    >&2 printf '"%s" not created by "make_source_info" - eh, what?\n' "${file_prefix}.SRCINFO"
    exit 1
  fi

  # otherwise this just calls for trouble
  sed -i '
    /^[^=]*=\s*$/d
    s/_i686\(\s*=\)/\1/
  ' "${file_prefix}.SRCINFO"

  # extract "groups" = groups \cup provides
  grep "$(printf '^\t\\(groups\\|provides\\) = ')" "${file_prefix}.SRCINFO" | \
    cut -d= -f2 | \
    sed 's|^\s\+||; s|[<>]$||' | \
    sort -u > \
    "${file_prefix}.groups"

  # extract "packages" = pkgname
  grep '^pkgname = ' "${file_prefix}.SRCINFO" | \
    cut -d= -f2 | \
    sed 's|^\s\+||; s|[<>]$||' | \
    sort -u > \
    "${file_prefix}.packages"

  # extract "builds" = provides \cup pkgname \cup groups
  cat "${file_prefix}.groups" "${file_prefix}.packages" | \
    sort -u > \
    "${file_prefix}.builds"

  # extract "build-depends" = makedepends \cup checkdepends \cup depends \cup \{ base, base-devel \} \setminus "builds"
  {
    {
      printf 'all_depend = %s\n' 'base' 'base-devel'
      sed -n "$(
        printf '/^\t%s = /p\n' \
          'depends' \
          'makedepends' \
          'checkdepends'
      )" "${file_prefix}.SRCINFO"
    } | \
      cut -d= -f2 | \
      sed 's|^\s\+||; s|[<>]$||' | \
      sort -u
    sed 'p' "${file_prefix}.builds"
  } | \
    sort | \
    uniq -u > \
    "${file_prefix}.build-depends"

  # extract "run-depends" = depends \cup \{ base \} \setminus "builds"
  {
    {
      printf 'all_depend = %s\n' 'base'
      sed -n "$(printf '/^\tdepends = /p')" "${file_prefix}.SRCINFO"
    } | \
      cut -d= -f2 | \
      sed 's|^\s\+||; s|[<>]$||' | \
      sort -u
    sed 'p' "${file_prefix}.builds"
  } | \
    sort | \
    uniq -u > \
    "${file_prefix}.run-depends"

  rm "${file_prefix}.SRCINFO"
}
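
# Note on the extraction above: `sed 'p'` prints every line of "builds" twice,
# so after `sort | uniq -u` anything that is also listed in "builds" vanishes
# from the (already de-duplicated) dependency list - a set difference with
# plain POSIX tools. Illustrative (hypothetical package names):
#   { printf '%s\n' 'bash' 'gcc' 'make'; printf '%s\n' 'gcc' 'gcc'; } | sort | uniq -u
# prints "bash" and "make", but not "gcc".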

# delete_old_metadata
# delete old (=unneeded) meta data of packages
delete_old_metadata() {
  local current_metadata

  current_metadata=$(
    find "${work_dir}/package-infos" -mindepth 1 -maxdepth 1 -printf '%f\n' | \
      sed '
        s|\.[^.]\+$||
        s|\.\([^.]\+\)\.\([^.]\+\)\.\([^.]\+\)$| \1 \2 \3|
      ' | \
      sort -u
  )

  (
    # what we have
    echo "${current_metadata}"

    # package-states should stay
    find "${work_dir}/package-states" -mindepth 1 -maxdepth 1 -printf '%f\n' | \
      sed '
        s|\.\([^.]\+\)\.\([^.]\+\)\.\([^.]\+\)\.[^.]\+$| \1 \2 \3|
      ' | \
      sort -u | \
      sed 'p'

    # build-list items should stay
    sed 'p' "${work_dir}/build-list"

    tmp_dir=$(mktemp -d)
    trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT

    echo "${current_metadata}" | \
      sort -k1,1 > \
      "${tmp_dir}/current-metadata"

    # the newest of the following should stay:
    (
      # deletion-list items
      cat "${work_dir}/deletion-list"
      # all packages in the repos
      for repo in ${repo_names}; do
        eval 'git -C "${repo_paths__'"${repo}"'}" archive '"$(cat "${work_dir}/${repo}.revision")" | \
          tar -t | \
          sed '
            s|/$||
            /\//d
          '
      done
    ) | \
      sort -u | \
      join -j 1 -o 2.2,2.3,2.4,2.1 - "${tmp_dir}/current-metadata" | \
      sort -k4,4 > \
      "${tmp_dir}/find-newest-revisions"

    uniq -uf3 < \
      "${tmp_dir}/find-newest-revisions" | \
      awk '{print $4 " " $1 " " $2 " " $3}' | \
      sed 'p'

    uniq -Df3 < \
      "${tmp_dir}/find-newest-revisions" | \
      uniq --group=append -f3 | \
      (
        revs=''
        mod_revs=''
        opkg=''
        orepo=''
        while read -r rev mod_rev repo pkg; do
          if [ -z "${rev}" ] && \
            [ -z "${mod_rev}" ] && \
            [ -z "${repo}" ] && \
            [ -z "${pkg}" ]; then
            printf '%s %s %s %s\n' \
              "$(
                printf '%s\n' ${revs} | \
                  find_newest_of_git_revisions
              )" \
              "$(
                printf '%s\n' ${mod_revs} | \
                  find_newest_of_git_revisions
              )" \
              "${orepo}" \
              "${opkg}"
            revs=''
            mod_revs=''
            orepo=''
            opkg=''
            continue
          fi
          revs=$(
            # shellcheck disable=SC2086
            printf '%s\n' ${revs} ${rev} | \
              sort -u
          )
          mod_revs=$(
            # shellcheck disable=SC2086
            printf '%s\n' ${mod_revs} ${mod_rev} | \
              sort -u
          )
          orepo="${repo}"
          opkg="${pkg}"
        done
      ) | \
      awk '{print $4 " " $1 " " $2 " " $3}' | \
      sed 'p'
  ) | \
    sort | \
    uniq -u | \
    while read -r pkg rev mod_rev repo; do
      rm -f "${work_dir}/package-infos/${pkg}.${rev}.${mod_rev}.${repo}."*
    done
}

# repository_of_package $package.$repo_revision.$mod_repo_revision.$repository
# print which (stable) repository a package belongs to
repository_of_package() {
  local package="$1"
  local repository="${package##*.}"
  package="${package%.*}"
  local a32_rev="${package##*.}"
  package="${package%.*.*}"

  case "${repository}" in
    'multilib')
      if git -C "${repo_paths__archlinux32}" archive --format=tar "${a32_rev}" -- 'extra-from-multilib' | \
        tar -Ox | \
        grep -qFx "${package%.*.*.*}"; then
        echo 'extra'
      else
        echo 'community'
      fi
      ;;
    *)
      echo "${repository}"
  esac
}

# official_or_community $package.$repo_revision.$mod_repo_revision.$repository $ending
# print whether the specified package is an official package (print
# $ending), a community package (print 'community-$ending') or a
# build-support package (print 'build-support')
official_or_community() {
  local prepo
  prepo=$(repository_of_package "$1")
  if [ "${prepo}" = 'community' ]; then
    echo 'community-'"$2"
  elif [ "${prepo}" = 'build-support' ]; then
    echo 'build-support'
  else
    echo "$2"
  fi
}

# ls_master_mirror $path
# list content of $path on the master mirror (via rsync)
ls_master_mirror() {
  local path="$1"

  ${master_mirror_rsync_command} \
    "${master_mirror_rsync_directory}/${path}/" | \
    grep -v '\s\.$' | \
    awk '{print $5}'
}

# remove_old_package_versions $arch $repository $package_file
# removes all older (not-newer) versions of $package_file
# in all repositories not-older (newer) than $repository
# A package is considered "not newer" if
#  a) its version is not newer.
# A package is considered "older" if
#  b) its version is older, or
#  c) it is "not newer" and its architecture is 'any' and differs from the other's, or
#  d) it is "not newer" and the other architecture is 'any' and differs from its own.
# This ensures that an 'any' package may replace arch-specific packages of the
# same version and vice versa.
remove_old_package_versions() {
  local arch="$1"
  local repository="$2"
  local package="$3"

  # repositories in which older packages should be deleted
  local delete_older_repositories
  # repositories in which not-newer packages should be deleted
  local delete_not_newer_repositories

  if echo "${standalone_package_repositories}" | \
    grep -qxF "${repository}"; then
    delete_older_repositories="${repository}"
    delete_not_newer_repositories=''
  elif echo "${staging_package_repositories}" | \
    grep -qxF "${repository}"; then
    delete_older_repositories="${repository}"
    delete_not_newer_repositories=$(
      echo "${staging_package_repositories}" | \
        grep -vxF "${repository}"
    ) || true
  elif echo "${testing_package_repositories}" | \
    grep -qxF "${repository}"; then
    delete_older_repositories=$(
      printf '%s\n' "${staging_package_repositories}" "${repository}"
    )
    delete_not_newer_repositories=$(
      echo "${testing_package_repositories}" | \
        grep -vxF "${repository}"
    ) || true
  elif echo "${stable_package_repositories}" | \
    grep -qxF "${repository}"; then
    delete_older_repositories=$(
      printf '%s\n' "${staging_package_repositories}" "${testing_package_repositories}" "${repository}"
    )
    delete_not_newer_repositories=$(
      echo "${stable_package_repositories}" | \
        grep -vxF "${repository}"
    ) || true
  else
    >&2 printf 'remove_old_package_versions: Unknown repository "%s".\n' "${repository}"
    return 1
  fi

  (
    tmp_dir=$(mktemp -d)
    trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT

    {
      # the architecture of the package (any vs. i686)
      package_arch="${package##*-}"
      package_arch="${package_arch%%.*}"
      if [ "${package_arch}" = 'any' ]; then
        package_arch_regex_inverter='!'
      else
        unset package_arch_regex_inverter
      fi

      for repo in ${delete_older_repositories}; do
        ls_master_mirror "${arch}/${repo}" | \
          sed -n '
            /\.pkg\.tar\.xz$/!d
            s|-\([^-]\+-[^-]\+\)-\([^-]\+\)$| \1 \2|
            /^'"$(str_to_regex "${package%-*-*-*}")"' / {
              s|^|2 '"${arch} ${repo}"' |
              / any\.pkg\.tar\.xz$/'"${package_arch_regex_inverter}"'{
                s|^2|0|
              }
              p
            }
          '
      done

      for repo in ${delete_not_newer_repositories}; do
        ls_master_mirror "${arch}/${repo}" | \
          sed -n '
            /\.pkg\.tar\.xz$/!d
            s|-\([^-]\+-[^-]\+\)-\([^-]\+\)$| \1 \2|
            /^'"$(str_to_regex "${package%-*-*-*}")"' / {
              s|^|0 '"${arch} ${repo}"' |
              p
            }
          '
      done

      echo "${package%-*}" | \
        sed 's|^.*-\([^-]\+-[^-]\+\)$|1 %cut% %it% %here% \1|'

      # the generated list contains the following columns:
      # $delete-if-newer-vs-not-older $arch-directory $repo-directory $pkgname $pkgver-$pkgrel $pkg-arch.pkg.tar.xz
      # the first column is "2" for "delete only if strictly older", "0" for
      # "delete if not newer" and "1" marks the package being added itself
    } | \
      expand_version 5 | \
      sort -k5V,5 -k1n,1 | \
      shrink_version 5 | \
      sed -n '
        /^1 %cut% %it% %here% /q
        s/^[02] //
        s/ \(\S\+\)$/-\1/
        p
      ' | \
      sort -u > \
      "${tmp_dir}/packages-to-delete"
    # this file contains a list of packages to be deleted, one per line:
    # $architecture-directory $repository-directory $package-name $pkgver-$pkgrel-$package-architecture.pkg.tar.xz

    cut -d' ' -f1,2 < \
      "${tmp_dir}/packages-to-delete" | \
      grep -vxF "${arch} ${repository}" | \
      sort -u > \
      "${tmp_dir}/repositories-to-modify"

    # fetch all databases being modified
    while read -r del_arch del_repo; do
      mkdir -p "${tmp_dir}/${del_arch}/${del_repo}"
      ${master_mirror_rsync_command} \
        "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/${del_repo}.db."* \
        "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/${del_repo}.files."* \
        "${tmp_dir}/${del_arch}/${del_repo}/"
    done < \
      "${tmp_dir}/repositories-to-modify"

    while read -r del_arch del_repo del_package _; do
      if [ "${del_arch}/${del_repo}" = "${arch}/${repository}" ]; then
        # we do not repo-remove the package in the target repository
        continue
      fi
      repo-remove -q "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.db.tar.gz" \
        "${del_package}"
    done < \
      "${tmp_dir}/packages-to-delete"

    # upload modified databases
    while read -r del_arch del_repo; do
      ${master_mirror_rsync_command} \
        "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.db."* \
        "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.files."* \
        "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/"
    done < \
      "${tmp_dir}/repositories-to-modify"

    sed '
      s| \(\S\+\)$|-\1|
      y| |/|
      s|^|rm "|
      s|$|"|
      p
      s|"$|.sig"|
    ' "${tmp_dir}/packages-to-delete" | \
      ${master_mirror_sftp_command}
  )
}

# wait_some_time $minimum $diff
# wait between minimum and minimum+diff seconds (diff defaults to 30)
wait_some_time() {
  local minimum=$1
  local diff=$2
  local random

  if [ -z "${diff}" ]; then
    diff=30
  fi

  random=$(
    dd if='/dev/urandom' count=1 2> /dev/null | \
      cksum | \
      cut -d' ' -f1
  )

  sleep $((minimum + random % diff))
}

# str_to_regex $string
# escape dots for use in regex
str_to_regex() {
  echo "$1" | \
    sed 's|\.|\\.|g'
}

# make_source_info $package $repository $git_revision $mod_git_revision $output
# create .SRCINFO from PKGBUILD within git repositories, output to $output
make_source_info() {
  local package="$1"
  local repository="$2"
  local git_revision="$3"
  local mod_git_revision="$4"
  local output="$5"

  local git_repo
  local PKGBUILD
  local PKGBUILD_mod

  git_repo=$(find_repository_with_commit "${git_revision}")

  if [ -z "${git_repo}" ]; then
    return 1
  fi

  find_pkgbuilds "${package}" "${repository}" "${git_repo}" "${git_revision}" "${mod_git_revision}"

  (
    tmp_dir=$(mktemp -d "${work_dir}/tmp.XXXXXX")
    trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT

    extract_source_directory "${git_repo}" "${git_revision}" "${mod_git_revision}" "${tmp_dir}"

    (
      cd "${tmp_dir}"
      makepkg --printsrcinfo
    ) > \
      "${output}"
  )
}

# recursively_umount_and_rm $dir
# umount all mountpoints in $dir which are also in $dir's
# filesystem, possibly also $dir itself and then
# rm -rf --one-file-system $dir
recursively_umount_and_rm() {
  local dir="$1"

  if [ -z "${dir}" ]; then
    >&2 echo 'ERROR: recursively_umount_and_rm requires an argument'
    exit 42
  fi

  find "${dir}" \
    -xdev -depth -type d \
    -exec 'mountpoint' '-q' '{}' ';' \
    -exec 'sudo' 'umount' '-l' '{}' ';'
  rm -rf --one-file-system "${dir}"
}

# append_architectures $PKGBUILD
# mangle $arch in PKGBUILDs to also contain i686
append_architectures() {
  local PKGBUILD="$1"

  sed -i '
    /^arch=[^#]*any/!{
      /^arch=(/s/(/(i686 /
    }
  ' "${PKGBUILD}"
}

# find_newest_of_git_revisions
# find the newest git revision of the ones provided on stdin
# (assuming linear history)
find_newest_of_git_revisions() {
  local revisions
  local repo
  revisions=$(cat)

  if [ "$(
    echo "${revisions}" | \
      wc -l
  )" -eq 1 ]; then
    echo "${revisions}"
    return
  fi

  repo=$(
    find_repository_with_commit \
      "$(
        echo "${revisions}" | \
          grep -xm1 '[0-9a-f]\{40\}'
      )"
  )

  eval 'repo="${repo_paths__'"${repo}"'}"'

  echo "${revisions}" | \
    xargs -rn1 git -C "${repo}" rev-parse | \
    (
      newest=''
      while read -r current; do
        if [ -z "${newest}" ] || \
          git -C "${repo}" merge-base --is-ancestor "${newest}" "${current}"; then
          newest="${current}"
        fi
      done
      echo "${newest}"
    )
}

# find_package_repository_to_package $package $git_repository
# find the package repository a package from a given git repository
# belongs to
find_package_repository_to_package() {
  local package="$1"
  local git_repository="$2"
  local repo_path
  local repo

  eval 'repo_path="${repo_paths__'"${git_repository}"'}"'

  repo=$(
    git -C "${repo_path}" archive "$(cat "${work_dir}/${git_repository}.revision")" -- "${package}/repos" 2> /dev/null | \
      tar -t | \
      cut -d/ -f3 | \
      grep -vxF '' | \
      grep -v 'staging\|testing\|-unstable' | \
      grep -v -- '-i686$' | \
      sed 's|-[^-]\+$||' | \
      sort -u
  )

  if [ -z "${repo}" ]; then
    return 1
  fi

  if [ "$(
    echo "${repo}" | \
      wc -l
  )" -ne 1 ]; then
    return 1
  fi

  echo "${repo}"
}
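
# Illustrative call (hypothetical package name):
#   find_package_repository_to_package 'zsh' 'packages'
# prints the single package repository (e.g. "extra") whose recorded revision
# of the git repository 'packages' carries the package, and returns non-zero
# if the answer is empty or ambiguous.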

# extract_source_directory $git_repo $rev $mod_rev $output
# extract files found in the svn/git source directories
# $PKGBUILD and $PKGBUILD_mod are expected to be set correctly
extract_source_directory() {
  local git_repo="$1"
  local rev="$2"
  local mod_rev="$3"
  local output="$4"

  if [ -n "${PKGBUILD}" ]; then
    eval 'git -C "${repo_paths__'"${git_repo}"'}" archive "${rev}" -- "${PKGBUILD%/*}"' | \
      tar -x --strip-components=3 -C "${output}"
  fi

  if [ -n "${PKGBUILD_mod}" ]; then
    git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod%/*}" | \
      tar -x --overwrite --exclude 'PKGBUILD' --strip-components=2 -C "${output}" 2> /dev/null || \
      true
    git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod}" | \
      tar -Ox "${PKGBUILD_mod}" >> \
      "${output}/PKGBUILD"
  fi

  append_architectures "${output}/PKGBUILD"

  # shellcheck disable=SC2016
  sed -i '/^\$Id\$$/d' "${output}/PKGBUILD"
}

# find_dependencies_on_build_list $package $git_revision $mod_git_revision $repository
# return a list of packages on the build list which are (run- / build- / check-time)
# dependencies of the given package
find_dependencies_on_build_list() {
  local package="$1"
  local git_revision="$2"
  local mod_git_revision="$3"
  local repository="$4"

  generate_package_metadata "${package}" "${git_revision}" "${mod_git_revision}" "${repository}"

  (
    cat "${work_dir}/package-infos/${package}.${git_revision}.${mod_git_revision}.${repository}.build-depends"
    grep -vxF 'break_loops' "${work_dir}/build-list" | \
      awk '{print $1 "." $2 "." $3 "." $4}' | \
      sed "
        s|^|${work_dir}/package-infos/|
        s|\$|\.builds|
      " | \
      xargs -r cat | \
      sort -u
  ) | \
    sort | \
    uniq -d
}

# download_sources_by_hash $package $repository $git_revision $git_mod_revision
# try to download all sources by their hash into the current directory
# returns 0 if any source was downloaded and 1 otherwise
download_sources_by_hash() {
  local package="$1"
  local repository="$2"
  local git_revision="$3"
  local git_mod_revision="$4"

  local return_value=1
  local tmp_dir
  local sum_type
  local arch_suffix

  tmp_dir=$(mktemp -d)

  if ! make_source_info "${package}" "${repository}" "${git_revision}" "${git_mod_revision}" "${tmp_dir}/.SRCINFO"; then
    >&2 echo 'download_sources_by_hash: make_source_info failed.'
    rm -rf --one-file-system "${tmp_dir}"
    return 1
  fi

  if ! [ -s "${tmp_dir}/.SRCINFO" ]; then
    >&2 echo 'download_sources_by_hash: ".SRCINFO" has not been created by make_source_info.'
    rm -rf --one-file-system "${tmp_dir}"
    return 1
  fi

  for arch_suffix in '' '_i686'; do
    for sum_type in 'sha256sum' 'sha512sum'; do
      grep "^\s*${sum_type}s${arch_suffix} = " "${tmp_dir}/.SRCINFO" | \
        sed 's|^.* = ||' | \
        cat -n > \
        "${tmp_dir}/sums"
      grep "^\s*source${arch_suffix} = " "${tmp_dir}/.SRCINFO" | \
        sed '
          s|^.* = ||
          s|::.*$||
          s|.*/||
        ' | \
        cat -n > \
        "${tmp_dir}/urls"
      if [ "$(wc -l < "${tmp_dir}/sums")" -eq "$(wc -l < "${tmp_dir}/urls")" ]; then
        join -1 1 -2 1 -o 1.2,2.2 "${tmp_dir}/sums" "${tmp_dir}/urls" > \
          "${tmp_dir}/joined"
        while read -r sum file; do
          if [ "${sum}" = 'SKIP' ]; then
            continue
          fi
          if echo "${sum} ${file}" | \
            ${sum_type} -c > /dev/null 2>&1; then
            # the correct source is already there
            continue
          fi
          if wget -O "${tmp_dir}/transfer" "${source_by_hash_mirror}${sum}"; then
            mv "${tmp_dir}/transfer" "${file}"
            return_value=0
          fi
        done < \
          "${tmp_dir}/joined"
      fi
    done
  done

  rm -rf --one-file-system "${tmp_dir}"
  return ${return_value}
}
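
# Illustrative call of download_sources_by_hash (hypothetical values), run in
# the directory that should receive the sources; ${source_by_hash_mirror} must
# point to a mirror that serves source files by their checksum:
#   cd "${build_dir}" && download_sources_by_hash 'zsh' 'extra' "${rev}" "${mod_rev}"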

# expand_version $column_num
# add "+0" to version in $column_num-th column if no "+" is there
expand_version() {
  local column_num
  column_num="$1"

  sed '
    /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*+/!s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)-/\1+0-/
  '
}

# shrink_version $column_num
# remove "+0" from version in $column_num-th column
shrink_version() {
  local column_num
  column_num="$1"

  sed '
    s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)+0-/\1-/
  '
}

# find_biggest_subset_of_packages $omega $keep $all_builds [ $force ]
# Return (to stdout) the biggest subset A of the packages in $omega whose
# run-time dependencies in $omega \cup $keep are also in A.
# $all_builds either points to an empty file - then it will get filled with
# cached data for subsequent calls - or to the same file of a previous call.
# If non-empty, $force contains packages which are assumed to match the above
# condition without checking.
# The arguments are names of files with one
# $package.$revision.$mod_revision.$repository per line.
find_biggest_subset_of_packages() {
  (
    omega="$1"
    keep="$2"
    all_builds="$3"

    if [ $# -eq 3 ]; then
      force='/dev/null'
    elif [ $# -eq 4 ]; then
      force="$4"
    else
      >&2 printf 'find_biggest_subset_of_packages: Wrong number of arguments: %s given, 3 or 4 expected.\n' "$#"
      return 2
    fi

    if [ ! -s "${all_builds}" ]; then
      find "${work_dir}/package-infos/" -maxdepth 1 -name '*.builds' \
        -exec sed '
          s|^|{} |
          s|^\S\+/||
          s|\.builds | |
        ' {} \; | \
        sort -k2,2 > \
        "${all_builds}"
    fi

    sort -u "${omega}" | \
      sponge "${omega}"

    temp_dir=$(mktemp -d)
    trap 'rm -rf --one-file-system "${temp_dir}"' EXIT

    {
      sort -u "${keep}"
      cat "${force}" "${force}"
    } | \
      sort | \
      uniq -u > \
      "${temp_dir}/keep.new"

    touch "${temp_dir}/keep"

    while [ -s "${temp_dir}/keep.new" ]; do
      cat "${temp_dir}/keep.new" "${temp_dir}/keep" | \
        sort -u | \
        sponge "${temp_dir}/keep"

      sed '
        s|^|'"${work_dir}"'/package-infos/|
        s|$|.run-depends|
      ' "${temp_dir}/keep" | \
        xargs -r grep -HF '' | \
        sed '
          s|^.*/||
          s|\.run-depends:| |
        ' | \
        sort -u | \
        sort -k2,2 | \
        uniq -f1 | \
        join -1 2 -2 2 -o 2.1 - "${all_builds}" | \
        sort -u | \
        join -1 1 -2 1 -o 2.1 - "${omega}" | \
        sort -u > \
        "${temp_dir}/keep.new"

      # "new" is only what has not been there before and what is not forced
      cat "${temp_dir}/keep" "${temp_dir}/keep" "${force}" "${force}" "${temp_dir}/keep.new" | \
        sort | \
        uniq -u | \
        sponge "${temp_dir}/keep.new"
    done

    cat "${omega}" "${temp_dir}/keep" "${temp_dir}/keep" | \
      sort | \
      uniq -u
  )
}
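
# Illustrative call (hypothetical file names): starting from the packages in
# "todo", with the packages in "done" fixed and an initially empty cache file:
#   : > "${tmp_dir}/all-builds"
#   find_biggest_subset_of_packages 'todo' 'done' "${tmp_dir}/all-builds"
# The loop above repeatedly adds to "keep" every package from $omega that
# provides a run-time dependency of an already-kept package; what is finally
# printed is $omega minus that closure. Note that `sponge` comes from moreutils.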

# sort_square_bracket_content $file
# sort the content of [] in $file, print to stdout
sort_square_bracket_content() {
  local file
  local line
  local token
  local token_list
  local rest

  file="$1"

  while read -r line; do
    printf '%s ' "${line}" | \
      tr ' ' '\n' | \
      while read -r token; do
        if echo "${token}" | \
          grep -qF '['; then
          printf '%s[' "${token%[*}"
          token="${token##*[}"
          token_list="${token%,}"
          while ! echo "${token_list}" | \
            grep -qF ']'; do
            read -r token
            token_list=$(
              printf '%s\n' \
                "${token_list}" \
                "${token%,}"
            )
          done
          rest="]${token_list#*]}"
          token_list="${token_list%%]*}"
          token=$(
            printf '%s' "${token_list}" | \
              sort | \
              sed 's|$|,|'
            printf '%s' "${rest}"
          )
        fi
        printf '%s\n' "${token}"
      done | \
      tr '\n' ' ' | \
      sed '
        s|, ]|]|g
        s| $||
      '
    printf '\n'
  done < \
    "${file}"
}

# smoothen_namcap_log $file
# remove unnecessary differences from namcap logs:
# - remove architecture specific information
# - sort lines
# - sort content of square brackets
smoothen_namcap_log() {
  local file
  file="$1"
  # shellcheck disable=SC2016
  sort_square_bracket_content "${file}" | \
    sed '
      # normalize architecture specific information
      s|i[34567]86|$ARCH|g
      s|x86\([-_]64\)\?|$ARCH|g
      # remove haskell hashes
      '"s|\('[^']*-[0-9.]\+\)-[a-zA-Z0-9]\{1,22\}\(-ghc[^']*'\)|\1\2|g"'
    ' | \
    sort | \
    sponge "${file}"
}
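
# Illustrative use of smoothen_namcap_log (hypothetical file name):
#   smoothen_namcap_log "${tmp_dir}/zsh-namcap.log"
# rewrites the log in place so that reports produced on different
# architectures (e.g. i686 vs. x86_64) can be compared, e.g. with diff.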