Diffstat (limited to 'lib/common-functions')
-rwxr-xr-x  lib/common-functions  1378
1 file changed, 1378 insertions, 0 deletions
diff --git a/lib/common-functions b/lib/common-functions
new file mode 100755
index 0000000..a84271b
--- /dev/null
+++ b/lib/common-functions
@@ -0,0 +1,1378 @@
+#!/bin/sh
+
+# contains functions used by more than one script
+
+# shellcheck disable=SC2039
+
+# TODO: include link dependencies in run-depends metadata
+
+# TODO: have full information (currently in files) in database
+
+# TODO: remove state files / metadata files
+
+if [ -z "${base_dir}" ]; then
+ # just to make shellcheck happy
+ . 'conf/default.conf'
+fi
+
+# find_pkgbuilds package repository git_repository git_revision mod_git_revision
+# find the PKGBUILD of $package in $repository and its archlinux32 modification
+# sets $PKGBUILD and $PKGBUILD_mod
+
+find_pkgbuilds() {
+
+ local package="$1"
+ local repository="$2"
+ local git_repository="$3"
+ local git_revision="$4"
+ local mod_git_revision="$5"
+
+ local repo_path
+ eval 'repo_path="${repo_paths__'"${git_repository}"'}"'
+
+ PKGBUILD=$(
+ git -C "${repo_path}" archive "${git_revision}" -- "${package}/repos/" 2> /dev/null | \
+ tar -t 2> /dev/null | \
+ grep "$(printf '^%s-.*/PKGBUILD' "$(str_to_regex "${package}/repos/${repository}")")" | \
+ grep -v -- '-i686/PKGBUILD$' | \
+ grep -v -- '[-/]\(staging\|testing\|unstable\)-[^/]\+/PKGBUILD$' | \
+ sort | \
+ tail -n1
+ )
+
+ PKGBUILD_mod=$(
+ git -C "${repo_paths__archlinux32}" archive "${mod_git_revision}" 2> /dev/null | \
+ tar -t "${repository}/${package}/PKGBUILD" 2> /dev/null
+ ) || true
+
+ if [ -z "${PKGBUILD}" ] && \
+ [ -z "${PKGBUILD_mod}" ]; then
+ >&2 printf 'Neither PKGBUILD nor modification of PKGBUILD found for package "%s" from %s (%s), revisions %s and %s.\n' \
+ "${package}" \
+ "${repository}" \
+ "${git_repository}" \
+ "${git_revision}" \
+ "${mod_git_revision}"
+ return 1
+ fi
+
+}
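+
+# illustrative usage only (hypothetical package and revision files; the
+# repo_paths__* and work_dir variables are assumed to come from conf/default.conf):
+#   find_pkgbuilds 'zsh' 'extra' 'packages' \
+#     "$(cat "${work_dir}/packages.revision")" \
+#     "$(cat "${work_dir}/archlinux32.revision")"
+#   printf '%s\n%s\n' "${PKGBUILD}" "${PKGBUILD_mod}"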
+
+# find_repository_with_commit commit
+# find the repository which has $commit
+
+find_repository_with_commit() {
+
+ local repository
+
+ for repository in ${repo_names}; do
+ # shellcheck disable=SC2016
+ if [ "$(eval git -C "$(printf '"${repo_paths__%s}"' "${repository}")" cat-file -t '"$1"' 2> /dev/null)" = "commit" ]; then
+ echo "${repository}"
+ return 0
+ fi
+ done
+ >&2 printf 'find_repository_with_commit: Cannot find repository with commit "%s"\n' "$1"
+ exit 1
+
+}
+
+# find_git_repository_to_package_repository repository
+# find the git repository which tracks the package repository $repository
+
+find_git_repository_to_package_repository() {
+
+ local repository
+ local package_repository
+ local repo_path
+
+ package_repository="$1"
+
+ if [ "$1" = 'build-support' ]; then
+ echo 'packages'
+ return 0
+ fi
+
+ for repository in ${repo_names}; do
+ if [ "${repository}" = "archlinux32" ]; then
+ continue
+ fi
+ eval 'repo_path="${repo_paths__'"${repository}"'}"'
+ if git -C "${repo_path}" archive "$(cat "${work_dir}/${repository}.revision")" -- | \
+ tar -t --wildcards '*/repos' | \
+ grep '^\([^/]\+/\)\{3\}PKGBUILD$' | \
+ cut -d/ -f3 | \
+ sed 's|-[^-]\+$||' | \
+ sort -u | \
+ grep -qxF "${package_repository}"; then
+ echo "${repository}"
+ return 0
+ fi
+ done
+ >&2 echo "can't find git repository with package repository '$1'"
+ exit 1
+
+}
+
+# generate_package_metadata $package $git_revision $mod_git_revision $repository
+# or
+# generate_package_metadata $package.$git_revision.$mod_git_revision.$repository
+# generate the metadata files of a package (dependencies, built packages, ...)
+
+generate_package_metadata() {
+
+ local package="$1"
+ local git_revision="$2"
+ local mod_git_revision="$3"
+ local repository="$4"
+ local file_prefix
+ local file
+ local PKGBUILD
+
+ if [ $# -eq 1 ]; then
+ # second form
+ repository="${package##*.}"
+ package="${package%.*}"
+ mod_git_revision="${package##*.}"
+ package="${package%.*}"
+ git_revision="${package##*.}"
+ package="${package%.*}"
+ fi
+
+ file_prefix="${work_dir}/package-infos/${package}.${git_revision}.${mod_git_revision}.${repository}"
+
+ if [ -e "${file_prefix}.builds" ] && \
+ [ -e "${file_prefix}.build-depends" ] && \
+ [ -e "${file_prefix}.run-depends" ] && \
+ [ -e "${file_prefix}.groups" ] && \
+ [ -e "${file_prefix}.packages" ]; then
+ return 0
+ fi
+
+ if ! make_source_info "${package}" "${repository}" "${git_revision}" "${mod_git_revision}" "${file_prefix}.SRCINFO"; then
+ printf '"make_source_info %s %s %s %s %s" failed.\n' "${package}" "${repository}" "${git_revision}" "${mod_git_revision}" "${file_prefix}.SRCINFO"
+ exit 1
+ fi
+ if [ ! -s "${file_prefix}.SRCINFO" ]; then
+ >&2 printf '"%s" not created by "make_source_info" - eh, what?' "${file_prefix}.SRCINFO"
+ exit 1
+ fi
+
+ # otherwise this just calls for trouble
+ sed -i '
+ /^[^=]*=\s*$/d
+ s/_i686\(\s*=\)/\1/
+ ' "${file_prefix}.SRCINFO"
+
+ # extract "groups" = groups \cup provides
+ grep "$(printf '^\t\\(groups\\|provides\\) = ')" "${file_prefix}.SRCINFO" | \
+ cut -d= -f2 | \
+ sed 's|^\s\+||; s|[<>]$||' | \
+ sort -u > \
+ "${file_prefix}.groups"
+
+ # extract "packages" = pkgname
+ grep '^pkgname = ' "${file_prefix}.SRCINFO" | \
+ cut -d= -f2 | \
+ sed 's|^\s\+||; s|[<>]$||' | \
+ sort -u > \
+ "${file_prefix}.packages"
+
+ # extract "builds" = provides \cup pkgname \cup groups
+ cat "${file_prefix}.groups" "${file_prefix}.packages" | \
+ sort -u > \
+ "${file_prefix}.builds"
+
+ # extract "build-depends" = makedepends \cup depends \cup \{ base, base-devel \} \setminus "builds"
+ {
+ {
+ printf 'all_depend = %s\n' 'base' 'base-devel'
+ sed -n "$(
+ printf '/^\t%s = /p\n' \
+ 'depends' \
+ 'makedepends'
+ )" "${file_prefix}.SRCINFO"
+ } | \
+ cut -d= -f2 | \
+ sed 's|^\s\+||; s|[<>]$||' | \
+ sort -u
+ sed 'p' "${file_prefix}.builds"
+ } | \
+ sort | \
+ uniq -u > \
+ "${file_prefix}.build-depends"
+
+ # extract "run-depends" = depends \cup \{ base \} \setminus "builds"
+ {
+ {
+ printf 'all_depend = %s\n' 'base'
+ sed -n "$(printf '/^\tdepends = /p')" "${file_prefix}.SRCINFO"
+ } | \
+ cut -d= -f2 | \
+ sed 's|^\s\+||; s|[<>]$||' | \
+ sort -u
+ sed 'p' "${file_prefix}.builds"
+ } | \
+ sort | \
+ uniq -u > \
+ "${file_prefix}.run-depends"
+
+ rm "${file_prefix}.SRCINFO"
+
+}
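+
+# example (hypothetical revisions) - both calling forms are equivalent:
+#   generate_package_metadata 'zsh' "${rev}" "${mod_rev}" 'extra'
+#   generate_package_metadata "zsh.${rev}.${mod_rev}.extra"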
+
+# delete_old_metadata
+# delete old (= unneeded) metadata of packages
+
+delete_old_metadata() {
+
+ local current_metadata
+
+ current_metadata=$(
+ find "${work_dir}/package-infos" -mindepth 1 -maxdepth 1 -printf '%f\n' | \
+ sed '
+ s|\.[^.]\+$||
+ s|\.\([^.]\+\)\.\([^.]\+\)\.\([^.]\+\)$| \1 \2 \3|
+ ' | \
+ sort -u
+ )
+
+ ( # the new shell is intentional
+ # what we have
+ echo "${current_metadata}"
+
+ # package-states should stay
+ find "${work_dir}/package-states" -mindepth 1 -maxdepth 1 -printf '%f\n' | \
+ sed '
+ s|\.\([^.]\+\)\.\([^.]\+\)\.\([^.]\+\)\.[^.]\+$| \1 \2 \3|
+ ' | \
+ sort -u | \
+ sed 'p'
+
+ # build-list items should stay
+ sed 'p' "${work_dir}/build-list"
+
+ tmp_dir=$(mktemp -d 'tmp.common-functions.delete_old_metadata.XXXXXXXXXX' --tmpdir)
+ trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT
+
+ echo "${current_metadata}" | \
+ sort -k1,1 > \
+ "${tmp_dir}/current-metadata"
+
+ # the newest of the following should stay:
+ {
+ # deletion-list items
+ cat "${work_dir}/deletion-list"
+ # all packages in the repos
+ for repo in ${repo_names}; do
+ eval 'git -C "${repo_paths__'"${repo}"'}" archive '"$(cat "${work_dir}/${repo}.revision")" | \
+ tar -t | \
+ sed '
+ s|/$||
+ /\//d
+ '
+ done
+ } | \
+ sort -u | \
+ join -j 1 -o 2.2,2.3,2.4,2.1 - "${tmp_dir}/current-metadata" | \
+ sort -k4,4 > \
+ "${tmp_dir}/find-newest-revisions"
+
+ uniq -uf3 < \
+ "${tmp_dir}/find-newest-revisions" | \
+ awk '{print $4 " " $1 " " $2 " " $3}' | \
+ sed 'p'
+
+ uniq -Df3 < \
+ "${tmp_dir}/find-newest-revisions" | \
+ uniq --group=append -f3 | \
+ {
+ revs=''
+ mod_revs=''
+ opkg=''
+ orepo=''
+ while read -r rev mod_rev repo pkg; do
+
+ if [ -z "${rev}" ] && \
+ [ -z "${mod_rev}" ] && \
+ [ -z "${repo}" ] && \
+ [ -z "${pkg}" ]; then
+
+ printf '%s %s %s %s\n' \
+ "$(
+ printf '%s\n' ${revs} | \
+ find_newest_of_git_revisions
+ )" \
+ "$(
+ printf '%s\n' ${mod_revs} | \
+ find_newest_of_git_revisions
+ )" \
+ "${orepo}" \
+ "${opkg}"
+
+ revs=''
+ mod_revs=''
+ orepo=''
+ opkg=''
+ continue
+ fi
+ revs=$(
+ # shellcheck disable=SC2086
+ printf '%s\n' ${revs} ${rev} | \
+ sort -u
+ )
+ mod_revs=$(
+ # shellcheck disable=SC2086
+ printf '%s\n' ${mod_revs} ${mod_rev} | \
+ sort -u
+ )
+ orepo="${repo}"
+ opkg="${pkg}"
+ done
+ } | \
+ awk '{print $4 " " $1 " " $2 " " $3}' | \
+ sed 'p'
+ ) | \
+ sort | \
+ uniq -u | \
+ while read -r pkg rev mod_rev repo; do
+ rm -f "${work_dir}/package-infos/${pkg}.${rev}.${mod_rev}.${repo}."*
+ done
+}
+
+# repository_of_package $package.$repo_revision.$mod_repo_revision.$repository
+# print which (stable) repository a package belongs to
+
+repository_of_package() {
+ local package="$1"
+ local repository="${package##*.}"
+ package="${package%.*}"
+ local a32_rev="${package##*.}"
+ package="${package%.*.*}"
+
+ case "${repository}" in
+ 'multilib')
+ if git -C "${repo_paths__archlinux32}" archive --format=tar "${a32_rev}" -- 'extra-from-multilib' | \
+ tar -Ox | \
+ grep -qFx "${package%.*.*.*}"; then
+ echo 'extra'
+ else
+ echo 'community'
+ fi
+ ;;
+ *)
+ echo "${repository}"
+ esac
+}
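+
+# example (hypothetical revisions): prints 'extra' or 'community' for a
+# multilib package, depending on the extra-from-multilib list, and the
+# repository name itself for everything else:
+#   repository_of_package "lib32-zlib.${rev}.${mod_rev}.multilib"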
+
+# official_or_community $package.$repo_revision.$mod_repo_revision.$repository $ending
+# print whether the specified package is an official package (print
+# $ending) or a community package (print 'community-$ending') or a
+# build-support package (print 'build-support')
+
+official_or_community() {
+ local prepo
+ prepo=$(repository_of_package "$1")
+
+ if [ "${prepo}" = 'community' ]; then
+ echo 'community-'"$2"
+ elif [ "${prepo}" = 'build-support' ]; then
+ echo 'build-support'
+ else
+ echo "$2"
+ fi
+}
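+
+# example (hypothetical revisions): prints 'staging' for a package from
+# 'extra' and would print 'community-staging' for one from 'community':
+#   official_or_community "zsh.${rev}.${mod_rev}.extra" 'staging'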
+
+# ls_master_mirror $path
+# list content of $path on the master mirror (via rsync)
+
+ls_master_mirror() {
+
+ local path="$1"
+
+ ${master_mirror_rsync_command} \
+ "${master_mirror_rsync_directory}/${path}/" | \
+ grep -v '\s\.$' | \
+ awk '{print $5}'
+
+}
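+
+# example (hypothetical path on the master mirror):
+#   ls_master_mirror 'i686/extra'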
+
+# TODO: the actions of remove_old_package_versions should be done
+# on basis of the information in the database
+
+# remove_old_package_versions $arch $repository $package_file
+
+# removes all older versions of $package_file in $repository and in all
+# less stable repositories, and all not-newer versions in all other
+# repositories of the same stability as $repository
+
+# TODO: should remove all other version (also newer) from
+# some repositories :-/
+
+# A package is considered not newer if
+# a) its version is not newer
+# A package is considered older if
+# b) its version is older or
+# c) if it's "not newer" and its architecture is 'any' and different or
+# d) if it's "not newer" and the other architecture is 'any' and different
+
+# this ensures that an 'any' package may replace arch-specific packages of the same version and vice versa
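+
+# worked example (hypothetical versions), with foo-2:1.0-1.0-any.pkg.tar.xz as $package_file:
+#   foo-2:0.9-1.0-any.pkg.tar.xz  is "older" by rule b),
+#   foo-2:1.0-1.0-i686.pkg.tar.xz is "older" by rule d),
+#   foo-2:1.0-1.0-any.pkg.tar.xz  is "not newer" by rule a)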
+
+remove_old_package_versions() {
+
+ local arch="$1"
+ local repository="$2"
+ local package="$3"
+
+ local pkgname
+ local epoch
+ local pkgver
+ local pkgrel
+ local sub_pkgrel
+ pkgname="${package%-*}"
+ pkgrel="${pkgname##*-}"
+ sub_pkgrel="${pkgrel##*.}"
+ if [ "${sub_pkgrel}" = "${pkgrel}" ]; then
+ sub_pkgrel='0'
+ else
+ pkgrel="${pkgrel%.*}"
+ fi
+ pkgname="${pkgname%-*}"
+ pkgver="${pkgname##*-}"
+ epoch="${pkgver%%:*}"
+ if [ "${epoch}" = "${pkgver}" ]; then
+ epoch='0'
+ else
+ pkgver="${pkgver#*:}"
+ fi
+ pkgname="${pkgname%-*}"
+
+ # shellcheck disable=SC2016
+ {
+ printf 'SELECT "bogus",CONCAT(from_base64("%s"),"/",from_base64("%s")),1,from_base64("%s");\n' \
+ "$(
+ printf '%s' "${repository}" | \
+ base64 -w0
+ )" \
+ "$(
+ printf '%s' "${package}" | \
+ base64 -w0
+ )" \
+ "$(
+ printf '%s' "${package}" | \
+ sed '
+ s/^.*-\([^-]\+-[^-]\+\)-[^-]\+$/\1/
+ ' | \
+ base64 -w0
+ )"
+ printf 'SELECT '
+ printf '`binary_packages`.`id`,'
+ printf 'CONCAT(`repositories`.`name`,"/",'
+ mysql_package_name_query
+ printf '),'
+ # should we delete packages of identical version?
+ printf 'IF((`more_stable_repos`.`id`!=`repositories`.`id`) AND (`more_stable_repos`.`stability`=`repositories`.`stability`),2,0),'
+ printf 'CONCAT('
+ printf 'IF(`binary_packages`.`epoch`=0,"",CONCAT(`binary_packages`.`epoch`,":")),'
+ printf '`binary_packages`.`pkgver`,"-",'
+ printf '`binary_packages`.`pkgrel`,".",'
+ printf '`binary_packages`.`sub_pkgrel`'
+ printf ')'
+ printf ' FROM `binary_packages`'
+ mysql_join_binary_packages_repositories
+ mysql_join_binary_packages_architectures
+ printf ' JOIN `repository_stability_relations` ON `repository_stability_relations`.`less_stable`=`repositories`.`stability`'
+ printf ' JOIN `repositories` AS `more_stable_repos` ON `repository_stability_relations`.`more_stable`=`more_stable_repos`.`stability`'
+ # name must match
+ printf ' WHERE `binary_packages`.`pkgname`=from_base64("%s")' \
+ "$(printf '%s' "${package%-*-*-*}" | base64 -w0)"
+ # repository, where package should be deleted, should be less stable
+ printf ' AND `more_stable_repos`.`name`=from_base64("%s")' \
+ "$(printf '%s' "${repository}" | base64 -w0)"
+ printf ';\n'
+ } | \
+ ${mysql_command} --raw --batch | \
+ sed '
+ /^\S\+\sCONCAT(/d
+ y/\t/ /
+ ' | \
+ expand_version 4 | \
+ sort -k4V,4 -k3r,3 | \
+ shrink_version 4 | \
+ sed -n '
+ /^bogus /q
+ p
+ ' | \
+ cut -d' ' -f1,2 >&2
+
+ # repositories in which older packages should be deleted
+ local delete_older_repositories
+ # repositories in which not-newer packages should be deleted
+ local delete_not_newer_repositories
+
+ if echo "${standalone_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories="${repository}"
+ delete_not_newer_repositories=''
+
+ elif echo "${staging_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories="${repository}"
+ delete_not_newer_repositories=$(
+ echo "${staging_package_repositories}" | \
+ grep -vxF "${repository}"
+ ) || true
+
+ elif echo "${testing_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories=$(
+ printf '%s\n' "${staging_package_repositories}" "${repository}"
+ )
+ delete_not_newer_repositories=$(
+ echo "${testing_package_repositories}" | \
+ grep -vxF "${repository}"
+ ) || true
+
+ elif echo "${stable_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories=$(
+ printf '%s\n' "${staging_package_repositories}" "${testing_package_repositories}" "${repository}"
+ )
+ delete_not_newer_repositories=$(
+ echo "${stable_package_repositories}" | \
+ grep -vxF "${repository}"
+ ) || true
+
+ else
+
+ >&2 printf 'remove_old_package_versions: Unknown repository "%s".\n' "${repository}"
+ return 1
+
+ fi
+
+ ( # the new shell is intentional
+ tmp_dir=$(mktemp -d 'tmp.common-functions.remove_old_package_versions.XXXXXXXXXX' --tmpdir)
+ trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT
+
+ {
+ # the architecture of the package (any vs. i686)
+ package_arch="${package##*-}"
+ package_arch="${package_arch%%.*}"
+ if [ "${package_arch}" = 'any' ]; then
+ package_arch_regex_inverter='!'
+ else
+ unset package_arch_regex_inverter
+ fi
+
+ for repo in ${delete_older_repositories}; do
+ ls_master_mirror "${arch}/${repo}" | \
+ sed -n '
+ /\.pkg\.tar\.xz$/!d
+ s|-\([^-]\+-[^-]\+\)-\([^-]\+\)$| \1 \2|
+ /^'"$(str_to_regex "${package%-*-*-*}")"' / {
+ s|^|2 '"${arch} ${repo}"' |
+ / any\.pkg\.tar\.xz$/'"${package_arch_regex_inverter}"'{
+ s|^2|0|
+ }
+ p
+ }
+ '
+ done
+ for repo in ${delete_not_newer_repositories}; do
+ ls_master_mirror "${arch}/${repo}" | \
+ sed -n '
+ /\.pkg\.tar\.xz$/!d
+ s|-\([^-]\+-[^-]\+\)-\([^-]\+\)$| \1 \2|
+ /^'"$(str_to_regex "${package%-*-*-*}")"' / {
+ s|^|0 '"${arch} ${repo}"' |
+ p
+ }
+ '
+ done
+ echo "${package%-*}" | \
+ sed 's|^.*-\([^-]\+-[^-]\+\)$|1 %cut% %it% %here% \1|'
+
+ # the generated list contains the following columns:
+ # $delete-if-newer-vs-not-older $arch-directory $repo-directory $pkgname $pkgver-$pkgrel $pkg-arch.pkg.tar.xz
+ } | \
+ expand_version 5 | \
+ sort -k5V,5 -k1n,1 | \
+ shrink_version 5 | \
+ sed -n '
+ /^1 %cut% %it% %here% /q
+ s/^[02] //
+ s/ \(\S\+\)$/-\1/
+ p
+ ' | \
+ sort -u > \
+ "${tmp_dir}/packages-to-delete"
+ # this file contains a list of packages to be deleted, one on each line:
+ # $architecture-directory $repository-directory $package-name $pkgver-$pkgrel-$package-architecture.pkg.tar.xz
+
+ cut -d' ' -f1,2 < \
+ "${tmp_dir}/packages-to-delete" | \
+ grep -vxF "${arch} ${repository}" | \
+ sort -u > \
+ "${tmp_dir}/repositories-to-modify"
+
+ # fetch all databases being modified
+ while read -r del_arch del_repo; do
+ mkdir -p "${tmp_dir}/${del_arch}/${del_repo}"
+ ${master_mirror_rsync_command} \
+ "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/${del_repo}.db."* \
+ "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/${del_repo}.files."* \
+ "${tmp_dir}/${del_arch}/${del_repo}/"
+ done < \
+ "${tmp_dir}/repositories-to-modify"
+
+ while read -r del_arch del_repo del_package _; do
+ if [ "${del_arch}/${del_repo}" = "${arch}/${repository}" ]; then
+ # we do not repo-remove the package in the target repository
+ continue
+ fi
+ repo-remove -q "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.db.tar.gz" \
+ "${del_package}"
+ done < \
+ "${tmp_dir}/packages-to-delete"
+
+ # upload modified databases
+ while read -r del_arch del_repo; do
+ ${master_mirror_rsync_command} \
+ "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.db."* \
+ "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.files."* \
+ "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/"
+ done < \
+ "${tmp_dir}/repositories-to-modify"
+
+ # shellcheck disable=SC2016
+ sed '
+ s/\.pkg\.tar\.xz$//
+ s/^\S\+ //
+ s/-\([^-. ]\+\)\(-[^- ]\+\)$/-\1.0\2/
+ s/ \([^-: ]\+\(-[^- ]\+\)\{2\}\)$/ 0:\1/
+ s/ \([^-.]\+\):\([^-:]\+\)-\([^-.]\+\)\.\([^-.]\+\)-\([^-]\+\)$/ \1 \2 \3 \4 \5/
+ ' "${tmp_dir}/packages-to-delete" | \
+ while read -r repo pkgname epoch pkgver pkgrel sub_pkgrel arch; do
+ printf 'DELETE FROM `binary_packages` WHERE'
+ printf ' `binary_packages`.`%s`=(SELECT `%s`.`id` FROM `%s` WHERE `%s`.`name`=from_base64("%s")) AND' \
+ 'architecture' 'architectures' 'architectures' 'architectures' "$(printf '%s' "${arch}" | base64 -w0)" \
+ 'repository' 'repositories' 'repositories' 'repositories' "$(printf '%s' "${repo}" | base64 -w0)"
+ printf ' `binary_packages`.`%s`=from_base64("%s") AND' \
+ 'pkgname' "$(printf '%s' "${pkgname}" | base64 -w0)" \
+ 'epoch' "$(printf '%s' "${epoch}" | base64 -w0)" \
+ 'pkgver' "$(printf '%s' "${pkgver}" | base64 -w0)" \
+ 'pkgrel' "$(printf '%s' "${pkgrel}" | base64 -w0)" \
+ 'sub_pkgrel' "$(printf '%s' "${sub_pkgrel}" | base64 -w0)" | \
+ sed 's/ AND$//'
+ printf ';\n'
+ done | \
+ ${mysql_command}
+
+ sed '
+ s| \(\S\+\)$|-\1|
+ y| |/|
+ s|^|rm "|
+ s|$|"|
+ p
+ s|"$|.sig"|
+ ' "${tmp_dir}/packages-to-delete" | \
+ ${master_mirror_sftp_command}
+ )
+
+}
+
+# wait_some_time $minimum $diff
+# wait between minimum and minimum+diff seconds (diff defaults to 30)
+
+wait_some_time() {
+ local minimum=$1
+ local diff=$2
+ local random
+
+ if [ -z "${diff}" ]; then
+ diff=30
+ fi
+
+ random=$(
+ dd if='/dev/urandom' count=1 2> /dev/null | \
+ cksum | \
+ cut -d' ' -f1
+ )
+
+ sleep $((minimum + random % diff))
+}
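+
+# example: sleep for 60 seconds plus up to 30 random seconds
+#   wait_some_time 60
+# example: sleep for 10 seconds plus up to 5 random seconds
+#   wait_some_time 10 5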
+
+# str_to_regex $string
+# escape dots and opening brackets for use in a regular expression
+
+str_to_regex() {
+ echo "$1" | \
+ sed '
+ s|[.[]|\\\0|g
+ '
+}
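+
+# example:
+#   str_to_regex 'gtk2.0'   # should print 'gtk2\.0', safe for grep/sed patterns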
+
+# make_source_info $package $repository $git_revision $mod_git_revision $output
+# create .SRCINFO from PKGBUILD within git repositories, output to $output
+
+make_source_info() {
+
+ local package="$1"
+ local repository="$2"
+ local git_revision="$3"
+ local mod_git_revision="$4"
+ local output="$5"
+
+ local git_repo
+ local PKGBUILD
+ local PKGBUILD_mod
+
+ git_repo=$(find_repository_with_commit "${git_revision}")
+
+ if [ -z "${git_repo}" ]; then
+ return 1
+ fi
+
+ find_pkgbuilds "${package}" "${repository}" "${git_repo}" "${git_revision}" "${mod_git_revision}"
+
+ ( # the new shell is intentional
+
+ tmp_dir=$(mktemp -d "${work_dir}/tmp.make_source_info.XXXXXX")
+ trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT
+
+ extract_source_directory "${git_repo}" "${git_revision}" "${mod_git_revision}" "${tmp_dir}" '0'
+
+ {
+ cd "${tmp_dir}"
+ makepkg --printsrcinfo
+ cd ..
+ } |
+ if [ "${package%-i18n}-i18n" = "${package}" ]; then
+ sed '
+ 1 a \\tdepends = '"${package%-i18n}"'
+ '
+ else
+ cat
+ fi > \
+ "${output}"
+
+ )
+
+}
+
+# recursively_umount_and_rm $dir
+# umount all mountpoints in $dir which are also in $dir's
+# filesystem, possibly also $dir itself and then
+# rm -rf --one-file-system $dir
+
+recursively_umount_and_rm() {
+ local dir="$1"
+
+ if [ -z "${dir}" ]; then
+ >&2 echo 'ERROR: recursively_umount_and_rm requires an argument'
+ exit 42
+ fi
+
+ find "${dir}" \
+ -xdev -depth -type d \
+ -exec 'mountpoint' '-q' '{}' ';' \
+ -exec 'sudo' 'umount' '-l' '{}' ';'
+ rm -rf --one-file-system "${dir}"
+}
+
+# mangle_pkgbuild $PKGBUILD [$sub_pkgrel]
+# mangle $arch in PKGBUILDs to include i686
+# append $sub_pkgrel to the pkgrel
+
+mangle_pkgbuild() {
+ local PKGBUILD="$1"
+ local sub_pkgrel="$2"
+
+ if [ -n "${sub_pkgrel}" ]; then
+ sub_pkgrel=".${sub_pkgrel}"
+ fi
+
+ sed -i '
+ /^arch=[^#]*any/!{
+ /^arch=(/s/(/(i686 /
+ }
+ s/^\(\s*pkgrel=\)['"'"'"]\?\([0-9.]\+\)['"'"'"]\?\s*\(#.*\)\?$/\1"\2'"${sub_pkgrel}"'"/
+ ' "${PKGBUILD}"
+}
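+
+# example (hypothetical PKGBUILD): mangle_pkgbuild './PKGBUILD' '1' rewrites
+#   arch=('x86_64')   ->   arch=(i686 'x86_64')
+#   pkgrel=3          ->   pkgrel="3.1"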
+
+# find_newest_of_git_revisions
+# find newest git revision of the ones provided on stdin
+# (assuming linear history)
+
+find_newest_of_git_revisions() {
+ local revisions
+ local repo
+ revisions=$(cat)
+
+ if [ "$(
+ echo "${revisions}" | \
+ wc -l
+ )" -eq 1 ]; then
+
+ echo "${revisions}"
+ return
+
+ fi
+
+ repo=$(
+ find_repository_with_commit \
+ "$(
+ echo "${revisions}" | \
+ grep -xm1 '[0-9a-f]\{40\}'
+ )"
+ )
+
+ eval 'repo="${repo_paths__'"${repo}"'}"'
+
+ echo "${revisions}" | \
+ xargs -rn1 git -C "${repo}" rev-parse | \
+ {
+ newest=''
+ while read -r current; do
+ if [ -z "${newest}" ] || \
+ git -C "${repo}" merge-base --is-ancestor "${newest}" "${current}"; then
+ newest="${current}"
+ fi
+ done
+ echo "${newest}"
+ }
+}
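+
+# example (hypothetical commit hashes of the same repository):
+#   printf '%s\n' "${commit_a}" "${commit_b}" | \
+#     find_newest_of_git_revisions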
+
+# find_package_repository_to_package $package $git_repository $git_commit
+# find the package repository a package from a given git repository
+# belongs to
+
+find_package_repository_to_package() {
+
+ local package="$1"
+ local git_repository="$2"
+ local git_commit="$3"
+ local repo_path
+ local repo
+
+ eval 'repo_path="${repo_paths__'"${git_repository}"'}"'
+
+ if [ "${git_repository}" = 'archlinux32' ]; then
+ repo=$(
+ git -C "${repo_path}" archive "${git_commit}" -- | \
+ tar -t --wildcards "*/${package}/" | \
+ cut -d/ -f1 | \
+ sort -u
+ )
+ else
+ repo=$(
+ git -C "${repo_path}" archive "${git_commit}" -- "${package}/repos" 2> /dev/null | \
+ tar -t | \
+ cut -d/ -f3 | \
+ grep -vxF '' | \
+ grep -v 'staging\|testing\|-unstable' | \
+ grep -v -- '-i686$' | \
+ sed 's|-[^-]\+$||' | \
+ sort -u
+ )
+ fi
+
+ if [ -z "${repo}" ]; then
+ return 1
+ fi
+
+ if [ "$(
+ echo "${repo}" | \
+ wc -l
+ )" -ne 1 ]; then
+ return 1
+ fi
+
+ echo "${repo}"
+
+}
+
+# extract_source_directory $git_repo $rev $mod_rev $output $sub_pkgrel
+# extract files found in the svn/git source directories
+# $PKGBUILD and $PKGBUILD_mod are expected to be set correctly
+
+extract_source_directory() {
+
+ local git_repo="$1"
+ local rev="$2"
+ local mod_rev="$3"
+ local output="$4"
+ local sub_pkgrel="$5"
+
+ if [ -n "${PKGBUILD}" ]; then
+ eval 'git -C "${repo_paths__'"${git_repo}"'}" archive "${rev}" -- "${PKGBUILD%/*}"' | \
+ tar -x --strip-components=3 -C "${output}"
+ fi
+
+ if [ -n "${PKGBUILD_mod}" ]; then
+ git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod%/*}" | \
+ tar -x --overwrite --exclude 'PKGBUILD' --strip-components=2 -C "${output}" 2> /dev/null || \
+ true
+ git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod}" | \
+ tar -Ox "${PKGBUILD_mod}" >> \
+ "${output}/PKGBUILD"
+ fi
+
+ mangle_pkgbuild "${output}/PKGBUILD" "${sub_pkgrel}"
+
+ # shellcheck disable=SC2016
+ sed -i '/^\$Id\$$/d' "${output}/PKGBUILD"
+
+}
+
+# find_dependencies_on_build_list $package $git_revision $mod_git_revision $repository
+# return a list of packages on the build list which are (run- / build- / check-time)
+# dependencies of the given package
+
+find_dependencies_on_build_list() {
+
+ local package="$1"
+ local git_revision="$2"
+ local mod_git_revision="$3"
+ local repository="$4"
+
+ generate_package_metadata "${package}" "${git_revision}" "${mod_git_revision}" "${repository}"
+
+ {
+ cat "${work_dir}/package-infos/${package}.${git_revision}.${mod_git_revision}.${repository}.build-depends"
+ awk '{print $1 "." $2 "." $3 "." $4}' < \
+ "${work_dir}/build-list" | \
+ sed '
+ s|^|'"${work_dir}"'/package-infos/|
+ s|$|\.builds|
+ ' | \
+ xargs -r cat | \
+ sort -u
+ } | \
+ sort | \
+ uniq -d
+
+}
+
+# download_sources_by_hash $package $repository $git_revision $git_mod_revision
+# try to download all sources by their hash into the current directory
+# returns 0 if any source was downloaded and 1 otherwise
+
+download_sources_by_hash() {
+
+ local package="$1"
+ local repository="$2"
+ local git_revision="$3"
+ local git_mod_revision="$4"
+
+ local return_value=1
+ local tmp_dir
+ local sum_type
+ local arch_suffix
+
+ tmp_dir=$(mktemp -d 'tmp.common-functions.download_sources_by_hash.XXXXXXXXXX' --tmpdir)
+
+ if ! make_source_info "${package}" "${repository}" "${git_revision}" "${git_mod_revision}" "${tmp_dir}/.SRCINFO"; then
+ >&2 echo 'download_sources_by_hash: make_source_info failed.'
+ rm -rf --one-file-system "${tmp_dir}"
+ return 1
+ fi
+
+ if ! [ -s "${tmp_dir}/.SRCINFO" ]; then
+ >&2 echo 'download_sources_by_hash: ".SRCINFO" has not been created by make_source_info.'
+ rm -rf --one-file-system "${tmp_dir}"
+ return 1
+ fi
+
+ for arch_suffix in '' '_i686'; do
+ for sum_type in 'sha256sum' 'sha512sum'; do
+ grep '^\s*'"${sum_type}s${arch_suffix}"' = ' "${tmp_dir}/.SRCINFO" | \
+ sed 's|^.* = ||' | \
+ cat -n > \
+ "${tmp_dir}/sums"
+ grep '^\s*source'"${arch_suffix}"' = ' "${tmp_dir}/.SRCINFO" | \
+ sed '
+ s|^.* = ||
+ s|::.*$||
+ s|.*/||
+ ' | \
+ cat -n > \
+ "${tmp_dir}/urls"
+ if [ "$(wc -l < "${tmp_dir}/sums")" -eq "$(wc -l < "${tmp_dir}/urls")" ]; then
+ join -1 1 -2 1 -o 1.2,2.2 "${tmp_dir}/sums" "${tmp_dir}/urls" > \
+ "${tmp_dir}/joined"
+ while read -r sum file; do
+ if [ "${sum}" = 'SKIP' ]; then
+ continue
+ fi
+ if echo "${sum} ${file}" | \
+ ${sum_type} -c > /dev/null 2>&1; then
+ # the correct source is already there
+ continue
+ fi
+ if wget -O "${tmp_dir}/transfer" "${source_by_hash_mirror}${sum}"; then
+ mv "${tmp_dir}/transfer" "${file}"
+ return_value=0
+ fi
+ done < \
+ "${tmp_dir}/joined"
+ fi
+ done
+ done
+
+ rm -rf --one-file-system "${tmp_dir}"
+ return ${return_value}
+
+}
+
+# expand_version $column_num
+# add "0:" to version in $colum_num-th column if no ":" is there (epoch)
+# add "+0" to version in $colum_num-th column if no "+" is there (git count/hash)
+
+expand_version() {
+ local column_num
+ column_num="$1"
+
+ sed '
+ /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*+/! s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)-/\1+0-/
+ /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*:/! s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\)/\10:/
+ '
+}
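+
+# example: with $column_num=1, a version '1.2.3-4' becomes '0:1.2.3+0-4',
+# while '2:1.2.3+7-4' is left untouched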
+
+# shrink_version $column_num
+# remove "0:" from version in $colum_num-th column (epoch)
+# remove "+0" from version in $colum_num-th column (git count/hash)
+
+shrink_version() {
+ local column_num
+ column_num="$1"
+
+ sed '
+ s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)+0-/\1-/
+ s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\)0:/\1/
+ '
+}
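+
+# example: with $column_num=1, shrink_version reverses the above:
+# '0:1.2.3+0-4' becomes '1.2.3-4' again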
+
+# find_biggest_subset_of_packages $omega $keep $all_builds $all_depends [ $force ]
+
+# Return (to stdout) the biggest subset A of the packages in $omega whose
+# runtime dependencies in $omega \cup $keep are also in A
+
+# $all_builds and $all_depends either point to an empty file - then they will get
+# filled with cached data for subsequent calls - or to the same files of a previous
+# call
+
+# If non-empty, $force contains packages which are assumed to match the above
+# condition without checking.
+
+# The arguments are names of files with one $package.$revision.$mod_revision.$repository
+# per line.
+
+find_biggest_subset_of_packages() {
+
+ ( # the new shell is intentional
+ omega="$1"
+ keep="$2"
+ all_builds="$3"
+ all_depends="$4"
+ if [ $# -eq 4 ]; then
+ force='/dev/null'
+ elif [ $# -eq 5 ]; then
+ force="$5"
+ else
+ >&2 printf 'find_biggest_subset_of_packages: Wrong number of arguments: %s given, 4 or 5 expected.' "$#"
+ return 2
+ fi
+
+ if [ ! -s "${all_builds}" ]; then
+ find "${work_dir}/package-infos/" -maxdepth 1 -name '*.builds' \
+ -exec sed '
+ s|^|{} |
+ s|^\S\+/||
+ s|\.builds | |
+ ' {} \; | \
+ sort -k2,2 > \
+ "${all_builds}"
+ fi
+
+ if [ ! -s "${all_depends}" ]; then
+ find "${work_dir}/package-infos/" -maxdepth 1 -name '*.run-depends' \
+ -exec sed '
+ s|^|{} |
+ s|^\S\+/||
+ s|\.run-depends | |
+ ' {} \; | \
+ grep -v ' base$' | \
+ sort -k2,2 > \
+ "${all_depends}"
+ fi
+
+ sort -u "${omega}" | \
+ sponge "${omega}"
+
+ temp_dir=$(mktemp -d 'tmp.common-functions.find_biggest_subset_of_packages.XXXXXXXXXX' --tmpdir)
+ trap 'rm -rf --one-file-system "${temp_dir}"' EXIT
+
+ {
+ sort -u "${keep}"
+ cat "${force}" "${force}"
+ } | \
+ sort | \
+ uniq -u > \
+ "${temp_dir}/keep.new"
+ touch "${temp_dir}/keep"
+
+ while [ -s "${temp_dir}/keep.new" ]; do
+ cat "${temp_dir}/keep.new" "${temp_dir}/keep" | \
+ sort -u | \
+ sponge "${temp_dir}/keep"
+
+ {
+ # we append all packages which are run-dependencies of keep-packages
+ # to the keep-list
+ sed '
+ s|^|'"${work_dir}"'/package-infos/|
+ s|$|.run-depends|
+ ' "${temp_dir}/keep" | \
+ xargs -r grep -HF '' | \
+ sed '
+ s|^.*/||
+ s|\.run-depends:| |
+ ' | \
+ sort -u | \
+ sort -k2,2 | \
+ uniq -f1 | \
+ join -1 2 -2 2 -o 2.1 - "${all_builds}"
+
+ # we append all packages with run-dependencies on the keep-list
+ # to the keep-list
+ sed '
+ s|^|'"${work_dir}"'/package-infos/|
+ s|$|.builds|
+ ' "${temp_dir}/keep" | \
+ xargs -r grep -HF '' | \
+ sed '
+ s|^.*/||
+ s|\.builds:| |
+ ' | \
+ sort -u | \
+ sort -k2,2 | \
+ uniq -f1 | \
+ join -1 2 -2 2 -o 2.1 - "${all_depends}"
+ } | \
+ sort -u | \
+ join -1 1 -2 1 -o 2.1 - "${omega}" | \
+ sort -u > \
+ "${temp_dir}/keep.new"
+
+ # "new" is only what has not been there before and what is not forced
+ cat "${temp_dir}/keep" "${temp_dir}/keep" "${force}" "${force}" "${temp_dir}/keep.new" | \
+ sort | \
+ uniq -u | \
+ sponge "${temp_dir}/keep.new"
+ done
+
+ cat "${omega}" "${temp_dir}/keep" "${temp_dir}/keep" | \
+ sort | \
+ uniq -u
+
+ )
+
+}
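+
+# illustrative call (hypothetical temporary files; the last two start out
+# empty and act as a cache across calls):
+#   find_biggest_subset_of_packages "${tmp_dir}/omega" "${tmp_dir}/keep" \
+#     "${tmp_dir}/all-builds" "${tmp_dir}/all-depends"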
+
+# sort_square_bracket_content $file
+# sort the content of [] in $file, print to stdout
+
+sort_square_bracket_content() {
+ local file
+ local line
+ local token
+ local token_list
+ local rest
+ file="$1"
+
+ while read -r line; do
+ printf '%s ' "${line}" | \
+ tr ' ' '\n' | \
+ while read -r token; do
+ if echo "${token}" | \
+ grep -qF '['; then
+ printf '%s[' "${token%[*}"
+ token="${token##*[}"
+ token_list="${token%,}"
+ while ! echo "${token_list}" | \
+ grep -qF ']'; do
+ read -r token
+ token_list=$(
+ printf '%s\n' \
+ "${token_list}" \
+ "${token%,}"
+ )
+ done
+ rest="]${token_list#*]}"
+ token_list="${token_list%%]*}"
+ token=$(
+ printf '%s' "${token_list}" | \
+ sort | \
+ sed 's|$|,|'
+ printf '%s' "${rest}"
+ )
+ fi
+ printf '%s\n' "${token}"
+ done | \
+ tr '\n' ' ' | \
+ sed '
+ s|, ]|]|g
+ s| $||
+ '
+ printf '\n'
+ done < \
+ "${file}"
+}
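+
+# example: a line 'foo [c, b, a] bar' in $file is printed as 'foo [a, b, c] bar'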
+
+# smoothen_namcap_log $file
+# remove unnecessary differences from namcap-logs:
+# - remove architecture specific information
+# - sort lines
+# - sort content of square brackets
+
+smoothen_namcap_log() {
+ local file
+ file="$1"
+ # shellcheck disable=SC2016
+ sort_square_bracket_content "${file}" | \
+ sed '
+ # normalize architecture specific information
+ s|i[34567]86|$ARCH|g
+ s|x86\([-_]64\)\?|$ARCH|g
+ # remove haskell hashes
+ s|\('"'"'[^'"'"']*-[0-9.]\+\)-[a-zA-Z0-9]\{1,22\}\(-ghc[^'"'"']*'"'"'\)|\1\2|g
+ ' | \
+ sort | \
+ sponge "${file}"
+}
+
+# print_list_of_archaic_packages $source1 $source2 ...
+# print a list of packages which have not been touched for a while,
+# but which are still in the pipeline, e.g. in $source1, $source2 or ...
+
+print_list_of_archaic_packages() {
+ for source in "$@"; do
+ case "${source}" in
+ 'testing')
+ # packages remaining longer than $max_package_age_testing days in testing will be marked tested if no bug for them exists on FS32
+ find "${work_dir}/package-states" -mindepth 1 -maxdepth 1 -name '*.testing' -mtime "+${max_package_age_testing}" \
+ -exec head -n1 {} \; | \
+ "${base_dir}/bin/modify-package-state" -n --tested /dev/stdin
+ # packages remaining longer than $max_package_age_broken_testing days in testing (w/o being tested!) will be considered outdated
+ # and no longer block other packages from being moved
+ find "${work_dir}/package-states" -mindepth 1 -maxdepth 1 -name '*.testing' -mtime "+${max_package_age_broken_testing}" -printf '%f\n' | \
+ sed '
+ s|\.testing$||
+ '
+ ;;
+ 'build-list')
+ while read -r pkg rev mod_rev repo; do
+ git_repo=$(
+ find_repository_with_commit "${rev}"
+ )
+ eval repo_path='"${repo_paths__'"${git_repo}"'}"'
+ commit_date=$(
+ git -C "${repo_path}" show -s --format=%ct "${rev}"
+ )
+ mod_commit_date=$(
+ git -C "${repo_paths__archlinux32}" show -s --format=%ct "${mod_rev}"
+ )
+ if [ "${mod_commit_date}" -gt "${commit_date}" ]; then
+ commit_date="${mod_commit_date}"
+ fi
+ # packages remaining longer than $max_package_age_build_list days on the build list
+ if [ "$((commit_date + 24*60*60*max_package_age_build_list))" -lt "$(date '+%s')" ]; then
+ printf '%s %s %s %s\n' \
+ "${pkg}" \
+ "${rev}" \
+ "${mod_rev}" \
+ "${repo}"
+ fi
+ done < \
+ "${work_dir}/build-list"
+ ;;
+ 'staging')
+ # packages remaining longer than $max_package_age_staging days in staging
+ find "${work_dir}/package-states" -mindepth 1 -maxdepth 1 -name '*.done' -mtime "+${max_package_age_staging}" -printf '%f\n' | \
+ sed '
+ s|\.done$||
+ '
+ ;;
+ *)
+ >&2 printf 'unknown archaic-source "%s" - skipped.\n' "${source}"
+ ;;
+ esac
+ done | \
+ sort -u
+}
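+
+# example: list archaic packages still in the pipeline:
+#   print_list_of_archaic_packages 'build-list' 'staging' 'testing'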
+
+# modification_revision_link "${mod_rev}" "${repo}" "${pkg}"
+# print the given modification revision, possibly with an html link to github
+
+modification_revision_link() {
+ local mod_rev="$1"
+ local repo="$2"
+ local pkg="$3"
+
+ if git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${repo}/${pkg}/PKGBUILD" > /dev/null 2>&1; then
+ printf '<a href="https://github.com/archlinux32/packages/tree/%s/%s/%s">%s</a>\n' \
+ "${mod_rev}" \
+ "${repo}" \
+ "${pkg}" \
+ "${mod_rev}"
+ else
+ printf '%s\n' \
+ "${mod_rev}"
+ fi
+}
+
+# trigger_mirror_refreshs
+# trigger a refresh of capable tier 1 mirrors (as a backup for the master mirror)
+
+trigger_mirror_refreshs() {
+ local tmp_file
+
+ tmp_file=$(mktemp "tmp.common-functions.trigger_mirror_refreshs.XXXXXXXXXX" --tmpdir)
+ date '+%s' > \
+ "${tmp_file}"
+ ${master_mirror_rsync_command} \
+ "${tmp_file}" \
+ "${master_mirror_rsync_directory}/lastupdate"
+ rm "${tmp_file}"
+ for trigger_url in ${mirror_refresh_trigger_urls}; do
+ screen -S trigger-mirror-update -d -m curl -L "${trigger_url}"
+ done
+}
+
+# extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name $package_file_name
+# split a package file name into its components and set the variables
+# pkgname, epoch, pkgver, pkgrel, sub_pkgrel and arch accordingly
+extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name() {
+ pkgname="$1"
+ pkgname="${pkgname%.pkg.tar.xz}"
+ arch="${pkgname##*-}"
+ pkgname="${pkgname%-*}"
+ sub_pkgrel="${pkgname##*-}"
+ pkgname="${pkgname%-*}"
+ pkgrel="${sub_pkgrel%.*}"
+ if [ "${pkgrel}" = "${sub_pkgrel}" ]; then
+ sub_pkgrel='0'
+ else
+ sub_pkgrel="${sub_pkgrel##*.}"
+ fi
+ epoch="${pkgname##*-}"
+ pkgname="${pkgname%-*}"
+ pkgver="${epoch#*:}"
+ if [ "${pkgver}" = "${epoch}" ]; then
+ epoch='0'
+ else
+ epoch="${epoch%%:*}"
+ fi
+}
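+
+# example: for 'zsh-5.4.2-2.1-i686.pkg.tar.xz' this sets pkgname='zsh',
+# epoch='0', pkgver='5.4.2', pkgrel='2', sub_pkgrel='1' and arch='i686'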