Diffstat (limited to 'lib')
-rwxr-xr-x  lib/common-functions    925
-rwxr-xr-x  lib/mysql-functions    1122
2 files changed, 2047 insertions, 0 deletions
diff --git a/lib/common-functions b/lib/common-functions
new file mode 100755
index 0000000..8425e9a
--- /dev/null
+++ b/lib/common-functions
@@ -0,0 +1,925 @@
+#!/bin/sh
+
+# contains functions used by more than one script
+
+# shellcheck disable=SC2039
+
+if [ -z "${base_dir}" ]; then
+ # just to make shellcheck happy
+ . 'conf/default.conf'
+fi
+
+if [ ! -s "${work_dir}/build-master-sanity" ]; then
+ {
+ date
+ printf 'sourcing common-functions for %s\n' "$0"
+ printf '%s parameters:' "$#"
+ printf ' "%s"' "$@"
+ printf '\n'
+ } >> \
+ "${work_dir}/command-log"
+fi
+
+# find_pkgbuilds package repository git_repository git_revision mod_git_revision
+# find the PKGBUILD and modification of $package from $repository
+# sets $PKGBUILD and $PKGBUILD_mod
+
+find_pkgbuilds() {
+
+ local package="$1"
+ local repository="$2"
+ local git_repository="$3"
+ local git_revision="$4"
+ local mod_git_revision="$5"
+
+ local repo_path
+ eval 'repo_path="${repo_paths__'"${git_repository}"'}"'
+
+ PKGBUILD=$(
+ git -C "${repo_path}" archive "${git_revision}" -- "${package}/repos/" 2> /dev/null | \
+ tar -t 2> /dev/null | \
+ grep "$(printf '^%s-.*/PKGBUILD' "$(str_to_regex "${package}/repos/${repository}")")" | \
+ grep -v -- '-i686/PKGBUILD$' | \
+ grep -v -- '[-/]\(staging\|testing\|unstable\)-[^/]\+/PKGBUILD$' | \
+ sort | \
+ tail -n1
+ )
+
+ PKGBUILD_mod=$(
+ git -C "${repo_paths__archlinux32}" archive "${mod_git_revision}" 2> /dev/null | \
+ tar -t "${repository}/${package}/PKGBUILD" 2> /dev/null
+ ) || true
+
+ if [ -z "${PKGBUILD}" ] && \
+ [ -z "${PKGBUILD_mod}" ]; then
+ >&2 printf 'Neither PKGBUILD nor modification of PKGBUILD found for package "%s" from %s (%s), revisions %s and %s.\n' \
+ "${package}" \
+ "${repository}" \
+ "${git_repository}" \
+ "${git_revision}" \
+ "${mod_git_revision}"
+ return 1
+ fi
+
+}
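Usage sketch (annotation, not part of the patch): assuming conf/default.conf is sourced and defines the repo_paths__* variables, a caller could look up both files like this; the package, repository names and the revision variables are hypothetical placeholders:

    # hypothetical: ${rev} and ${mod_rev} hold 40-hex commits of the upstream
    # and archlinux32 git repositories respectively
    if find_pkgbuilds 'zsh' 'extra' 'packages' "${rev}" "${mod_rev}"; then
      printf 'upstream PKGBUILD: %s\n' "${PKGBUILD}"
      printf 'archlinux32 modification: %s\n' "${PKGBUILD_mod}"
    fi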
+
+# find_repository_with_commit commit
+# find the repository which has $commit
+
+find_repository_with_commit() {
+
+ local repository
+
+ for repository in ${repo_names}; do
+ # shellcheck disable=SC2016
+ if [ "$(eval git -C "$(printf '"${repo_paths__%s}"' "${repository}")" cat-file -t '"$1"' 2> /dev/null)" = "commit" ]; then
+ echo "${repository}"
+ return 0
+ fi
+ done
+ >&2 printf 'find_repository_with_commit: Cannot find repository with commit "%s"\n' "$1"
+ exit 1
+
+}
+
+# find_git_repository_to_package_repository repository
+# find the git repository which tracks the package repository $repository
+
+find_git_repository_to_package_repository() {
+
+ local repository
+
+ repository=$(
+ # shellcheck disable=SC2016
+ {
+ printf 'SELECT `git_repositories`.`name` FROM `git_repositories`'
+ mysql_join_git_repositories_upstream_repositories
+ printf ' WHERE `upstream_repositories`.`name`=from_base64("%s");\n' \
+ "$(printf '%s' "$1" | base64 -w0)"
+ } | \
+ mysql_run_query
+ )
+ if [ -z "${repository}" ]; then
+ >&2 echo "can't find git repository with package repository '$1'"
+ exit 1
+ else
+ echo "${repository}"
+ return 0
+ fi
+
+}
+
+# repository_of_package $package.$repo_revision.$mod_repo_revision.$repository
+# print which (stable) repository a package belongs to
+
+repository_of_package() {
+ local package="$1"
+ local repository="${package##*.}"
+ package="${package%.*}"
+ local a32_rev="${package##*.}"
+ package="${package%.*.*}"
+
+ case "${repository}" in
+ 'multilib')
+ if git -C "${repo_paths__archlinux32}" archive --format=tar "${a32_rev}" -- 'extra-from-multilib' | \
+ tar -Ox | \
+ grep -qFx "${package%.*.*.*}"; then
+ echo 'extra'
+ else
+ echo 'community'
+ fi
+ ;;
+ *)
+ echo "${repository}"
+ esac
+}
+
+# official_or_community $package.$repo_revision.$mod_repo_revision.$repository $ending
+# print whether the specified package is an official package (print
+# $ending) or a community package (print 'community-$ending') or a
+# build-support package (print 'build-support')
+
+official_or_community() {
+ local prepo
+ prepo=$(repository_of_package "$1")
+
+ if [ "${prepo}" = 'community' ]; then
+ echo 'community-'"$2"
+ elif [ "${prepo}" = 'build-support' ]; then
+ echo 'build-support'
+ else
+ echo "$2"
+ fi
+}
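For illustration (annotation, not part of the patch): the first argument is the dotted identifier described above; ${rev} and ${mod_rev} stand for hypothetical git revisions:

    official_or_community "zsh.${rev}.${mod_rev}.extra" 'testing'      # prints: testing
    official_or_community "tcl.${rev}.${mod_rev}.community" 'staging'  # prints: community-staging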
+
+# ls_master_mirror $path
+# list content of $path on the master mirror (via rsync)
+
+ls_master_mirror() {
+
+ local path="$1"
+
+ ${master_mirror_rsync_command} \
+ "${master_mirror_rsync_directory}/${path}/" | \
+ grep -v '\s\.$' | \
+ awk '{print $5}'
+
+}
+
+# TODO: the actions of remove_old_package_versions should be done
+# on basis of the information in the database
+
+# remove_old_package_versions $arch $repository $package_file
+
+# removes all older (not-newer) versions of $package_file
+# in all repositories not-older (newer) than $repository
+
+# TODO: should remove all other versions (also newer) from
+# some repositories :-/
+
+# A package is considered not newer if
+# a) its version is not newer
+# A package is considered older if
+# b) its version is older or
+# c) if it's "not newer" and its architecture is 'any' and different or
+# d) if it's "not newer" and the other architecture is 'any' and different
+
+# this ensures that an 'any' package may replace arch-specific packages of the same version and vice versa
+
+remove_old_package_versions() {
+
+ local arch="$1"
+ local repository="$2"
+ local package="$3"
+
+ local pkgname
+ local epoch
+ local pkgver
+ local pkgrel
+ local sub_pkgrel
+ pkgname="${package%-*}"
+ pkgrel="${pkgname##*-}"
+ sub_pkgrel="${pkgrel##*.}"
+ if [ "${sub_pkgrel}" = "${pkgrel}" ]; then
+ sub_pkgrel='0'
+ else
+ pkgrel="${pkgrel%.*}"
+ fi
+ pkgname="${pkgname%-*}"
+ pkgver="${pkgname##*-}"
+ epoch="${pkgver%%:*}"
+ if [ "${epoch}" = "${pkgver}" ]; then
+ epoch='0'
+ else
+ pkgver="${pkgver#*:}"
+ fi
+ pkgname="${pkgname%-*}"
+
+ # shellcheck disable=SC2016
+ {
+ printf 'SELECT "bogus",CONCAT(from_base64("%s"),"/",from_base64("%s")),1,from_base64("%s");\n' \
+ "$(
+ printf '%s' "${repository}" | \
+ base64 -w0
+ )" \
+ "$(
+ printf '%s' "${package}" | \
+ base64 -w0
+ )" \
+ "$(
+ printf '%s' "${package}" | \
+ sed '
+ s/^.*-\([^-]\+-[^-]\+\)-[^-]\+$/\1/
+ ' | \
+ base64 -w0
+ )"
+ printf 'SELECT '
+ printf '`binary_packages`.`id`,'
+ printf 'CONCAT(`repositories`.`name`,"/",'
+ mysql_package_name_query
+ printf '),'
+ # should we delete packages of identical version?
+ printf 'IF((`more_stable_repos`.`id`!=`repositories`.`id`) AND (`more_stable_repos`.`stability`=`repositories`.`stability`),2,0),'
+ printf 'CONCAT('
+ printf 'IF(`binary_packages`.`epoch`=0,"",CONCAT(`binary_packages`.`epoch`,":")),'
+ printf '`binary_packages`.`pkgver`,"-",'
+ printf '`binary_packages`.`pkgrel`,".",'
+ printf '`binary_packages`.`sub_pkgrel`'
+ printf ')'
+ printf ' FROM `binary_packages`'
+ mysql_join_binary_packages_repositories
+ mysql_join_binary_packages_architectures
+ printf ' JOIN `repository_stability_relations` ON `repository_stability_relations`.`less_stable`=`repositories`.`stability`'
+ printf ' JOIN `repositories` AS `more_stable_repos` ON `repository_stability_relations`.`more_stable`=`more_stable_repos`.`stability`'
+ # name must match
+ printf ' WHERE `binary_packages`.`pkgname`=from_base64("%s")' \
+ "$(printf '%s' "${package%-*-*-*}" | base64 -w0)"
+ # repository, where package should be deleted, should be less stable
+ printf ' AND `more_stable_repos`.`name`=from_base64("%s")' \
+ "$(printf '%s' "${repository}" | base64 -w0)"
+ printf ';\n'
+ } | \
+ mysql_run_query | \
+ tr '\t' ' ' | \
+ expand_version 4 | \
+ sort -k4V,4 -k3r,3 | \
+ shrink_version 4 | \
+ sed -n '
+ /^bogus /q
+ p
+ ' | \
+ cut -d' ' -f1,2 >&2
+
+ # repositories in which older packages should be deleted
+ local delete_older_repositories
+ # repositories in which not-newer packages should be deleted
+ local delete_not_newer_repositories
+
+ if echo "${standalone_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories="${repository}"
+ delete_not_newer_repositories=''
+
+ elif echo "${staging_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories="${repository}"
+ delete_not_newer_repositories=$(
+ echo "${staging_package_repositories}" | \
+ grep -vxF "${repository}"
+ ) || true
+
+ elif echo "${testing_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories=$(
+ printf '%s\n' "${staging_package_repositories}" "${repository}"
+ )
+ delete_not_newer_repositories=$(
+ echo "${testing_package_repositories}" | \
+ grep -vxF "${repository}"
+ ) || true
+
+ elif echo "${stable_package_repositories}" | \
+ grep -qxF "${repository}"; then
+
+ delete_older_repositories=$(
+ printf '%s\n' "${staging_package_repositories}" "${testing_package_repositories}" "${repository}"
+ )
+ delete_not_newer_repositories=$(
+ echo "${stable_package_repositories}" | \
+ grep -vxF "${repository}"
+ ) || true
+
+ else
+
+ >&2 printf 'remove_old_package_versions: Unknown repository "%s".\n' "${repository}"
+ return 1
+
+ fi
+
+ ( # the new shell is intentional
+ tmp_dir=$(mktemp -d 'tmp.common-functions.remove_old_package_versions.XXXXXXXXXX' --tmpdir)
+ trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT
+
+ {
+ # the architecture of the package (any vs. i686)
+ package_arch="${package##*-}"
+ package_arch="${package_arch%%.*}"
+ if [ "${package_arch}" = 'any' ]; then
+ package_arch_regex_inverter='!'
+ else
+ unset package_arch_regex_inverter
+ fi
+
+ for repo in ${delete_older_repositories}; do
+ ls_master_mirror "${arch}/${repo}" | \
+ sed -n '
+ /\.pkg\.tar\.xz$/!d
+ s|-\([^-]\+-[^-]\+\)-\([^-]\+\)$| \1 \2|
+ /^'"$(str_to_regex "${package%-*-*-*}")"' / {
+ s|^|2 '"${arch} ${repo}"' |
+ / any\.pkg\.tar\.xz$/'"${package_arch_regex_inverter}"'{
+ s|^2|0|
+ }
+ p
+ }
+ '
+ done
+ for repo in ${delete_not_newer_repositories}; do
+ ls_master_mirror "${arch}/${repo}" | \
+ sed -n '
+ /\.pkg\.tar\.xz$/!d
+ s|-\([^-]\+-[^-]\+\)-\([^-]\+\)$| \1 \2|
+ /^'"$(str_to_regex "${package%-*-*-*}")"' / {
+ s|^|0 '"${arch} ${repo}"' |
+ p
+ }
+ '
+ done
+ echo "${package%-*}" | \
+ sed 's|^.*-\([^-]\+-[^-]\+\)$|1 %cut% %it% %here% \1|'
+
+ # the generated list contains the following columns:
+ # $delete-if-newer-vs-not-older $arch-directory $repo-directory $pkgname $pkgver-$pkgrel $pkg-arch.pkg.tar.xz
+ } | \
+ expand_version 5 | \
+ sort -k5V,5 -k1n,1 | \
+ shrink_version 5 | \
+ sed -n '
+ /^1 %cut% %it% %here% /q
+ s/^[02] //
+ s/ \(\S\+\)$/-\1/
+ p
+ ' | \
+ sort -u > \
+ "${tmp_dir}/packages-to-delete"
+ # this file contains a list of packages to be deleted, one on each line:
+ # $architecture-directory $repository-directory $package-name $pkgver-$pkgrel-$package-architecture.pkg.tar.xz
+
+ cut -d' ' -f1,2 < \
+ "${tmp_dir}/packages-to-delete" | \
+ grep -vxF "${arch} ${repository}" | \
+ sort -u > \
+ "${tmp_dir}/repositories-to-modify"
+
+ # fetch all databases being modified
+ while read -r del_arch del_repo; do
+ mkdir -p "${tmp_dir}/${del_arch}/${del_repo}"
+ ${master_mirror_rsync_command} \
+ "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/${del_repo}.db."* \
+ "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/${del_repo}.files."* \
+ "${tmp_dir}/${del_arch}/${del_repo}/"
+ done < \
+ "${tmp_dir}/repositories-to-modify"
+
+ while read -r del_arch del_repo del_package _; do
+ if [ "${del_arch}/${del_repo}" = "${arch}/${repository}" ]; then
+ # we do not repo-remove the package in the target repository
+ continue
+ fi
+ repo-remove -q "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.db.tar.gz" \
+ "${del_package}"
+ done < \
+ "${tmp_dir}/packages-to-delete"
+
+ # upload modified databases
+ while read -r del_arch del_repo; do
+ ${master_mirror_rsync_command} \
+ "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.db."* \
+ "${tmp_dir}/${del_arch}/${del_repo}/${del_repo}.files."* \
+ "${master_mirror_rsync_directory}/${del_arch}/${del_repo}/"
+ done < \
+ "${tmp_dir}/repositories-to-modify"
+
+ # shellcheck disable=SC2016
+ sed '
+ s/\.pkg\.tar\.xz$//
+ s/^\S\+ //
+ s/-\([^-. ]\+\)\(-[^- ]\+\)$/-\1.0\2/
+ s/ \([^-: ]\+\(-[^- ]\+\)\{2\}\)$/ 0:\1/
+ s/ \([^-.]\+\):\([^-:]\+\)-\([^-.]\+\)\.\([^-.]\+\)-\([^-]\+\)$/ \1 \2 \3 \4 \5/
+ ' "${tmp_dir}/packages-to-delete" | \
+ while read -r repo pkgname epoch pkgver pkgrel sub_pkgrel arch; do
+ printf 'DELETE FROM `binary_packages` WHERE'
+ printf ' `binary_packages`.`%s`=(SELECT `%s`.`id` FROM `%s` WHERE `%s`.`name`=from_base64("%s")) AND' \
+ 'architecture' 'architectures' 'architectures' 'architectures' "$(printf '%s' "${arch}" | base64 -w0)" \
+ 'repository' 'repositories' 'repositories' 'repositories' "$(printf '%s' "${repo}" | base64 -w0)"
+ printf ' `binary_packages`.`%s`=from_base64("%s") AND' \
+ 'pkgname' "$(printf '%s' "${pkgname}" | base64 -w0)" \
+ 'epoch' "$(printf '%s' "${epoch}" | base64 -w0)" \
+ 'pkgver' "$(printf '%s' "${pkgver}" | base64 -w0)" \
+ 'pkgrel' "$(printf '%s' "${pkgrel}" | base64 -w0)" \
+ 'sub_pkgrel' "$(printf '%s' "${sub_pkgrel}" | base64 -w0)" | \
+ sed 's/ AND$//'
+ printf ';\n'
+ done | \
+ mysql_run_query
+
+ sed '
+ s| \(\S\+\)$|-\1|
+ y| |/|
+ s|^|rm "|
+ s|$|"|
+ p
+ s|"$|.sig"|
+ ' "${tmp_dir}/packages-to-delete" | \
+ ${master_mirror_sftp_command}
+ )
+
+}
+
+# wait_some_time $minimum $diff
+# wait between minimum and minimum+diff-1 seconds (diff defaults to 30)
+
+wait_some_time() {
+ local minimum=$1
+ local diff=$2
+ local random
+
+ if [ -z "${diff}" ]; then
+ diff=30
+ fi
+
+ random=$(
+ dd if='/dev/urandom' count=1 2> /dev/null | \
+ cksum | \
+ cut -d' ' -f1
+ )
+
+ sleep $((minimum + random % diff))
+}
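The resulting range, for reference (annotation, not part of the patch):

    wait_some_time 60      # sleeps between 60 and 89 seconds (default diff of 30)
    wait_some_time 60 300  # sleeps between 60 and 359 seconds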
+
+# str_to_regex $string
+# escape dots and opening square brackets for use in a regex
+
+str_to_regex() {
+ echo "$1" | \
+ sed '
+ s|[.[]|\\\0|g
+ '
+}
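Example (annotation, not part of the patch): only '.' and '[' need quoting for the basic regular expressions used with grep and sed in this file:

    str_to_regex 'libsigc++-2.0'  # prints: libsigc++-2\.0
    str_to_regex 'lib32-glibc'    # prints: lib32-glibc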
+
+# make_source_info $package $repository $git_revision $mod_git_revision $output
+# create .SRCINFO from PKGBUILD within git repositories, output to $output
+
+make_source_info() {
+
+ local package="$1"
+ local repository="$2"
+ local git_revision="$3"
+ local mod_git_revision="$4"
+ local output="$5"
+
+ local git_repo
+ local PKGBUILD
+ local PKGBUILD_mod
+
+ git_repo=$(find_repository_with_commit "${git_revision}")
+
+ if [ -z "${git_repo}" ]; then
+ return 1
+ fi
+
+ find_pkgbuilds "${package}" "${repository}" "${git_repo}" "${git_revision}" "${mod_git_revision}"
+
+ ( # the new shell is intentional
+
+ tmp_dir=$(mktemp -d "${work_dir}/tmp.make_source_info.XXXXXX")
+ trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT
+
+ extract_source_directory "${git_repo}" "${git_revision}" "${mod_git_revision}" "${tmp_dir}" '0'
+
+ {
+ cd "${tmp_dir}"
+ makepkg --printsrcinfo
+ cd ..
+ } |
+ if [ "${package%-i18n}-i18n" = "${package}" ]; then
+ sed '
+ 1 a \\tdepends = '"${package%-i18n}"'
+ '
+ else
+ cat
+ fi > \
+ "${output}"
+
+ )
+
+}
+
+# recursively_umount_and_rm $dir
+# umount all mountpoints in $dir which are also in $dir's
+# filesystem, possibly also $dir itself and then
+# rm -rf --one-file-system $dir
+
+recursively_umount_and_rm() {
+ local dir="$1"
+
+ if [ -z "${dir}" ]; then
+ >&2 echo 'ERROR: recursively_umount_and_rm requires an argument'
+ exit 42
+ fi
+
+ find "${dir}" \
+ -xdev -depth -type d \
+ -exec 'mountpoint' '-q' '{}' ';' \
+ -exec 'sudo' 'umount' '-l' '{}' ';'
+ rm -rf --one-file-system "${dir}"
+}
+
+# mangle_pkgbuild $PKGBUILD [$sub_pkgrel]
+# mangle $arch in PKGBUILDs to contain i686
+# append $sub_pkgrel to the pkgrel
+
+mangle_pkgbuild() {
+ local PKGBUILD="$1"
+ local sub_pkgrel="$2"
+
+ if [ -n "${sub_pkgrel}" ]; then
+ sub_pkgrel=".${sub_pkgrel}"
+ fi
+
+ sed -i '
+ /^arch=[^#]*any/!{
+ /^arch=(/s/(/(i686 /
+ }
+ s/^\(\s*pkgrel=\)['"'"'"]\?\([0-9.]\+\)['"'"'"]\?\s*\(#.*\)\?$/\1"\2'"${sub_pkgrel}"'"/
+ ' "${PKGBUILD}"
+}
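To illustrate the sed script above (annotation, not part of the patch), a PKGBUILD with hypothetical content would be rewritten in place like this when called with sub_pkgrel 1:

    # before:  arch=('x86_64')        pkgrel=2
    # after:   arch=(i686 'x86_64')   pkgrel="2.1"
    mangle_pkgbuild './PKGBUILD' '1'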
+
+# find_newest_of_git_revisions
+# find the newest git revision of the ones provided on stdin
+# (assuming linear history)
+
+find_newest_of_git_revisions() {
+ local revisions
+ local repo
+ revisions=$(cat)
+
+ if [ "$(
+ echo "${revisions}" | \
+ wc -l
+ )" -eq 1 ]; then
+
+ echo "${revisions}"
+ return
+
+ fi
+
+ repo=$(
+ find_repository_with_commit \
+ "$(
+ echo "${revisions}" | \
+ grep -xm1 '[0-9a-f]\{40\}'
+ )"
+ )
+
+ eval 'repo="${repo_paths__'"${repo}"'}"'
+
+ echo "${revisions}" | \
+ xargs -rn1 git -C "${repo}" rev-parse | \
+ {
+ newest=''
+ while read -r current; do
+ if [ -z "${newest}" ] || \
+ git -C "${repo}" merge-base --is-ancestor "${newest}" "${current}"; then
+ newest="${current}"
+ fi
+ done
+ echo "${newest}"
+ }
+}
+
+# find_package_repository_to_package $package $git_repository $git_commit
+# find the package repository a package from a given git repository
+# belongs to
+
+find_package_repository_to_package() {
+
+ local package="$1"
+ local git_repository="$2"
+ local git_commit="$3"
+ local repo_path
+ local repo
+
+ eval 'repo_path="${repo_paths__'"${git_repository}"'}"'
+
+ if [ "${git_repository}" = 'archlinux32' ]; then
+ repo=$(
+ git -C "${repo_path}" archive "${git_commit}" -- | \
+ tar -t --wildcards "*/${package}/" | \
+ cut -d/ -f1 | \
+ sort -u
+ )
+ else
+ repo=$(
+ git -C "${repo_path}" archive "${git_commit}" -- "${package}/repos" 2> /dev/null | \
+ tar -t | \
+ cut -d/ -f3 | \
+ grep -vxF '' | \
+ grep -v 'staging\|testing\|-unstable' | \
+ grep -v -- '-i686$' | \
+ sed 's|-[^-]\+$||' | \
+ sort -u
+ )
+ fi
+
+ if [ -z "${repo}" ]; then
+ return 1
+ fi
+
+ if [ "$(
+ echo "${repo}" | \
+ wc -l
+ )" -ne 1 ]; then
+ return 1
+ fi
+
+ echo "${repo}"
+
+}
+
+# extract_source_directory $git_repo $rev $mod_rev $output $sub_pkgrel
+# extract files found in the svn/git source directories
+# $PKGBUILD and $PKGBUILD_mod are expected to be set correctly
+
+extract_source_directory() {
+
+ local git_repo="$1"
+ # shellcheck disable=SC2034
+ local rev="$2"
+ local mod_rev="$3"
+ local output="$4"
+ local sub_pkgrel="$5"
+
+ if [ -n "${PKGBUILD}" ]; then
+ eval 'git -C "${repo_paths__'"${git_repo}"'}" archive "${rev}" -- "${PKGBUILD%/*}"' | \
+ tar -x --strip-components=3 -C "${output}"
+ fi
+
+ if [ -n "${PKGBUILD_mod}" ]; then
+ git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod%/*}" | \
+ tar -x --overwrite --exclude 'PKGBUILD' --strip-components=2 -C "${output}" 2> /dev/null || \
+ true
+ git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod}" | \
+ tar -Ox "${PKGBUILD_mod}" >> \
+ "${output}/PKGBUILD"
+ fi
+
+ # we do not want to update pkgver, so we just undefine it
+ printf 'unset -f pkgver\n' >> \
+ "${output}/PKGBUILD"
+
+ mangle_pkgbuild "${output}/PKGBUILD" "${sub_pkgrel}"
+
+ # shellcheck disable=SC2016
+ sed -i '/^\$Id\$$/d' "${output}/PKGBUILD"
+
+ # we don't want write permissions on the PKGBUILD - otherwise pkgver()
+ # will change the version! (**HACK**)
+ chmod -w "${output}/PKGBUILD"
+
+}
+
+# download_sources_by_hash $package $repository $git_revision $git_mod_revision
+# try to download all sources by their hash into the current directory
+# returns 0 if any source was downloaded and 1 otherwise
+
+download_sources_by_hash() {
+
+ local package="$1"
+ local repository="$2"
+ local git_revision="$3"
+ local git_mod_revision="$4"
+
+ local return_value=1
+ local tmp_dir
+ local sum_type
+ local arch_suffix
+
+ tmp_dir=$(mktemp -d 'tmp.common-functions.download_sources_by_hash.XXXXXXXXXX' --tmpdir)
+
+ if ! make_source_info "${package}" "${repository}" "${git_revision}" "${git_mod_revision}" "${tmp_dir}/.SRCINFO"; then
+ >&2 echo 'download_sources_by_hash: make_source_info failed.'
+ rm -rf --one-file-system "${tmp_dir}"
+ return 1
+ fi
+
+ if ! [ -s "${tmp_dir}/.SRCINFO" ]; then
+ >&2 echo 'download_sources_by_hash: ".SRCINFO" has not been created by make_source_info.'
+ rm -rf --one-file-system "${tmp_dir}"
+ return 1
+ fi
+
+ for arch_suffix in '' '_i686'; do
+ for sum_type in 'sha256sum' 'sha512sum'; do
+ grep '^\s*'"${sum_type}s${arch_suffix}"' = ' "${tmp_dir}/.SRCINFO" | \
+ sed 's|^.* = ||' | \
+ cat -n > \
+ "${tmp_dir}/sums"
+ grep '^\s*source'"${arch_suffix}"' = ' "${tmp_dir}/.SRCINFO" | \
+ sed '
+ s|^.* = ||
+ s|::.*$||
+ s|.*/||
+ ' | \
+ cat -n > \
+ "${tmp_dir}/urls"
+ if [ "$(wc -l < "${tmp_dir}/sums")" -eq "$(wc -l < "${tmp_dir}/urls")" ]; then
+ join -1 1 -2 1 -o 1.2,2.2 "${tmp_dir}/sums" "${tmp_dir}/urls" > \
+ "${tmp_dir}/joined"
+ while read -r sum file; do
+ if [ "${sum}" = 'SKIP' ]; then
+ continue
+ fi
+ if echo "${sum} ${file}" | \
+ ${sum_type} -c > /dev/null 2>&1; then
+ # the correct source is already there
+ continue
+ fi
+ if wget -O "${tmp_dir}/transfer" "${source_by_hash_mirror}${sum}"; then
+ mv "${tmp_dir}/transfer" "${file}"
+ return_value=0
+ fi
+ done < \
+ "${tmp_dir}/joined"
+ fi
+ done
+ done
+
+ rm -rf --one-file-system "${tmp_dir}"
+ return ${return_value}
+
+}
+
+# expand_version $column_num
+# add "0:" to version in $column_num-th column if no ":" is there (epoch)
+# add "+0" to version in $column_num-th column if no "+" is there (git count/hash)
+
+expand_version() {
+ local column_num
+ column_num="$1"
+
+ sed '
+ /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*+/! s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)-/\1+0-/
+ /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*:/! s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\)/\10:/
+ '
+}
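Example with $column_num = 2 (annotation, not part of the patch):

    printf 'foo 1.2-3 x\n' | expand_version 2      # prints: foo 0:1.2+0-3 x
    printf 'foo 1:1.2+5-3 x\n' | expand_version 2  # unchanged: foo 1:1.2+5-3 x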
+
+# shrink_version $column_num
+# remove "0:" from version in $column_num-th column (epoch)
+# remove "+0" from version in $column_num-th column (git count/hash)
+
+shrink_version() {
+ local column_num
+ column_num="$1"
+
+ sed '
+ s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)+0-/\1-/
+ s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\)0:/\1/
+ '
+}
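The two helpers are used as a pair so pacman-style versions become comparable with sort -V (annotation, not part of the patch):

    printf '%s\n' 'a 1.2-1' 'b 1:1.0-1' 'c 1.2-1.1' | \
      expand_version 2 | \
      sort -k2V,2 | \
      shrink_version 2
    # prints the lines in the order a, c, b (the epoch sorts last)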
+
+# sort_square_bracket_content $file
+# sort the content of [] in $file, print to stdout
+
+sort_square_bracket_content() {
+ local file
+ local line
+ local token
+ local token_list
+ local rest
+ file="$1"
+
+ while read -r line; do
+ printf '%s ' "${line}" | \
+ tr ' ' '\n' | \
+ while read -r token; do
+ if echo "${token}" | \
+ grep -qF '['; then
+ printf '%s[' "${token%[*}"
+ token="${token##*[}"
+ token_list="${token%,}"
+ while ! echo "${token_list}" | \
+ grep -qF ']'; do
+ read -r token
+ token_list=$(
+ printf '%s\n' \
+ "${token_list}" \
+ "${token%,}"
+ )
+ done
+ rest="]${token_list#*]}"
+ token_list="${token_list%%]*}"
+ token=$(
+ printf '%s' "${token_list}" | \
+ sort | \
+ sed 's|$|,|'
+ printf '%s' "${rest}"
+ )
+ fi
+ printf '%s\n' "${token}"
+ done | \
+ tr '\n' ' ' | \
+ sed '
+ s|, ]|]|g
+ s| $||
+ '
+ printf '\n'
+ done < \
+ "${file}"
+}
+
+# smoothen_namcap_log $file
+# remove unnecessary differences from namcap-logs:
+# - remove architecture specific information
+# - sort lines
+# - sort content of square brackets
+
+smoothen_namcap_log() {
+ local file
+ file="$1"
+ # shellcheck disable=SC2016
+ sort_square_bracket_content "${file}" | \
+ sed '
+ # normalize architecture specific information
+ s|i[34567]86|$ARCH|g
+ s|x86\([-_]64\)\?|$ARCH|g
+ # remove haskell hashes
+ s|\('"'"'[^'"'"']*-[0-9.]\+\)-[a-zA-Z0-9]\{1,22\}\(-ghc[^'"'"']*'"'"'\)|\1\2|g
+ ' | \
+ sort | \
+ sponge "${file}"
+}
+
+# trigger_mirror_refreshs
+# trigger a refresh of capable tier 1 mirrors (as backup for master mirror)
+
+trigger_mirror_refreshs() {
+ local tmp_file
+
+ tmp_file=$(mktemp "tmp.common-functions.trigger_mirror_refreshs.XXXXXXXXXX" --tmpdir)
+ date '+%s' > \
+ "${tmp_file}"
+ ${master_mirror_rsync_command} \
+ "${tmp_file}" \
+ "${master_mirror_rsync_directory}/lastupdate"
+ rm "${tmp_file}"
+ for trigger_url in ${mirror_refresh_trigger_urls}; do
+ screen -S trigger-mirror-update -d -m curl -L "${trigger_url}"
+ done
+}
+
+# extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name
+extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name() {
+ pkgname="$1"
+ pkgname="${pkgname%.pkg.tar.xz}"
+ arch="${pkgname##*-}"
+ pkgname="${pkgname%-*}"
+ sub_pkgrel="${pkgname##*-}"
+ pkgname="${pkgname%-*}"
+ pkgrel="${sub_pkgrel%.*}"
+ if [ "${pkgrel}" = "${sub_pkgrel}" ]; then
+ sub_pkgrel='0'
+ else
+ sub_pkgrel="${sub_pkgrel##*.}"
+ fi
+ epoch="${pkgname##*-}"
+ pkgname="${pkgname%-*}"
+ pkgver="${epoch#*:}"
+ if [ "${pkgver}" = "${epoch}" ]; then
+ epoch='0'
+ else
+ epoch="${epoch%%:*}"
+ fi
+}
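Worked example (annotation, not part of the patch); note that the function sets the six variables in the caller's scope instead of printing them:

    extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name \
      'foo-2:1.5-3.1-any.pkg.tar.xz'
    # afterwards: pkgname=foo epoch=2 pkgver=1.5 pkgrel=3 sub_pkgrel=1 arch=any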
+
+# irc_say
+# say content of stdin in irc channel
+irc_say() {
+ if [ -p "${irc_dir}/#archlinux32/in" ]; then
+ sponge "${irc_dir}/#archlinux32/in"
+ fi
+}
diff --git a/lib/mysql-functions b/lib/mysql-functions
new file mode 100755
index 0000000..801eca2
--- /dev/null
+++ b/lib/mysql-functions
@@ -0,0 +1,1122 @@
+#!/bin/sh
+
+# contains functions used to access mysql db
+
+# shellcheck disable=SC2016,SC2039
+
+if [ -z "${base_dir}" ]; then
+ # just to make shellcheck happy
+ . 'conf/default.conf'
+fi
+
+# base64_encode_each
+# encode each line of stdin with base64
+
+base64_encode_each() {
+ local line
+
+ while read -r line; do
+ printf '%s' \
+ "${line}" | \
+ base64 -w0
+ printf '\n'
+ done
+}
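Example (annotation, not part of the patch):

    printf '%s\n' 'zsh' 'gtk2' | base64_encode_each
    # prints:
    # enNo
    # Z3RrMg==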
+
+# mysql_run_query
+# wrapper function to query mysql
+mysql_run_query() {
+ local query_file
+ if [ -s "${work_dir}/build-master-sanity" ]; then
+ # If the build master is insane, the calling command should only check
+ # if the build master is still insane - we do not want to log that.
+ ${mysql_command} -N --raw --batch "$@"
+ else
+ # we save the query in a file and delete that file if the query succeeded
+ query_file=$(mktemp "${work_dir}/tmp.mysql-functions.query.$(date +'%Y-%m-%dT%T').XXXXXX")
+ cat > "${query_file}"
+ for _ in {1..10}; do
+ {
+ printf '%s %s: ' "$0" "${mysql_command} -N --raw --batch $*"
+ date
+ } \
+ | tee -a "${work_dir}/mysql.stdin" \
+ | tee -a "${work_dir}/mysql.stdout" \
+ >> "${work_dir}/mysql.stderr"
+ cat "${query_file}" \
+ >> "${work_dir}/mysql.stdin"
+ {
+ ${mysql_command} -N --raw --batch "$@" \
+ < "${query_file}" \
+ 2>> "${work_dir}/mysql.stderr" \
+ && rm "${query_file}"
+ } \
+ | tee -a "${work_dir}/mysql.stdout"
+ if ! [ -f "${query_file}" ]; then
+ # success!
+ for s in \
+ "${work_dir}/mysql.stdin" \
+ "${work_dir}/mysql.stdout" \
+ "${work_dir}/mysql.stderr"; do
+ {
+ tail -n 10000 "$s"
+ printf '%s %s done: ' "$0" "${mysql_command} $*"
+ date
+ } \
+ | sponge "$s"
+ done
+ break
+ fi
+ for s in \
+ "${work_dir}/mysql.stdin" \
+ "${work_dir}/mysql.stdout" \
+ "${work_dir}/mysql.stderr"; do
+ {
+ printf '%s %s FAILED: ' "$0" "${mysql_command} $*"
+ date
+ } \
+ >> "$s"
+ done
+ done
+ # a present query_file means there was an error
+ if [ -f "${query_file}" ]; then
+ >&2 printf 'I could not complete a mysql query!\n'
+ if [ ! -s "${work_dir}/build-master-sanity" ]; then
+      printf '\001ACTION failed to execute a mysql query - can you have a look at "%s"?\001\n' \
+ "${query_file##*/}" \
+ | irc_say
+ fi
+ echo 'A mysql query failed.' > \
+ "${work_dir}/build-master-sanity"
+ return 2
+ fi
+ fi
+}
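Usage sketch (annotation, not part of the patch): the query is read from stdin; on persistent failure the query file is kept in ${work_dir} and the build master is flagged as insane. The query below is only illustrative:

    printf 'SELECT `repositories`.`name` FROM `repositories`;\n' | \
      mysql_run_query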
+
+# mysql_add_package_source $pkgbase $git_revision $mod_git_revision $upstream_package_repository
+
+# shellcheck disable=SC2086
+mysql_add_package_source() {
+ local names='pkgbase git_revision mod_git_revision upstream_package_repository'
+ local values
+ local uses_upstream
+ local uses_modification
+ local repo
+
+ if git -C "${repo_paths__archlinux32}" archive "$3" -- "$4/$1" >/dev/null 2>&1; then
+ uses_modification=1
+ else
+ uses_modification=0
+ fi
+ uses_upstream=0
+ for repo in ${repo_names}; do
+ if [ "${repo}" = 'archlinux32' ]; then
+ continue
+ fi
+ if eval 'git -C "${repo_paths__'"${repo}"'}" archive "$2" -- "$1/repos/$4-*/PKGBUILD" 2>/dev/null' | \
+ tar -t 2>/dev/null | \
+ sed 's,-x86_64/,-any/,' | \
+ grep -qFx "$1/repos/$4-any/PKGBUILD"; then
+ uses_upstream=1
+ fi
+ done
+
+ for _ in ${names}; do
+ values="${values}$(
+ printf '%s' "$1" | \
+ base64 -w0
+ ) "
+ shift
+ done
+ values="${values% }"
+
+ {
+ printf 'INSERT IGNORE INTO package_sources'
+ printf ' ('
+ printf '`%s`, ' ${names}
+ printf '`uses_upstream`,`uses_modification`'
+ printf ') SELECT'
+ printf ' from_base64("%s"), ' ${values% *}
+ printf ' `upstream_repositories`.`id`,%s,%s' \
+ ${uses_upstream} ${uses_modification}
+ printf ' FROM `upstream_repositories`'
+ printf ' WHERE `upstream_repositories`.`name` = from_base64("%s");' \
+ "${values##* }"
+ } | \
+ mysql_run_query
+}
+
+# mysql_show_binary_package $pkgname $pkgver $pkgrel $sub_pkgrel
+
+# shellcheck disable=SC2031,SC2086,SC2154
+mysql_show_binary_package() {
+ local names='pkgname pkgver pkgrel sub_pkgrel'
+ local name
+ for name in ${names}; do
+ eval 'local '"${name}"
+ eval "${name}"'=$(
+ printf "%s" "$1" |
+ base64 -w0
+ )'
+ shift
+ done
+
+ {
+ printf 'SELECT'
+ printf ' `%s`.`%s`,' \
+ 'repositories' 'name' \
+ 'binary_packages' 'pkgname' \
+ 'package_sources' 'pkgver' \
+ 'package_sources' 'pkgrel' \
+ 'binary_packages' 'sub_pkgrel' \
+ 'architectures' 'name' \
+ 'package_sources' 'pkgbase' \
+ 'package_sources' 'git_revision' \
+ 'package_sources' 'mod_git_revision' \
+ 'upstream_repositories' 'name'
+ printf ' FROM `binary_packages`'
+ mysql_join_binary_packages_architectures
+ mysql_join_binary_packages_repositories
+ mysql_join_binary_packages_build_assignments
+ mysql_join_build_assignments_package_sources
+ mysql_join_package_sources_upstream_repositories
+ printf ' WHERE'
+ printf ' `%s`.`%s` = from_base64("%s") AND' \
+ 'binary_packages' 'pkgname' "${pkgname}" \
+ 'binary_packages' 'sub_pkgrel' "${sub_pkgrel}" \
+ 'package_sources' 'pkgver' "${pkgver}" \
+ 'package_sources' 'pkgrel' "${pkgrel}"
+ printf ';'
+ } | \
+ sed '
+ s|, FROM| FROM|g
+ s|AND;|;|g
+ ' | \
+ mysql_run_query --html --column-names
+}
+
+# mysql_generate_package_metadata $current_repository $package $git_revision $mod_git_revision $repository
+# or
+# mysql_generate_package_metadata $current_repository $package.$git_revision.$mod_git_revision.$repository
+# if sub_pkgrel should be determined automatically
+# and
+# mysql_generate_package_metadata $sub_pkgrel $current_repository $package $git_revision $mod_git_revision $repository
+# or
+# mysql_generate_package_metadata $sub_pkgrel $current_repository $package.$git_revision.$mod_git_revision.$repository
+# if $sub_pkgrel should be forced
+
+# generate the metadata of a package (dependencies, built packages, ...) in the database
+
+mysql_generate_package_metadata() {
+
+ ( # new shell is intentional
+ case "$1" in
+ ''|*[!0-9]*)
+ unset forced_sub_pkgrel
+ ;;
+ *)
+ forced_sub_pkgrel=$(
+ printf '%s' "$1" | \
+ base64 -w0
+ )
+ shift
+ ;;
+ esac
+ current_repository="$1"
+ package="$2"
+
+ if [ $# -eq 2 ]; then
+ # second form
+ repository="${package##*.}"
+ package="${package%.*}"
+ mod_git_revision="${package##*.}"
+ package="${package%.*}"
+ git_revision="${package##*.}"
+ package="${package%.*}"
+ else
+ git_revision="$3"
+ mod_git_revision="$4"
+ repository="$5"
+ fi
+
+ temp_dir=$(mktemp -d 'tmp.mysql-functions.mysql_generate_package_metadata.XXXXXXXXXX' --tmpdir)
+ trap 'rm -rf --one-file-system "${temp_dir}"' EXIT
+
+ printf '.' >&2
+ if ! make_source_info "${package}" "${repository}" "${git_revision}" "${mod_git_revision}" "${temp_dir}/SRCINFO"; then
+ printf '"make_source_info %s %s %s %s %s" failed.\n' "${package}" "${repository}" "${git_revision}" "${mod_git_revision}" "${temp_dir}/SRCINFO"
+ exit 2
+ fi
+ # remove empty lines and unsupported architectures
+ sed -i '
+ /^[^=]*=\s*$/d
+ /^\s*arch = /{
+ / \(i686\|any\)$/!d
+ }
+ ' "${temp_dir}/SRCINFO"
+
+ if [ ! -s "${temp_dir}/SRCINFO" ]; then
+ >&2 printf '"make_source_info" had empty output - eh, what?\n'
+ exit 2
+ fi
+ printf '\n\n' >> "${temp_dir}/SRCINFO"
+
+ printf '.' >&2
+ pkgbase=$(
+ grep '^pkgbase = ' "${temp_dir}/SRCINFO" | \
+ cut -d' ' -f3
+ )
+ if [ -z "${pkgbase}" ]; then
+ >&2 printf '"make_source_info" did not return a "pkgbase" - eh, what?\n'
+ exit 2
+ fi
+
+ # add the package source
+ mysql_add_package_source "${pkgbase}" "${git_revision}" "${mod_git_revision}" "${repository}"
+ printf '.' >&2
+
+ # now we encode everything in base64
+ current_repository=$(
+ printf '%s' "${current_repository}" | \
+ base64 -w0
+ )
+ pkgbase=$(
+ printf '%s' "${pkgbase}" | \
+ base64 -w0
+ )
+ git_revision=$(
+ printf '%s' "${git_revision}" | \
+ base64 -w0
+ )
+ mod_git_revision=$(
+ printf '%s' "${mod_git_revision}" | \
+ base64 -w0
+ )
+ repository=$(
+ printf '%s' "${repository}" | \
+ base64 -w0
+ )
+
+ # add the build assignment(s)
+ {
+ archs=$(
+ sed -n '
+ s/^\tarch = //
+ T
+ p
+ ' "${temp_dir}/SRCINFO" | \
+ grep -vxF 'any' | \
+ sort -u
+ )
+ if [ -z "${archs}" ]; then
+ echo 'any'
+ else
+ printf '%s\n' "${archs}"
+ fi
+ } | \
+ while read -r arch; do
+ printf 'INSERT IGNORE INTO `build_assignments` (`package_source`,`architecture`,`is_blocked`,`is_broken`,`priority`)'
+ printf ' SELECT `package_sources`.`id`,`architectures`.`id`,NULL,0,0'
+ printf ' FROM `architectures` JOIN `package_sources`'
+ printf ' WHERE `architectures`.`name` = from_base64("%s")' \
+ "$(
+ printf '%s' "${arch}" | \
+ base64 -w0
+ )"
+ printf ' AND `package_sources`.`%s` = from_base64("%s")' \
+ 'pkgbase' "${pkgbase}" \
+ 'git_revision' "${git_revision}" \
+ 'mod_git_revision' "${mod_git_revision}"
+ printf ';\n'
+ done > \
+ "${temp_dir}/add-build-assignments-command"
+
+ # TODO: correctly link between binary_packages and build_assignments using any_arch
+
+ # shellcheck disable=SC2034
+ # select any specific arch (which will be building the 'any' part of a split package)
+ any_arch=$(
+ {
+ sed -n '
+ s/^\tarch = //
+ T
+ p
+ ' "${temp_dir}/SRCINFO" | \
+ sort -r | \
+ grep -vxFm 1 'any' || \
+ echo 'any'
+ } | \
+ base64_encode_each
+ )
+
+ grep '^pkgname = ' "${temp_dir}/SRCINFO" | \
+ cut -d' ' -f3 | \
+ while read -r pkgname; do
+ pkgname64=$(
+ printf '%s' "${pkgname}" | \
+ base64 -w0
+ )
+ sed -n '
+ /^pkgbase = \|^pkgname = '"$(str_to_regex "${pkgname}")"'$/{
+ :a
+ N
+ /\n$/{
+ p
+ T
+ }
+ ba
+ }
+ ' "${temp_dir}/SRCINFO" | \
+ sed '
+ /^\S/d
+ s/^\s*//
+ ' > \
+ "${temp_dir}/BINARYINFO.${pkgname64}"
+
+ grep '^arch = ' "${temp_dir}/BINARYINFO.${pkgname64}" | \
+ cut -d' ' -f3 | \
+ while read -r arch; do
+ arch64=$(
+ printf '%s' "${arch}" | \
+ base64 -w0
+ )
+ sed '
+ s/^\(\S\+\)_'"${arch}"' = /\1 = /
+ ' "${temp_dir}/BINARYINFO.${pkgname64}" > \
+ "${temp_dir}/ARCHINFO ${pkgname64} ${arch64}"
+ done
+ done
+ find "${temp_dir}" -mindepth 1 -maxdepth 1 -name 'ARCHINFO * *' -printf '%f\n' | \
+ while read -r _ pkgname arch; do
+ pkgver=$(
+ grep '^pkgver = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" | \
+ cut -d' ' -f3 | \
+ base64_encode_each
+ )
+ pkgrel=$(
+ grep '^pkgrel = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" | \
+ cut -d' ' -f3 | \
+ sed 's/\.[0-9]\+$//' | \
+ base64_encode_each
+ )
+ epoch=$(
+ {
+ grep '^epoch = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" || \
+ echo 'epoch = 0'
+ } | \
+ cut -d' ' -f3 | \
+ base64_encode_each
+ )
+ provides=$(
+ grep '^\(groups\|provides\) = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" | \
+ cut -d' ' -f3 | \
+ sed 's/[<>=].*$//' | \
+ base64_encode_each
+ )
+ makedepends=$(
+ grep '^makedepends = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" | \
+ cut -d' ' -f3 | \
+ sed 's/[<>=].*$//' | \
+ base64_encode_each
+ )
+ checkdepends=$(
+ grep '^checkdepends = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" | \
+ cut -d' ' -f3 | \
+ sed 's/[<>=].*$//' | \
+ base64_encode_each
+ )
+ rundepends=$(
+ grep '^depends = ' "${temp_dir}/ARCHINFO ${pkgname} ${arch}" | \
+ cut -d' ' -f3 | \
+ sed 's/[<>=].*$//' | \
+ base64_encode_each
+ )
+ if [ -n "${forced_sub_pkgrel}" ]; then
+ sub_pkgrel='from_base64("'"${forced_sub_pkgrel}"'")'
+ else
+ sub_pkgrel=$(
+ printf '(SELECT COALESCE('
+ # do not add binary packages which are currently on the
+ # build-list or in $current_repository (beware of split
+ # packages!)
+ printf '(SELECT `sub_pkgrel` FROM `binary_packages`'
+ mysql_join_binary_packages_architectures
+ mysql_join_binary_packages_repositories
+ printf ' WHERE'
+ printf ' `binary_packages`.`%s`=from_base64("%s") AND' \
+ 'epoch' "${epoch}" \
+ 'pkgver' "${pkgver}" \
+ 'pkgrel' "${pkgrel}" \
+ 'pkgname' "${pkgname}"
+ printf ' `architectures`.`name`=from_base64("%s")' \
+ "${arch}"
+ printf ' AND `repositories`.`name` IN ("build-list",from_base64("%s"))),' \
+ "${current_repository}"
+ # max(sub_pkgrel)+1
+ printf '(SELECT 1+MAX(`binary_packages`.`sub_pkgrel`) FROM `binary_packages`'
+ mysql_join_binary_packages_architectures
+ printf ' WHERE'
+ printf ' `binary_packages`.`%s`=from_base64("%s") AND' \
+ 'epoch' "${epoch}" \
+ 'pkgver' "${pkgver}" \
+ 'pkgrel' "${pkgrel}" \
+ 'pkgname' "${pkgname}"
+ if printf '%s' "${arch}" | base64 -d | grep -qxF 'any'; then
+ # 'any' gets higher sub_pkgrel than any architecture
+ printf ' 1'
+ else
+ # not-'any' gets higher sub_pkgrel than same or 'any' architecture
+ printf ' (`architectures`.`name`=from_base64("%s") OR `architectures`.`name`="any")' \
+ "${arch}"
+ fi
+ printf ')'
+ printf ',0))'
+ )
+ fi
+ {
+ printf 'INSERT IGNORE INTO `binary_packages` ('
+ printf '`%s`,' \
+ 'build_assignment' \
+ 'repository' \
+ 'architecture' \
+ 'epoch' \
+ 'pkgver' \
+ 'pkgrel' \
+ 'pkgname' \
+ 'sub_pkgrel' \
+ 'has_issues' \
+ 'is_tested' \
+ 'is_to_be_deleted'
+ printf ') SELECT '
+ printf '`%s`.`id`,' \
+ 'build_assignments' \
+ 'repositories' \
+ 'architectures'
+ printf 'from_base64("%s"),' \
+ "${epoch}" \
+ "${pkgver}" \
+ "${pkgrel}" \
+ "${pkgname}"
+ printf '%s,0,0,0 FROM' \
+ "${sub_pkgrel}"
+ printf ' `%s` JOIN' \
+ 'repositories' \
+ 'architectures' \
+ 'build_assignments'
+ mysql_join_build_assignments_package_sources
+ mysql_join_package_sources_upstream_repositories
+ printf ' WHERE'
+ printf ' `%s`.`%s` = from_base64("%s") AND' \
+ 'repositories' 'name' "${current_repository}" \
+ 'architectures' 'name' "${arch}" \
+ 'package_sources' 'pkgbase' "${pkgbase}" \
+ 'package_sources' 'git_revision' "${git_revision}" \
+ 'package_sources' 'mod_git_revision' "${mod_git_revision}" \
+ 'upstream_repositories' 'name' "${repository}"
+ printf ';\n'
+ } | \
+ sed '
+ s|,)|)|g
+ s| JOIN JOIN | JOIN |
+ s| AND;$|;|
+ ' >> \
+ "${temp_dir}/add-binary-packages-command"
+ {
+ printf 'CREATE TEMPORARY TABLE `%s` (`name` VARCHAR(64));\n' \
+ 'provides' \
+ 'makedepends' \
+ 'checkdepends' \
+ 'rundepends'
+
+ printf 'INSERT INTO `provides` VALUES\n'
+ echo "${provides}" | \
+ sort -u | \
+ grep -vxF '' | \
+ sed '
+ s|^| (from_base64("|
+ s|$|")),|
+ '
+ printf ' (CONCAT(from_base64("%s"),"-",from_base64("%s"),":",from_base64("%s"),"-",from_base64("%s"))),\n' \
+ "${pkgname}" \
+ "${epoch}" \
+ "${pkgver}" \
+ "${pkgrel}"
+ printf ' (CONCAT(from_base64("%s"),"-",from_base64("%s"),":",from_base64("%s"))),\n' \
+ "${pkgname}" \
+ "${epoch}" \
+ "${pkgver}"
+ printf ' (from_base64("%s"));\n' \
+ "${pkgname}"
+
+ printf 'INSERT INTO `rundepends` VALUES\n'
+ echo "${rundepends}" | \
+ sort -u | \
+ grep -vxF '' | \
+ sed '
+ s|^| (from_base64("|
+ s|$|")),|
+ '
+ if printf '%s' "${pkgname}" | \
+ base64 -d | \
+ grep -q -- '-doc$'; then
+ printf ' (CONCAT(from_base64("%s"),"-",from_base64("%s"),":",from_base64("%s"))),\n' \
+ "$(printf '%s' "${pkgname}" | base64 -d | sed 's/-doc$//' | base64 -w0)" \
+ "${epoch}" \
+ "${pkgver}"
+ fi
+ printf ' ("base");\n'
+
+ echo "${checkdepends}" | \
+ sort -u | \
+ grep -vxF '' | \
+ sed '
+ s|^| (from_base64("|
+ s|$|")),|
+ 1 s/^/INSERT INTO `checkdepends` VALUES \n/
+ $ s/,$/;/
+ '
+
+ printf 'INSERT INTO `makedepends` VALUES\n'
+ echo "${makedepends}" | \
+ sort -u | \
+ grep -vxF '' | \
+ sed '
+ s|^| (from_base64("|
+ s|$|")),|
+ '
+ printf ' ("base-devel");\n'
+
+ printf 'INSERT IGNORE INTO `install_targets` (`name`)'
+ printf ' SELECT (`name`) FROM `%s` UNION' \
+ 'provides' \
+ 'makedepends' \
+ 'checkdepends' \
+ 'rundepends' | \
+ sed 's| UNION$|;\n|'
+
+ for link in 'provides' 'makedepends' 'checkdepends' 'rundepends'; do
+ case "${link}" in
+ 'provides')
+ printf 'INSERT IGNORE INTO `install_target_providers` (`package`,`install_target`) SELECT'
+ printf ' `binary_packages`.`id`,`install_targets`.`id` FROM'
+ ;;
+ 'makedepends'|'checkdepends'|'rundepends')
+ printf 'INSERT IGNORE INTO `dependencies` (`dependent`,`depending_on`,`dependency_type`) SELECT'
+ printf ' `binary_packages`.`id`,`install_targets`.`id`,`dependency_types`.`id` FROM'
+ printf ' `dependency_types` JOIN'
+ ;;
+ esac
+ printf ' `binary_packages`'
+ mysql_join_binary_packages_architectures
+ printf ' JOIN `install_targets`'
+ printf ' JOIN `%s`' "${link}"
+ printf ' ON `%s`.`name` = `install_targets`.`name`' "${link}"
+ printf ' WHERE'
+ if [ "${link}" = 'makedepends' ] || \
+ [ "${link}" = 'checkdepends' ] || \
+ [ "${link}" = 'rundepends' ]; then
+ printf ' `dependency_types`.`name` = "%s" AND' \
+ "${link%depends}"
+ fi
+ printf ' `binary_packages`.`%s` = from_base64("%s") AND' \
+ 'epoch' "${epoch}" \
+ 'pkgver' "${pkgver}" \
+ 'pkgrel' "${pkgrel}" \
+ 'pkgname' "${pkgname}"
+ # we do not want to match the sub_pkgrel:
+ # a) it is tedious to do so (because it may be calculated
+ # dynamically)
+ # b) it is not necessary to do so: if only the sub_pkgrel
+ # changed, the dependencies and provided install_targets
+ # should not have changed
+ printf ' `architectures`.`name` = from_base64("%s");\n' \
+ "${arch}"
+ # the repository is of no relevance: it hardly matters for
+ # the dependencies
+ done
+
+ printf 'DROP TABLE `%s`;\n' \
+ 'provides' \
+ 'makedepends' \
+ 'checkdepends' \
+ 'rundepends'
+ } >> \
+ "${temp_dir}/add-install-targets-command"
+ done
+ printf '.' >&2
+
+ {
+ if [ -s "${temp_dir}/add-build-assignments-command" ]; then
+ cat "${temp_dir}/add-build-assignments-command"
+ fi
+ if [ -s "${temp_dir}/add-binary-packages-command" ]; then
+ cat "${temp_dir}/add-binary-packages-command"
+ fi
+ if [ -s "${temp_dir}/add-install-targets-command" ]; then
+ cat "${temp_dir}/add-install-targets-command"
+ fi
+ } | \
+ mysql_run_query
+ printf '.' >&2
+
+ )
+}
+
+# mysql_sanity_check
+# do a sanity check on the mysql database
+
+mysql_sanity_check() {
+ {
+ printf 'SELECT CONCAT("\\"any\\" build-assignment building \\"",`bp_arch`.`name`,"\\" binary package: ",`binary_packages`.`pkgname`)'
+ printf ' FROM `binary_packages`'
+ mysql_join_binary_packages_build_assignments
+ mysql_join_binary_packages_architectures '' 'bp_arch'
+ mysql_join_build_assignments_architectures '' 'ba_arch'
+ printf ' WHERE `bp_arch`.`name`!="any"'
+ printf ' AND `ba_arch`.`name`="any";\n'
+ printf 'SELECT DISTINCT CONCAT("package multiple times on build list: ",`a`.`pkgname`)'
+ printf ' FROM `binary_packages` AS `a`'
+ printf ' JOIN `binary_packages` AS `b`'
+ printf ' ON `a`.`pkgname`=`b`.`pkgname`'
+ printf ' AND `a`.`repository`=`b`.`repository`'
+ printf ' AND `a`.`id`!=`b`.`id`'
+ mysql_join_binary_packages_repositories 'a'
+ printf ' WHERE `repositories`.`name`="build-list";\n'
+ printf 'SELECT DISTINCT CONCAT("\\"split-package with differing sub_pkgrels on the build-list: ",`a`.`pkgname`)'
+ printf ' FROM `binary_packages` AS `a`'
+ printf ' JOIN `binary_packages` AS `b` ON `a`.`build_assignment`=`b`.`build_assignment`'
+ mysql_join_binary_packages_repositories 'a' 'arep'
+ mysql_join_binary_packages_repositories 'b' 'brep'
+ printf ' WHERE `a`.`sub_pkgrel`!=`b`.`sub_pkgrel`'
+ printf ' AND `%srep`.`name`="build-list"' \
+ 'a' 'b'
+ printf ';\n'
+ } | \
+ mysql_run_query | \
+ sed '
+ s,^,<font color="#FF0000">,
+ s,$,</font>,
+ '
+ ( # new shell is intentional
+ temp_dir=$(mktemp -d 'tmp.mysql-functions.mysql_sanity_check.XXXXXXXXXX' --tmpdir)
+ trap 'rm -rf --one-file-system "${temp_dir}"' EXIT
+
+ for dir in $(ls_master_mirror 'i686'); do
+ ls_master_mirror "i686/${dir}" | \
+ sed -n '
+ s/\.pkg\.tar\.xz$//
+ T
+ s/-\([0-9]\+\)-\([^-]\+\)$/-\1.0-\2/
+ s/-\([^-:]\+-[^-]\+-[^-]\+\)$/-0:\1/
+ s|^|'"${dir}"'/|
+ p
+ '
+ done | \
+ sort > \
+ "${temp_dir}/master-mirror-listing"
+
+ {
+ printf 'SELECT `repositories`.`name`,`pkgname`,`epoch`,`pkgver`,`pkgrel`,`sub_pkgrel`,`architectures`.`name`'
+ printf ' FROM `binary_packages`'
+ mysql_join_binary_packages_architectures
+ mysql_join_binary_packages_repositories
+ printf ' WHERE `repositories`.`is_on_master_mirror`'
+ } | \
+ mysql_run_query | \
+ sed '
+ s,\t,/,
+ s,\t,-,
+ s,\t,:,
+ s,\t,-,
+ s,\t,.,
+ s,\t,-,
+ ' | \
+ sort > \
+ "${temp_dir}/mysql-packages"
+
+ diff -u \
+ "${temp_dir}/master-mirror-listing" \
+ "${temp_dir}/mysql-packages"
+ )
+}
+
+mysql_find_build_assignment_loops() {
+ new_loops=$(
+ {
+ printf 'SELECT DISTINCT `packages_dependency`.`build_assignment`,`packages_dependent`.`build_assignment`'
+ printf ' FROM `dependencies`'
+ mysql_join_dependencies_install_target_providers
+ mysql_join_install_target_providers_binary_packages '' 'packages_dependency'
+ mysql_join_dependencies_binary_packages '' 'packages_dependent'
+ mysql_join_binary_packages_repositories 'packages_dependency' 'repositories_dependency'
+ mysql_join_binary_packages_repositories 'packages_dependent' 'repositories_dependent'
+ printf ' WHERE `repositories_dependent`.`name`="build-list" AND `repositories_dependency`.`name`="build-list"'
+ } | \
+ mysql_run_query | \
+ tr '\t' ' ' | \
+ tsort 2>&1 >/dev/null | \
+ sed 's/^tsort:\s*//' | \
+ {
+ loop=0
+ while read -r id; do
+ if [ "x${id}" = 'x-: input contains a loop:' ]; then
+ loop=$((loop+1))
+ continue
+ fi
+ if ! printf '%s' "${id}" | tr '\n' ' ' | grep -q '^[0-9]\+$'; then
+ >&2 printf 'ERROR: non-numeric id "%s"\n' "${id}"
+ continue
+ fi
+ printf '(%s,%s),' "${loop}" "${id}"
+ done | \
+ sed 's/,$//'
+ }
+ )
+ {
+ printf 'DELETE FROM `build_dependency_loops`;\n'
+ if [ -n "${new_loops}" ]; then
+ printf 'INSERT INTO `build_dependency_loops` (`loop`,`build_assignment`) VALUES %s;\n' \
+ "${new_loops}"
+ fi
+ } | \
+ mysql_run_query
+}
+
+# mysql_cleanup [dry]
+# clean up left overs from mysql database
+mysql_cleanup() {
+ local operator
+ if [ "$#" = '0' ]; then
+ operator='DELETE'
+ elif [ "$#" = '1' ] && [ "x$1" = 'xdry' ]; then
+ operator='SELECT COUNT(*)'
+ else
+ >&2 echo 'Unknown parameter'
+    >&2 echo 'Call "mysql_cleanup" or "mysql_cleanup dry".'
+ exit 2
+ fi
+ {
+ # remove to-be-decided binary_packages
+ printf '%s ' \
+ "${operator}"
+ if [ "${operator}" = 'DELETE' ]; then
+ printf '`binary_packages` '
+ fi
+ printf 'FROM `binary_packages`'
+ mysql_join_binary_packages_repositories
+ printf ' WHERE `repositories`.`name`="to-be-decided";\n'
+ # remove dependencies w/o binary_package or install_target
+ printf '%s FROM `dependencies` ' \
+ "${operator}"
+ printf 'WHERE NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `binary_packages` '
+ printf 'WHERE `dependencies`.`dependent`=`binary_packages`.`id`'
+ printf ') OR NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `install_targets` '
+ printf 'WHERE `dependencies`.`depending_on`=`install_targets`.`id`'
+ printf ');\n'
+ # remove install_target_providers w/o binary_package or install_target
+ printf '%s FROM `install_target_providers` ' \
+ "${operator}"
+ printf 'WHERE NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `binary_packages` '
+ printf 'WHERE `install_target_providers`.`package`=`binary_packages`.`id`'
+ printf ') OR NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `install_targets` '
+ printf 'WHERE `install_target_providers`.`install_target`=`install_targets`.`id`'
+ printf ');\n'
+ # remove build_assignments w/o binary_package
+ printf '%s FROM `build_assignments` ' \
+ "${operator}"
+ printf 'WHERE NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `binary_packages` '
+ printf 'WHERE `binary_packages`.`build_assignment`=`build_assignments`.`id`'
+ printf ');\n'
+ # remove failed_builds with unbroken build_assignments
+ printf '%s ' \
+ "${operator}"
+ if [ "${operator}" = 'DELETE' ]; then
+ printf '`failed_builds` '
+ fi
+ printf 'FROM `failed_builds` '
+ mysql_join_failed_builds_build_assignments
+ printf 'WHERE NOT `build_assignments`.`is_broken`'
+ printf ';\n'
+ # remove failed_builds w/o build_assignment
+ printf '%s FROM `failed_builds` ' \
+ "${operator}"
+ printf 'WHERE NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `build_assignments` '
+ printf 'WHERE `build_assignments`.`id`=`failed_builds`.`build_assignment`'
+ printf ');\n'
+ # remove package_sources w/o build_assignment
+ printf '%s FROM `package_sources` ' \
+ "${operator}"
+ printf 'WHERE NOT EXISTS '
+ printf '('
+ printf 'SELECT * FROM `build_assignments` '
+ printf 'WHERE `build_assignments`.`package_source`=`package_sources`.`id`'
+ printf ');\n'
+ printf 'UPDATE `build_slaves`'
+ mysql_join_build_slaves_binary_packages
+ mysql_join_binary_packages_repositories
+ printf ' SET `currently_building`=NULL'
+ printf ' WHERE `repositories`.`name`!="build-list";\n'
+ } | \
+ mysql_run_query
+}
+
+# mysql_query_has_pending_dependencies `build_assignment`.`id`
+# print a mysql query telling whether dependencies are pending
+mysql_query_has_pending_dependencies() {
+ printf 'EXISTS ('
+ printf 'SELECT * FROM `binary_packages` as `to_dos`'
+ mysql_join_binary_packages_repositories 'to_dos' 'to_do_repos'
+ mysql_join_binary_packages_dependencies 'to_dos'
+ mysql_join_dependencies_install_target_providers
+ mysql_join_install_target_providers_binary_packages '' 'bin_deps'
+ mysql_join_binary_packages_repositories 'bin_deps' 'dep_repos'
+ printf ' WHERE'
+ printf ' `%s`.`name`="build-list" AND' \
+ 'dep_repos' 'to_do_repos'
+ printf ' `bin_deps`.`build_assignment`!=`to_dos`.`build_assignment` AND'
+ printf ' `to_dos`.`build_assignment`=%s' \
+ "$1"
+ printf ')'
+}
+
+# mysql_query_is_part_of_loop `build_assignment`.`id`
+# print a mysql query telling whether the package is part of a loop
+mysql_query_is_part_of_loop() {
+ printf 'EXISTS ('
+ printf 'SELECT * FROM `build_dependency_loops`'
+ printf ' WHERE `build_dependency_loops`.`build_assignment`=%s' \
+ "$1"
+ printf ')'
+}
+
+# mysql_query_select_pkgbase_and_revision
+# print the part of a mysql query giving:
+# pkgbase git_revision mod_git_revision upstream_package_repository
+mysql_query_select_pkgbase_and_revision() {
+ printf '`package_sources`.`%s`,' \
+ 'pkgbase' \
+ 'git_revision' \
+ 'mod_git_revision'
+ printf '`upstream_repositories`.`name`'
+ printf ' FROM `build_assignments`'
+ mysql_join_build_assignments_package_sources
+ mysql_join_package_sources_upstream_repositories
+}
+
+# mysql_repair_binary_packages_without_build_assignment
+# try to generate valid build assignments to binary packages without
+# a valid one yet
+mysql_repair_binary_packages_without_build_assignment() {
+ {
+ printf 'SELECT '
+ printf '`binary_packages`.`id`'
+ printf ',replace(to_base64(%s),"\\n","")' \
+ '`binary_packages`.`pkgname`' \
+ '`architectures`.`name`'
+ printf ' FROM `binary_packages`'
+ mysql_join_binary_packages_architectures
+ printf ' WHERE `binary_packages`.`build_assignment`<0'
+ } | \
+ mysql_run_query | \
+ while read -r id pkgname arch; do
+ pkgname=$(
+ printf '%s' "${pkgname}" | \
+ base64 -d
+ )
+ pkgbase=$(
+ curl -Ss "$(
+ printf 'https://www.archlinux.org/packages/search/json/?name=%s' \
+ "${pkgname}"
+ )" | \
+ sed '
+ s/^.*"results":\s*\[//
+ s/}\s*,\s*{/\n/g
+ ' | \
+ grep '"pkgname":\s*"'"$(str_to_regex "${pkgname}")"'"' | \
+ tr ',' '\n' | \
+ grep '"pkgbase":' | \
+ cut -d'"' -f4 | \
+ sort -u | \
+ head -n1
+ )
+ if [ -z "${pkgbase}" ] && \
+ {
+ printf 'SELECT count(*) FROM `package_sources`'
+ printf ' WHERE `package_sources`.`pkgbase`=from_base64("%s")' \
+ "$(printf '%s' "${pkgname}" | base64 -w0)"
+ } | \
+ mysql_run_query | \
+ grep -qvxF '0'; then
+ pkgbase="${pkgname}"
+ fi
+ if [ -z "${pkgbase}" ]; then
+ >&2 printf 'Could not find "%s" upstream.\n' "${pkgname}"
+ continue
+ fi
+ pkgbase=$(
+ printf '%s' "${pkgbase}" | \
+ base64 -w0
+ )
+ printf 'INSERT IGNORE INTO `build_assignments` (`package_source`,`architecture`,`is_blocked`,`is_broken`,`priority`)'
+ printf ' SELECT `package_sources`.`id`,`architectures`.`id`,0,0,0'
+ printf ' FROM `package_sources`'
+ printf ' JOIN `architectures`'
+ printf ' WHERE `package_sources`.`pkgbase`=from_base64("%s")' "${pkgbase}"
+ printf ' AND `architectures`.`name`=from_base64("%s")' "${arch}"
+ printf ' LIMIT 1;\n'
+ printf 'UPDATE `binary_packages`'
+ printf ' JOIN `build_assignments`'
+ mysql_join_binary_packages_build_assignments
+ printf ' SET `binary_packages`.`build_assignment`=`build_assignments`.`id`'
+ printf ' WHERE `binary_packages`.`id`=%s' "${id}"
+ printf ' AND `package_sources`.`pkgbase`=from_base64("%s");\n' "${pkgbase}"
+ done | \
+ mysql_run_query
+}
+
+# mysql_remove_duplicate_build_order
+# remove duplicate binary_packages which match in pkgname, epoch, pkgver and
+# pkgrel and differ by 1 in sub_pkgrel
+
+mysql_remove_duplicate_build_order() {
+ {
+ printf 'CREATE TEMPORARY TABLE `ren`'
+ printf ' (`old` BIGINT, `new` BIGINT, `repo` BIGINT, `sub_pkgrel` BIGINT);\n'
+ printf 'INSERT INTO `ren` (`old`,`new`,`repo`,`sub_pkgrel`)'
+ printf ' SELECT `old`.`id`,`new`.`id`,`old`.`repository`,`old`.`sub_pkgrel`'
+ printf ' FROM `binary_packages` as `old`'
+ printf ' JOIN `binary_packages` as `new` ON'
+ printf ' `old`.`%s`=`new`.`%s` AND' \
+ 'pkgname' 'pkgname' \
+ 'epoch' 'epoch' \
+ 'pkgver' 'pkgver' \
+ 'pkgrel' 'pkgrel'
+ printf ' `old`.`sub_pkgrel`+1=`new`.`sub_pkgrel`'
+ mysql_join_binary_packages_repositories 'old' 'orep'
+ mysql_join_binary_packages_repositories 'new' 'nrep'
+ printf ' WHERE `orep`.`name`!="to-be-decided"'
+ printf ' AND `nrep`.`name`="to-be-decided";\n'
+ printf 'UPDATE IGNORE `dependencies`'
+ printf ' JOIN `ren` ON `ren`.`old`=`dependencies`.`dependent`'
+ printf ' SET `dependencies`.`dependent`=`ren`.`new`;\n'
+ printf 'UPDATE IGNORE `install_target_providers`'
+ printf ' JOIN `ren` ON `ren`.`old`=`install_target_providers`.`package`'
+ printf ' SET `install_target_providers`.`package`=`ren`.`new`;\n'
+ printf 'DELETE FROM `binary_packages`'
+ printf ' WHERE EXISTS ('
+ printf 'SELECT * FROM `ren`'
+ printf ' WHERE `ren`.`old`=`binary_packages`.`id`'
+ printf ');\n'
+ printf 'UPDATE IGNORE `binary_packages`'
+ printf ' JOIN `ren` ON `ren`.`new`=`binary_packages`.`id`'
+ printf ' SET `binary_packages`.`repository`=`ren`.`repo`,'
+ printf ' `binary_packages`.`sub_pkgrel`=`ren`.`sub_pkgrel`;\n'
+ } | \
+ mysql_run_query
+}
+
+# mysql_package_name_query
+# print a mysql query of the full name of a package file
+
+mysql_package_name_query() {
+ printf 'CONCAT('
+ printf '`binary_packages`.`pkgname`,"-",'
+ printf 'IF(`binary_packages`.`epoch`=0,"",CONCAT(`binary_packages`.`epoch`,":")),'
+ printf '`binary_packages`.`pkgver`,"-",'
+ printf '`binary_packages`.`pkgrel`,".",'
+ printf '`binary_packages`.`sub_pkgrel`,"-",'
+ printf '`architectures`.`name`,".pkg.tar.xz"'
+ printf ')'
+}
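For reference (annotation, not part of the patch), the emitted SQL expression evaluates to file names of the form

    ${pkgname}-[${epoch}:]${pkgver}-${pkgrel}.${sub_pkgrel}-${arch}.pkg.tar.xz

with the epoch part omitted when it is 0 - e.g. a hypothetical zsh-5.4.2-2.0-i686.pkg.tar.xz.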
+
+# mysql_join_*_*
+# print 'JOIN' part of mysql query to connect the respective tables
+# these functions take 2 optional arguments, acting as aliases for
+# the tables
+
+# mysql_join__generic $table_a $column_a $table_b $column_b
+# create mysql_join_${table_a}_${table_b}() function
+
+mysql_join__generic() {
+ eval "$(
+ printf 'mysql_join_%s_%s() {\n' "$1" "$3"
+ printf ' printf '"'"' JOIN `%s`'"'"'\n' "$3"
+ printf ' if [ -n "$2" ]; then\n'
+ printf ' printf '"'"' AS `%%s`'"'"' "$2"\n'
+ printf ' fi\n'
+ printf ' if [ -n "$1" ]; then\n'
+ printf ' printf '"'"' ON `%%s`.`%s`='"'"' "$1"\n' "$2"
+ printf ' else\n'
+ printf ' printf '"'"' ON `%s`.`%s`='"'"'\n' "$1" "$2"
+ printf ' fi\n'
+ printf ' if [ -n "$2" ]; then\n'
+ printf ' printf '"'"'`%%s`.`%s`'"'"' "$2"\n' "$4"
+ printf ' else\n'
+ printf ' printf '"'"'`%s`.`%s`'"'"'\n' "$3" "$4"
+ printf ' fi\n'
+ printf '}\n'
+ )"
+}
+
+for link in \
+ 'binary_packages:architecture:architectures' \
+ 'binary_packages:repository:repositories' \
+ 'binary_packages:build_assignment:build_assignments' \
+ \
+ 'build_assignments:architecture:architectures' \
+ 'build_assignments:package_source:package_sources' \
+ \
+ 'build_dependency_loops:build_assignment:build_assignments' \
+ 'build_dependency_loops:build_assignment build_assignment:binary_packages' \
+ \
+ 'build_slaves:currently_building:build_assignments' \
+ 'build_slaves:currently_building build_assignment:binary_packages' \
+ \
+ 'dependencies:depending_on:install_targets' \
+ 'dependencies:dependent:binary_packages' \
+ 'dependencies:dependency_type:dependency_types' \
+ \
+ 'failed_builds:reason:fail_reason' \
+ 'failed_builds:build_assignment:build_assignments' \
+ 'failed_builds:build_slave:build_slaves' \
+ \
+ 'install_target_providers:package:binary_packages' \
+ 'install_target_providers:install_target:install_targets' \
+ 'install_target_providers:install_target depending_on:dependencies' \
+ \
+ 'package_sources:upstream_package_repository:upstream_repositories' \
+ \
+ 'repositories:stability:repository_stabilities' \
+ \
+ 'repository_moves:upstream_package_repository:upstream_repositories' \
+ \
+ 'upstream_repositories:git_repository:git_repositories'; do
+# A join for these cannot be done, because it's not clear on what to join:
+# 'repository_stability_relations:more_stable:repository_stabilities'
+# 'repository_stability_relations:less_stable:repository_stabilities'
+
+ table_b="${link##*:}"
+ table_a="${link%:*}"
+ column_b="${table_a##*:}"
+ table_a="${table_a%:*}"
+ column_a="${column_b% *}"
+ if [ "${column_a}" = "${column_b}" ]; then
+ column_b='id'
+ else
+ column_b="${column_b##* }"
+ fi
+
+ mysql_join__generic "${table_a}" "${column_a}" "${table_b}" "${column_b}"
+ mysql_join__generic "${table_b}" "${column_b}" "${table_a}" "${column_a}"
+done
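Example of the generated helpers (annotation, not part of the patch): the loop above defines join functions in both directions; called without arguments they join on the foreign key, and the two optional arguments are aliases for the left and the right table:

    mysql_join_binary_packages_repositories
    # prints: JOIN `repositories` ON `binary_packages`.`repository`=`repositories`.`id`
    mysql_join_binary_packages_repositories 'old' 'orep'
    # prints: JOIN `repositories` AS `orep` ON `old`.`repository`=`orep`.`id`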