File 0105-Add-github-release-sync.patch of Package erlang
From 3113ca6094d0f2fb479d79e6a03410f5b312a920 Mon Sep 17 00:00:00 2001
From: Lukas Larsson <lukas@erlang.org>
Date: Thu, 24 Sep 2020 15:35:52 +0200
Subject: [PATCH] Add github release sync
This commit adds a github workflow that periodically syncs
the contents of erlang.org/download with
https://github.com/erlang/otp/releases.
This is done for any release > OTP-21.
---
.github/workflows/sync-github-releases.yaml | 33 ++++
scripts/sync-github-releases.sh | 170 ++++++++++++++++++++
2 files changed, 203 insertions(+)
create mode 100644 .github/workflows/sync-github-releases.yaml
create mode 100755 scripts/sync-github-releases.sh
diff --git a/.github/workflows/sync-github-releases.yaml b/.github/workflows/sync-github-releases.yaml
new file mode 100644
index 0000000000..2a88558d6b
--- /dev/null
+++ b/.github/workflows/sync-github-releases.yaml
@@ -0,0 +1,33 @@
+name: Sync all github releases with erlang.org
+
+## Sync releases with erlang.org every 30 minutes
+on:
+ workflow_dispatch:
+ schedule:
+ ## In UTC
+ - cron: '*/30 * * * *'
+
+## Sync erlang.org download artifacts to the github releases
+jobs:
+
+ # Wait for up to a minute for previous runs to complete, abort if not done by then
+ pre-ci:
+ runs-on: ubuntu-latest
+ timeout-minutes: 1
+ steps:
+ - name: 'Block Concurrent Executions'
+ uses: softprops/turnstyle@v1
+ with:
+ poll-interval-seconds: 10
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ sync-releases:
+ if: github.repository == 'erlang/otp'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Sync releases
+ run: >
+ scripts/sync-github-releases.sh ${{ github.repository }}
+ "Bearer ${{ secrets.GITHUB_TOKEN }}" "^2[123]\\..*" 25m
diff --git a/scripts/sync-github-releases.sh b/scripts/sync-github-releases.sh
new file mode 100755
index 0000000000..8b9cef6744
--- /dev/null
+++ b/scripts/sync-github-releases.sh
@@ -0,0 +1,170 @@
+#!/bin/bash
+
+## This bash script takes 4 arguments
+## 1: Repository on github to sync to
+## 2: The auth token to be used (either "token TheToken" or "Bearer TheToken")
+## 3: A regexp for which releases to sync
+## 4: Optional timeout for the rsync command when it should stop syncing
+##
+## This script downloads artifacts from erlang.org/download and then publishes
+## them to the release for the corresponding TAG on github. If there is no release
+## then a release is created with the README as the body of the release.
+##
+## The script does not keep release artifacts up to date, so if an artifact is changed
+## on erlang.org then it will not be automatically updated on github.
+##
+## The reason why this is a polling script and not triggered when a new tag is pushed
+## is because when the TAG is pushed to github it is not guaranteed that the
+## corresponding artifacts will be available on erlang.org.
+
+set -e
+
+REPOSITORY=${1}
+TOKEN=${2}
+RELEASE_FILTER=${3}
+TIME_LIMIT=${4:-120m}
+HDR=(-H "Authorization: ${TOKEN}")
+REPO="https://api.github.com/repos/${REPOSITORY}"
+
+_json_escape () {
+ printf '```\n%s\n```' "${1}" | python -c 'import json,sys; print(json.dumps(sys.stdin.read()))'
+}
+
+_strip_name() {
+ echo ${1} | sed -e 's/^OTP[-_]//g'
+}
+
+_curl_get() {
+ curl --silent "${HDR[@]}" "${@}"
+}
+
+_curl_post() {
+ curl -o /dev/null --silent --fail --show-error -X POST "${HDR[@]}" \
+ -H "Accept: application/vnd.github.v3+json" "${@}"
+}
+
+RI=""
+ALL_TAGS=""
+CREATE_RELEASE=""
+TAG_URL="${REPO}/tags?per_page=100"
+
+## This function is used to loop over the paginated results from github tags
+## It sets TAGS to be the json from the current page of tags
+_next_page() {
+    TAGS=`curl -s "${HDR[@]}" ${TAG_URL}`
+ ## In the "Link:" header from github we get the link for the next page.
+ ## An example link header:
+ ## link: <https://api.github.com/repositories/843890/tags?per_page=100&page=2>; rel="next", <https://api.github.com/repositories/843890/tags?per_page=100&page=4>; rel="last"
+    TAG_URL=`curl -s -I "${HDR[@]}" ${TAG_URL} | grep "^link:" | sed -n 's/link:.* <\([^>]\+\)>; rel="next".*/\1/p'`
+}
+
+## First we fetch all tags and releases and build a list of all resources
+## that we should rsync from erlang.org. We only want to do one call to
+## rsync for all files as otherwise erlang.org will rate-limit us.
+while [ "${TAG_URL}" != "" ]; do
+ _next_page
+
+ ## Loop over all tags, we base64 encode each element in the array
+ ## in order to make the bash for loop work
+ for row in $(echo "${TAGS}" | jq -r '.[] | @base64'); do
+ _row() {
+ echo ${row} | base64 --decode | jq -r ${1}
+ }
+ name=$(_row '.name')
+ stripped_name=$(_strip_name ${name})
+
+ if echo ${stripped_name} | grep -E "${RELEASE_FILTER}" > /dev/null; then
+ ALL_TAGS="${ALL_TAGS} ${name}"
+ RELEASE=$(_curl_get "${REPO}/releases/tags/${name}")
+ if ! echo "${RELEASE}" | jq -er ".name" > /dev/null; then
+ CREATE_RELEASE="${CREATE_RELEASE} ${name}"
+ RI="*${stripped_name}* ${RI}"
+ echo "Create release ${name}"
+ else
+ _asset() {
+ local filename=${1}
+                local remotename=${2:-${filename}}
+                if ! echo "${RELEASE}" | jq -er ".assets[] | select(.name == \"${filename}\")" > /dev/null; then
+ echo "Sync ${remotename} for ${name}"
+ RI="${remotename} ${RI}"
+ fi
+ }
+ _asset "${name}.README" "${name}.README otp_src_${stripped_name}.readme"
+ _asset "otp_src_${stripped_name}.tar.gz" ""
+ _asset "otp_doc_html_${stripped_name}.tar.gz" ""
+ _asset "otp_doc_man_${stripped_name}.tar.gz" ""
+ _asset "otp_win32_${stripped_name}.exe" ""
+ _asset "otp_win64_${stripped_name}.exe" ""
+ fi
+ fi
+ done
+done
+
+RINCLUDE=""
+for i in ${RI}; do
+ RINCLUDE="--include=${i} ${RINCLUDE}"
+done
+
+## rsync the proper files, we will use which files have been
+## synced to determine which artifacts we should upload.
+## There is a timelimit here so that github actions will not
+## timeout
+! timeout ${TIME_LIMIT} rsync --archive --verbose --compress ${RINCLUDE} --exclude='*' \
+ erlang.org::erlang-download downloads
+
+## Rename all .readme files to .README
+for name in ${ALL_TAGS}; do
+ stripped_name=$(_strip_name ${name})
+ if [ -s "downloads/otp_src_${stripped_name}.readme" ]; then
+ mv downloads/otp_src_${stripped_name}.readme downloads/${name}.README
+ fi
+done
+
+## All tags that do not have a release we create a release for
+## using the readme as the body text if a readme is available.
+for name in ${CREATE_RELEASE}; do
+ echo "Create release for ${name}"
+ stripped_name=$(_strip_name ${name})
+ if [ -s "downloads/${name}.README" ]; then
+ README=`cat downloads/${name}.README`
+ else
+ README=""
+ fi
+ if echo "${README}" | grep "HIGHLIGHTS" > /dev/null; then
+ ## We have highlights, so only use those as the body
+
+ ## This awk script is a hack.
+ ## It counts the number of lines that start with '---' and
+ ## then outputs any text after the first '---' until the 7th.
+ README=`echo "${README}" | awk 'BEGIN{ echo=0 } { if ( $1 ~ /^---/ ) { echo++ } if ( echo > 0 && echo < 7 ) { print $0 } }'`
+ fi
+ if [ "${README}" != "" ]; then
+ RM=$(_json_escape "${README}")
+ BODY=", \"body\":${RM}"
+ else
+ BODY=""
+ fi
+ $(_curl_post "${REPO}/releases" -d '{"tag_name":"'"${name}"'", "name":"OTP '"${stripped_name}\"${BODY}}")
+done
+
+## Upload all assets for tags
+for name in ${ALL_TAGS}; do
+ echo "Upload artifacts for ${name}"
+ stripped_name=$(_strip_name ${name})
+ RELEASE=$(_curl_get "${REPO}/releases/tags/${name}")
+ UPLOAD_URL=`echo "${RELEASE}" | jq -r ".upload_url" | sed 's/{.*//'`
+ _upload() {
+ if [ -s downloads/${1} ]; then
+ echo "Upload ${1}"
+ $(_curl_post -H "Content-Type: ${2}" \
+ "${UPLOAD_URL}?name=${1}" \
+ --data-binary "@downloads/${1}")
+ fi
+ }
+ _upload "${name}.README" "text"
+ _upload "otp_src_${stripped_name}.tar.gz" "application/gzip"
+ _upload "otp_doc_html_${stripped_name}.tar.gz" "application/gzip"
+ _upload "otp_doc_man_${stripped_name}.tar.gz" "application/gzip"
+ _upload "otp_win32_${stripped_name}.exe" "application/x-msdownload"
+ _upload "otp_win64_${stripped_name}.exe" "application/x-msdownload"
+done
--
2.26.2