ci: Remove "publish" step (#2936)

This commit removes the publish step in GitHub Actions, instead folding
all of its functionality into the release build steps. This avoids
having a separately scheduled job after all the release build jobs,
which ends up getting delayed for quite a long time under the current
scheduling algorithm.

This involves refactoring the tarball assembly scripts as well as the
GitHub asset upload script. Tarball assembly now manages everything
internally, doing platform-specific bits where necessary. The upload
script is restructured so it can run in parallel (in theory): it tries to
catch various errors and to avoid stomping on other builders' work.
The main trickiness here is handling `dev`, which is less critical for
correctness than the tags themselves.
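
The heart of that race handling is a fetch-or-create-or-refetch pattern. As a
minimal sketch distilled from the new main.js below (assuming `octokit`,
`owner`, `repo`, and `name` are initialized as in that script):

// Try the cheap path first: the release may already exist.
let release = null;
try {
  release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
} catch (e) {
  // No release yet; try to create one.
  try {
    release = await octokit.repos.createRelease({
      owner,
      repo,
      tag_name: name,
      prerelease: name === 'dev',
    });
  } catch (e) {
    // Another builder won the creation race, so the release exists now.
    release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
  }
}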

As a small build tweak, the QEMU build for cross-compiled builders is
now cached, where previously it was unconditionally rebuilt, shaving a
minute or two off build times.
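
The caching uses a sentinel-file pattern: actions/cache restores
`${{ runner.tool_cache }}/qemu` keyed on the target and QEMU version, and the
install step bails out early when a marker file from a previous build is
present. A condensed sketch of the shell side (here `$QEMU_TARGET` is a
stand-in for the matrix value, and `RUNNER_TOOL_CACHE` is the env-var form of
`runner.tool_cache`):

# Skip the expensive from-source QEMU build on a cache hit.
if [ -f "$RUNNER_TOOL_CACHE/qemu/built" ]; then
  exit 0
fi
curl https://download.qemu.org/qemu-5.0.0.tar.xz | tar xJf -
cd qemu-5.0.0
./configure --target-list="$QEMU_TARGET" --prefix="$RUNNER_TOOL_CACHE/qemu" --disable-tools
make -j$(nproc) install
# Leave a marker so the cached copy is detected next time.
touch "$RUNNER_TOOL_CACHE/qemu/built"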
Alex Crichton authored on 2021-05-25 12:52:41 -05:00, committed by GitHub
parent e5ac9350b1, commit 2b0649c74c
5 changed files with 172 additions and 200 deletions

.github/actions/github-release/Dockerfile (deleted)

@@ -1,8 +0,0 @@
-FROM node:slim
-
-COPY . /action
-WORKDIR /action
-
-RUN npm install --production
-
-ENTRYPOINT ["node", "/action/main.js"]

.github/actions/github-release/action.yml

@@ -4,12 +4,9 @@ inputs:
   token:
     description: ''
     required: true
-  name:
-    description: ''
-    required: true
   files:
     description: ''
     required: true
 runs:
-  using: 'docker'
-  image: 'Dockerfile'
+  using: 'node12'
+  main: 'main.js'

.github/actions/github-release/main.js

@@ -11,12 +11,15 @@ function sleep(milliseconds) {
 async function runOnce() {
   // Load all our inputs and env vars. Note that `getInput` reads from `INPUT_*`
   const files = core.getInput('files');
-  const name = core.getInput('name');
   const token = core.getInput('token');
   const slug = process.env.GITHUB_REPOSITORY;
   const owner = slug.split('/')[0];
   const repo = slug.split('/')[1];
   const sha = process.env.GITHUB_SHA;
+  let name = 'dev';
+  if (process.env.GITHUB_REF.startsWith('refs/tags/v')) {
+    name = process.env.GITHUB_REF.substring(10);
+  }

   core.info(`files: ${files}`);
   core.info(`name: ${name}`);
@@ -24,21 +27,33 @@ async function runOnce() {
   const octokit = new github.GitHub(token);

-  // Delete the previous release since we can't overwrite one. This may happen
-  // due to retrying an upload or it may happen because we're doing the dev
-  // release.
-  const releases = await octokit.paginate("GET /repos/:owner/:repo/releases", { owner, repo });
-  for (const release of releases) {
-    if (release.tag_name !== name) {
-      continue;
-    }
-    const release_id = release.id;
-    core.info(`deleting release ${release_id}`);
-    await octokit.repos.deleteRelease({ owner, repo, release_id });
-  }
-
-  // We also need to update the `dev` tag while we're at it on the `dev` branch.
-  if (name == 'dev') {
+  // For the `dev` release we may need to update the tag to point to the new
+  // commit on this branch. All other names should already have tags associated
+  // with them.
+  if (name == 'dev') {
+    let tag = null;
+    try {
+      tag = await octokit.request("GET /repos/:owner/:repo/git/refs/tags/:name", { owner, repo, name });
+      core.info(`found existing tag`);
+      console.log("tag: ", JSON.stringify(tag.data, null, 2));
+    } catch (e) {
+      // ignore if this tag doesn't exist
+      core.info(`no existing tag found`);
+    }
+
+    if (tag === null || tag.data.object.sha !== sha) {
+      core.info(`updating existing tag or creating new one`);
+
+      // Delete the previous release for this tag, if any
+      try {
+        core.info(`fetching release for ${name}`);
+        const release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
+        core.info(`deleting release ${release.data.id}`);
+        await octokit.repos.deleteRelease({ owner, repo, release_id: release.data.id });
+      } catch (e) {
+        // ignore, there may not have been a release
+        console.log("ERROR: ", JSON.stringify(e, null, 2));
+      }
+
       try {
         core.info(`updating dev tag`);
         await octokit.git.updateRef({
@@ -51,6 +66,7 @@ async function runOnce() {
       } catch (e) {
         console.log("ERROR: ", JSON.stringify(e, null, 2));
         core.info(`creating dev tag`);
+        try {
           await octokit.git.createTag({
             owner,
             repo,
@@ -59,18 +75,40 @@ async function runOnce() {
             object: sha,
             type: 'commit',
           });
+        } catch (e) {
+          // we might race with others, so assume someone else has created the
+          // tag by this point.
+        }
+      }
+    } else {
+      core.info(`existing tag works`);
+    }
   }
   }

-  // Creates an official GitHub release for this `tag`, and if this is `dev`
-  // then we know that from the previous block this should be a fresh release.
-  core.info(`creating a release`);
-  const release = await octokit.repos.createRelease({
-    owner,
-    repo,
-    tag_name: name,
-    prerelease: name === 'dev',
-  });
+  // Try to load the release for this tag, and if it doesn't exist then make a
+  // new one. We might race with other builders on creation, though, so if the
+  // creation fails try again to get the release by the tag.
+  let release = null;
+  try {
+    core.info(`fetching release`);
+    release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
+  } catch (e) {
+    console.log("ERROR: ", JSON.stringify(e, null, 2));
+    core.info(`creating a release`);
+    try {
+      release = await octokit.repos.createRelease({
+        owner,
+        repo,
+        tag_name: name,
+        prerelease: name === 'dev',
+      });
+    } catch(e) {
+      console.log("ERROR: ", JSON.stringify(e, null, 2));
+      core.info(`fetching one more time`);
+      release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
+    }
+  }
+  console.log("found release: ", JSON.stringify(release.data, null, 2));

   // Upload all the relevant assets for this release as just general blobs.
   for (const file of glob.sync(files)) {

.github/workflows/main.yml

@@ -394,6 +394,8 @@ jobs:
   build:
     name: Build wasmtime
    runs-on: ${{ matrix.os }}
+    env:
+      QEMU_VERSION: 5.0.0
    strategy:
      matrix:
        include:
@@ -434,30 +436,42 @@ jobs:
          rustup target add ${{ matrix.target }}
        if: matrix.target != ''
+      - uses: actions/cache@v2
+        with:
+          path: ${{ runner.tool_cache }}/qemu
+          key: qemu-${{ matrix.target }}-${{ env.QEMU_VERSION }}
+        if: matrix.target != '' && matrix.os == 'ubuntu-latest'
      - name: Install cross-compilation tools
        run: |
          set -ex
          sudo apt-get update
          sudo apt-get install -y ${{ matrix.gcc_package }}
+
+          # Configure Cargo for cross compilation and tell it how it can run
+          # cross executables
+          upcase=$(echo ${{ matrix.target }} | awk '{ print toupper($0) }' | sed 's/-/_/g')
+          echo CARGO_TARGET_${upcase}_RUNNER=${{ runner.tool_cache }}/qemu/bin/${{ matrix.qemu }} >> $GITHUB_ENV
+          echo CARGO_TARGET_${upcase}_LINKER=${{ matrix.gcc }} >> $GITHUB_ENV
+
+          # See comments in the source for why we enable this during QEMU
+          # emulation.
+          echo WASMTIME_TEST_NO_HOG_MEMORY=1 >> $GITHUB_ENV
+
+          # See if qemu is already in the cache
+          if [ -f ${{ runner.tool_cache }}/qemu/built ]; then
+            exit 0
+          fi
+
          # Download and build qemu from source since the most recent release is
          # way faster at arm emulation than the current version github actions'
          # ubuntu image uses. Disable as much as we can to get it to build
          # quickly.
          curl https://download.qemu.org/qemu-5.0.0.tar.xz | tar xJf -
          cd qemu-5.0.0
-          ./configure --target-list=${{ matrix.qemu_target }} --prefix=$HOME/qemu --disable-tools --disable-slirp --disable-fdt --disable-capstone --disable-docs
+          ./configure --target-list=${{ matrix.qemu_target }} --prefix=${{ runner.tool_cache }}/qemu --disable-tools --disable-slirp --disable-fdt --disable-capstone --disable-docs
          make -j$(nproc) install
-
-          # Configure Cargo for cross compilation and tell it how it can run
-          # cross executables
-          upcase=$(echo ${{ matrix.target }} | awk '{ print toupper($0) }' | sed 's/-/_/g')
-          echo CARGO_TARGET_${upcase}_RUNNER=$HOME/qemu/bin/${{ matrix.qemu }} >> $GITHUB_ENV
-          echo CARGO_TARGET_${upcase}_LINKER=${{ matrix.gcc }} >> $GITHUB_ENV
-          # See comments in the source for why we enable this during QEMU
-          # emulation.
-          echo WASMTIME_TEST_NO_HOG_MEMORY=1 >> $GITHUB_ENV
+          touch ${{ runner.tool_cache }}/qemu/built
        if: matrix.target != '' && matrix.os == 'ubuntu-latest'
      # Install wasm32-wasi target in order to build wasi-common's integration
@@ -487,122 +501,23 @@ jobs:
        env:
          RUST_BACKTRACE: 1

-      # Postprocess the macOS dylib a bit to have a more reasonable `LC_ID_DYLIB`
-      # directive than the default one that comes out of the linker when typically
-      # doing `cargo build`. For more info see #984
-      - run: install_name_tool -id "@rpath/libwasmtime.dylib" target/release/libwasmtime.dylib
-        if: matrix.os == 'macos-latest'
-
-      # ... and now perform some goop to move all the relevant artifacts into
-      # something that we'll upload from this action.
-      - run: mkdir dist
-
-      # Move binaries to dist folder
-      - run: cp target/release/wasmtime dist
-        if: matrix.os != 'windows-latest' && matrix.target == ''
-      - run: cp target/${{ matrix.target }}/release/wasmtime dist
-        if: matrix.os != 'windows-latest' && matrix.target != ''
-      - run: cp target/release/wasmtime.exe dist
-        if: matrix.build == 'x86_64-windows'
-      - run: cp target/x86_64-pc-windows-gnu/release/wasmtime.exe dist
-        if: matrix.build == 'x86_64-mingw'
-
-      # Move libwasmtime dylib to dist folder
-      - run: cp target/release/libwasmtime.{so,a} dist
-        if: matrix.os == 'ubuntu-latest' && matrix.target == ''
-      - run: cp target/${{ matrix.target }}/release/libwasmtime.{so,a} dist
-        if: matrix.os == 'ubuntu-latest' && matrix.target != ''
-      - run: cp target/release/libwasmtime.{dylib,a} dist
-        if: matrix.os == 'macos-latest'
-      - run: cp target/release/wasmtime.{dll,lib,dll.lib} dist
-        if: matrix.build == 'x86_64-windows'
-      - run: cp target/x86_64-pc-windows-gnu/release/{wasmtime.dll,libwasmtime.a} dist
-        if: matrix.build == 'x86_64-mingw'
-
-      # Make a Windows MSI installer if we're on Windows
-      - run: |
-          export WT_VERSION=`cat Cargo.toml | sed -n 's/^version = "\([^"]*\)".*/\1/p'`
-          "$WIX/bin/candle" -arch x64 -out target/wasmtime.wixobj ci/wasmtime.wxs
-          "$WIX/bin/light" -out dist/installer.msi target/wasmtime.wixobj -ext WixUtilExtension
-          rm dist/installer.wixpdb
-        if: matrix.build == 'x86_64-windows'
+      # Assemble release artifacts appropriate for this platform, then upload them
+      # unconditionally to this workflow's files so we have a copy of them.
+      - run: ./ci/build-tarballs.sh "${{ matrix.build }}" "${{ matrix.target }}"

      - uses: actions/upload-artifact@v1
        with:
          name: bins-${{ matrix.build }}
          path: dist

-  # Consumes all published artifacts from all the previous build steps, creates
-  # a bunch of tarballs for all of them, and then publishes the tarballs
-  # themselves as an artifact (for inspection) and then optionally creates
-  # github releases and/or tags for pushes.
-  publish:
-    name: Publish
-    needs: build
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-      with:
-        submodules: true
-    - run: rustup update stable && rustup default stable
-    - name: Download x86_64 macOS binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-macos
-    - name: Download x86_64 Linux binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-linux
-    - name: Download AArch64 Linux binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-aarch64-linux
-    - name: Download x86_64 Windows binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-windows
-    - name: Download x86_64 Windows MinGW binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-mingw
-    - name: Calculate tag name
-      run: |
-        name=dev
-        if [[ $GITHUB_REF == refs/tags/v* ]]; then
-          name=${GITHUB_REF:10}
-        fi
-        echo ::set-output name=val::$name
-        echo TAG=$name >> $GITHUB_ENV
-      id: tagname
-
-    # Assemble all the build artifacts into tarballs and zip archives.
-    - name: Assemble tarballs
-      run: |
-        ./ci/build-tarballs.sh x86_64-linux
-        ./ci/build-tarballs.sh x86_64-windows .exe
-        ./ci/build-tarballs.sh x86_64-mingw .exe
-        ./ci/build-tarballs.sh x86_64-macos
-        ./ci/build-tarballs.sh aarch64-linux
-
-    # Upload all assembled tarballs as an artifact of the github action run, so
-    # that way even PRs can inspect the output.
-    - uses: actions/upload-artifact@v1
-      with:
-        name: tarballs
-        path: dist
-
-    # ... and if this was an actual push (tag or `main`) then we publish a
-    # new release. This'll automatically publish a tag release or update `dev`
-    # with this `sha`
-    - run: cd .github/actions/github-release && npm install --production
-    - name: Publish Release
-      uses: ./.github/actions/github-release
-      if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
-      with:
-        files: "dist/*"
-        name: ${{ steps.tagname.outputs.val }}
-        token: ${{ secrets.GITHUB_TOKEN }}
+      # ... and if this was an actual push (tag or `main`) then we publish a
+      # new release. This'll automatically publish a tag release or update `dev`
+      # with this `sha`
+      - name: Publish Release
+        uses: ./.github/actions/github-release
+        if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
+        with:
+          files: "dist/*"
+          token: ${{ secrets.GITHUB_TOKEN }}

  cargo-audit:

ci/build-tarballs.sh

@@ -1,56 +1,86 @@
 #!/bin/bash

-# A small shell script invoked from CI on the final Linux builder which actually
-# assembles the release artifacts for a particular platform. This will take the
-# binary artifacts of previous builders and create associated tarballs to
-# publish to GitHub.
+# A small script used for assembling release tarballs for both the `wasmtime`
+# binary and the C API. This is executed with two arguments, mostly coming from
+# the CI matrix.
 #
-# The first argument of this is the "platform" name to put into the tarball, and
-# the second argument is the name of the github actions platform which is where
-# we source binaries from. The final third argument is ".exe" on Windows to
-# handle executable extensions right.
-#
-# Usage: build-tarballs.sh PLATFORM [.exe]
-#
-# where PLATFORM is e.g. x86_64-linux, aarch64-linux, ...
+# * The first argument is the name of the platform, used to name the release
+# * The second argument is the "target", if present, currently only for
+#   cross-compiles
+#
+# This expects the build to already be done and will assemble release artifacts
+# in `dist/`

 set -ex

 platform=$1
-exe=$2
+target=$2

 rm -rf tmp
 mkdir tmp
 mkdir -p dist

-mktarball() {
-  dir=$1
-  if [ "$exe" = "" ]; then
-    tar cJf dist/$dir.tar.xz -C tmp $dir
-  else
-    (cd tmp && zip -r ../dist/$dir.zip $dir)
-  fi
-}
-
-# Create the main tarball of binaries
-bin_pkgname=wasmtime-$TAG-$platform
-mkdir tmp/$bin_pkgname
-cp LICENSE README.md tmp/$bin_pkgname
-mv bins-$platform/wasmtime$exe tmp/$bin_pkgname
-chmod +x tmp/$bin_pkgname/wasmtime$exe
-mktarball $bin_pkgname
-if [ -f bins-$platform/installer.msi ]; then
-  mv bins-$platform/installer.msi dist/$bin_pkgname.msi
-fi
+tag=dev
+if [[ $GITHUB_REF == refs/tags/v* ]]; then
+  tag=${GITHUB_REF:10}
+fi

-# Create tarball of API libraries
-api_pkgname=wasmtime-$TAG-$platform-c-api
+bin_pkgname=wasmtime-$tag-$platform
+api_pkgname=wasmtime-$tag-$platform-c-api

 mkdir tmp/$api_pkgname
 mkdir tmp/$api_pkgname/lib
 mkdir tmp/$api_pkgname/include
+mkdir tmp/$bin_pkgname
 cp LICENSE README.md tmp/$api_pkgname
-mv bins-$platform/* tmp/$api_pkgname/lib
+cp LICENSE README.md tmp/$bin_pkgname
 cp crates/c-api/wasm-c-api/include/wasm.h tmp/$api_pkgname/include
 cp crates/c-api/include/{wasmtime,wasi}.h tmp/$api_pkgname/include

+fmt=tar
+if [ "$platform" = "x86_64-windows" ]; then
+  cp target/release/wasmtime.exe tmp/$bin_pkgname
+  cp target/release/{wasmtime.dll,wasmtime.lib,wasmtime.dll.lib} tmp/$api_pkgname/lib
+  fmt=zip
+
+  # Generate a `*.msi` installer for Windows as well
+  export WT_VERSION=`cat Cargo.toml | sed -n 's/^version = "\([^"]*\)".*/\1/p'`
+  "$WIX/bin/candle" -arch x64 -out target/wasmtime.wixobj ci/wasmtime.wxs
+  "$WIX/bin/light" -out dist/$bin_pkgname.msi target/wasmtime.wixobj -ext WixUtilExtension
+  rm dist/$bin_pkgname.wixpdb
+elif [ "$platform" = "x86_64-mingw" ]; then
+  cp target/x86_64-pc-windows-gnu/release/wasmtime.exe tmp/$bin_pkgname
+  cp target/x86_64-pc-windows-gnu/release/{wasmtime.dll,libwasmtime.a} tmp/$api_pkgname/lib
+  fmt=zip
+elif [ "$platform" = "x86_64-macos" ]; then
+  # Postprocess the macOS dylib a bit to have a more reasonable `LC_ID_DYLIB`
+  # directive than the default one that comes out of the linker when typically
+  # doing `cargo build`. For more info see #984
+  install_name_tool -id "@rpath/libwasmtime.dylib" target/release/libwasmtime.dylib
+
+  cp target/release/wasmtime tmp/$bin_pkgname
+  cp target/release/libwasmtime.{a,dylib} tmp/$api_pkgname/lib
+elif [ "$target" = "" ]; then
+  cp target/release/wasmtime tmp/$bin_pkgname
+  cp target/release/libwasmtime.{a,so} tmp/$api_pkgname/lib
+else
+  cp target/$target/release/wasmtime tmp/$bin_pkgname
+  cp target/$target/release/libwasmtime.{a,so} tmp/$api_pkgname/lib
+fi
+
+mktarball() {
+  dir=$1
+  if [ "$fmt" = "tar" ]; then
+    # this is a bit wonky, but the goal is to use `xz` with threaded compression
+    # to ideally get better performance with the `-T0` flag.
+    tar -cvf - -C tmp $dir | xz -9 -T0 > dist/$dir.tar.xz
+  else
+    # Note that this runs on Windows, and it looks like GitHub Actions doesn't
+    # have a `zip` tool there, so we use powershell
+    (cd tmp && powershell Compress-Archive $dir $dir.zip)
+    mv tmp/$dir.zip dist
+  fi
+}
+
 mktarball $api_pkgname
+mktarball $bin_pkgname
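
For reference, CI now invokes this script once per builder as
./ci/build-tarballs.sh "${{ matrix.build }}" "${{ matrix.target }}", which
expands to something like the following (the aarch64 target triple here is an
assumed matrix value, shown for illustration):

# Native Linux builder: no explicit target, binaries come from target/release.
./ci/build-tarballs.sh x86_64-linux ""

# Cross-compiled builder: binaries come from target/$target/release.
./ci/build-tarballs.sh aarch64-linux aarch64-unknown-linux-gnu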