diff --git a/.github/actions/github-release/Dockerfile b/.github/actions/github-release/Dockerfile
deleted file mode 100644
index 5849eac7d2..0000000000
--- a/.github/actions/github-release/Dockerfile
+++ /dev/null
@@ -1,8 +0,0 @@
-FROM node:slim
-
-COPY . /action
-WORKDIR /action
-
-RUN npm install --production
-
-ENTRYPOINT ["node", "/action/main.js"]
diff --git a/.github/actions/github-release/action.yml b/.github/actions/github-release/action.yml
index 51a074adfa..3225a91dbb 100644
--- a/.github/actions/github-release/action.yml
+++ b/.github/actions/github-release/action.yml
@@ -4,12 +4,9 @@ inputs:
   token:
     description: ''
     required: true
-  name:
-    description: ''
-    required: true
   files:
     description: ''
     required: true
 runs:
-  using: 'docker'
-  image: 'Dockerfile'
+  using: 'node12'
+  main: 'main.js'
diff --git a/.github/actions/github-release/main.js b/.github/actions/github-release/main.js
index 567fe3967f..82374c9948 100644
--- a/.github/actions/github-release/main.js
+++ b/.github/actions/github-release/main.js
@@ -11,12 +11,15 @@ function sleep(milliseconds) {
 async function runOnce() {
   // Load all our inputs and env vars. Note that `getInput` reads from `INPUT_*`
   const files = core.getInput('files');
-  const name = core.getInput('name');
   const token = core.getInput('token');
   const slug = process.env.GITHUB_REPOSITORY;
   const owner = slug.split('/')[0];
   const repo = slug.split('/')[1];
   const sha = process.env.GITHUB_SHA;
+  let name = 'dev';
+  if (process.env.GITHUB_REF.startsWith('refs/tags/v')) {
+    name = process.env.GITHUB_REF.substring(10);
+  }
 
   core.info(`files: ${files}`);
   core.info(`name: ${name}`);
@@ -24,53 +27,88 @@ async function runOnce() {
   const octokit = new github.GitHub(token);
 
-  // Delete the previous release since we can't overwrite one. This may happen
-  // due to retrying an upload or it may happen because we're doing the dev
-  // release.
-  const releases = await octokit.paginate("GET /repos/:owner/:repo/releases", { owner, repo });
-  for (const release of releases) {
-    if (release.tag_name !== name) {
-      continue;
+  // For the `dev` release we may need to update the tag to point to the new
+  // commit on this branch. All other names should already have tags associated
+  // with them.
+  if (name == 'dev') {
+    let tag = null;
+    try {
+      tag = await octokit.request("GET /repos/:owner/:repo/git/refs/tags/:name", { owner, repo, name });
+      core.info(`found existing tag`);
+      console.log("tag: ", JSON.stringify(tag.data, null, 2));
+    } catch (e) {
+      // ignore if this tag doesn't exist
+      core.info(`no existing tag found`);
+    }
+
+    if (tag === null || tag.data.object.sha !== sha) {
+      core.info(`updating existing tag or creating new one`);
+      // Delete the previous release for this tag, if any
+      try {
+        core.info(`fetching release for ${name}`);
+        const release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
+        core.info(`deleting release ${release.data.id}`);
+        await octokit.repos.deleteRelease({ owner, repo, release_id: release.data.id });
+      } catch (e) {
+        // ignore, there may not have been a release
+        console.log("ERROR: ", JSON.stringify(e, null, 2));
+      }
+
+      try {
+        core.info(`updating dev tag`);
+        await octokit.git.updateRef({
+          owner,
+          repo,
+          ref: 'tags/dev',
+          sha,
+          force: true,
+        });
+      } catch (e) {
+        console.log("ERROR: ", JSON.stringify(e, null, 2));
+        core.info(`creating dev tag`);
+        try {
+          await octokit.git.createTag({
+            owner,
+            repo,
+            tag: 'dev',
+            message: 'dev release',
+            object: sha,
+            type: 'commit',
+          });
+        } catch (e) {
+          // we might race with others, so assume someone else has created the
+          // tag by this point.
+        }
+      }
+    } else {
+      core.info(`existing tag works`);
     }
-    const release_id = release.id;
-    core.info(`deleting release ${release_id}`);
-    await octokit.repos.deleteRelease({ owner, repo, release_id });
   }
 
-  // We also need to update the `dev` tag while we're at it on the `dev` branch.
-  if (name == 'dev') {
+  // Try to load the release for this tag, and if it doesn't exist then make a
+  // new one. We might race with other builders on creation, though, so if the
+  // creation fails try again to get the release by the tag.
+  let release = null;
+  try {
+    core.info(`fetching release`);
+    release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
+  } catch (e) {
+    console.log("ERROR: ", JSON.stringify(e, null, 2));
+    core.info(`creating a release`);
     try {
-      core.info(`updating dev tag`);
-      await octokit.git.updateRef({
-        owner,
-        repo,
-        ref: 'tags/dev',
-        sha,
-        force: true,
-      });
-    } catch (e) {
-      console.log("ERROR: ", JSON.stringify(e, null, 2));
-      core.info(`creating dev tag`);
-      await octokit.git.createTag({
+      release = await octokit.repos.createRelease({
         owner,
         repo,
-        tag: 'dev',
-        message: 'dev release',
-        object: sha,
-        type: 'commit',
+        tag_name: name,
+        prerelease: name === 'dev',
       });
+    } catch(e) {
+      console.log("ERROR: ", JSON.stringify(e, null, 2));
+      core.info(`fetching one more time`);
+      release = await octokit.repos.getReleaseByTag({ owner, repo, tag: name });
     }
   }
-
-  // Creates an official GitHub release for this `tag`, and if this is `dev`
-  // then we know that from the previous block this should be a fresh release.
-  core.info(`creating a release`);
-  const release = await octokit.repos.createRelease({
-    owner,
-    repo,
-    tag_name: name,
-    prerelease: name === 'dev',
-  });
+  console.log("found release: ", JSON.stringify(release.data, null, 2));
 
   // Upload all the relevant assets for this release as just general blobs.
   for (const file of glob.sync(files)) {
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 6af34422c7..f6a3505e3e 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -394,6 +394,8 @@ jobs:
   build:
     name: Build wasmtime
     runs-on: ${{ matrix.os }}
+    env:
+      QEMU_VERSION: 5.0.0
     strategy:
       matrix:
         include:
@@ -434,30 +436,42 @@ jobs:
         rustup target add ${{ matrix.target }}
       if: matrix.target != ''
 
+    - uses: actions/cache@v2
+      with:
+        path: ${{ runner.tool_cache }}/qemu
+        key: qemu-${{ matrix.target }}-${{ env.QEMU_VERSION }}
+      if: matrix.target != '' && matrix.os == 'ubuntu-latest'
+
     - name: Install cross-compilation tools
       run: |
        set -ex
        sudo apt-get update
        sudo apt-get install -y ${{ matrix.gcc_package }}
+        # Configure Cargo for cross compilation and tell it how it can run
+        # cross executables
+        upcase=$(echo ${{ matrix.target }} | awk '{ print toupper($0) }' | sed 's/-/_/g')
+        echo CARGO_TARGET_${upcase}_RUNNER=${{ runner.tool_cache }}/qemu/bin/${{ matrix.qemu }} >> $GITHUB_ENV
+        echo CARGO_TARGET_${upcase}_LINKER=${{ matrix.gcc }} >> $GITHUB_ENV
+
+        # See comments in the source for why we enable this during QEMU
+        # emulation.
+        echo WASMTIME_TEST_NO_HOG_MEMORY=1 >> $GITHUB_ENV
+
+        # See if qemu is already in the cache
+        if [ -f ${{ runner.tool_cache }}/qemu/built ]; then
+          exit 0
+        fi
+
        # Download and build qemu from source since the most recent release is
        # way faster at arm emulation than the current version github actions'
        # ubuntu image uses. Disable as much as we can to get it to build
        # quickly.
        curl https://download.qemu.org/qemu-5.0.0.tar.xz | tar xJf -
        cd qemu-5.0.0
-        ./configure --target-list=${{ matrix.qemu_target }} --prefix=$HOME/qemu --disable-tools --disable-slirp --disable-fdt --disable-capstone --disable-docs
+        ./configure --target-list=${{ matrix.qemu_target }} --prefix=${{ runner.tool_cache}}/qemu --disable-tools --disable-slirp --disable-fdt --disable-capstone --disable-docs
        make -j$(nproc) install
-
-        # Configure Cargo for cross compilation and tell it how it can run
-        # cross executables
-        upcase=$(echo ${{ matrix.target }} | awk '{ print toupper($0) }' | sed 's/-/_/g')
-        echo CARGO_TARGET_${upcase}_RUNNER=$HOME/qemu/bin/${{ matrix.qemu }} >> $GITHUB_ENV
-        echo CARGO_TARGET_${upcase}_LINKER=${{ matrix.gcc }} >> $GITHUB_ENV
-
-        # See comments in the source for why we enable this during QEMU
-        # emulation.
-        echo WASMTIME_TEST_NO_HOG_MEMORY=1 >> $GITHUB_ENV
+        touch ${{ runner.tool_cache }}/qemu/built
       if: matrix.target != '' && matrix.os == 'ubuntu-latest'
 
     # Install wasm32-wasi target in order to build wasi-common's integration
@@ -487,122 +501,23 @@ jobs:
       env:
         RUST_BACKTRACE: 1
 
-    # Postprocess the macOS dylib a bit to have a more reasonable `LC_ID_DYLIB`
-    # directive than the default one that comes out of the linker when typically
-    # doing `cargo build`. For more info see #984
-    - run: install_name_tool -id "@rpath/libwasmtime.dylib" target/release/libwasmtime.dylib
-      if: matrix.os == 'macos-latest'
-
-    # ... and now perform some goop to move all the relevant artifacts into
-    # something that we'll upload from this action.
-    - run: mkdir dist
-    # Move binaries to dist folder
-    - run: cp target/release/wasmtime dist
-      if: matrix.os != 'windows-latest' && matrix.target == ''
-    - run: cp target/${{ matrix.target }}/release/wasmtime dist
-      if: matrix.os != 'windows-latest' && matrix.target != ''
-    - run: cp target/release/wasmtime.exe dist
-      if: matrix.build == 'x86_64-windows'
-    - run: cp target/x86_64-pc-windows-gnu/release/wasmtime.exe dist
-      if: matrix.build == 'x86_64-mingw'
-
-    # Move libwasmtime dylib to dist folder
-    - run: cp target/release/libwasmtime.{so,a} dist
-      if: matrix.os == 'ubuntu-latest' && matrix.target == ''
-    - run: cp target/${{ matrix.target }}/release/libwasmtime.{so,a} dist
-      if: matrix.os == 'ubuntu-latest' && matrix.target != ''
-    - run: cp target/release/libwasmtime.{dylib,a} dist
-      if: matrix.os == 'macos-latest'
-    - run: cp target/release/wasmtime.{dll,lib,dll.lib} dist
-      if: matrix.build == 'x86_64-windows'
-    - run: cp target/x86_64-pc-windows-gnu/release/{wasmtime.dll,libwasmtime.a} dist
-      if: matrix.build == 'x86_64-mingw'
-
-    # Make a Windows MSI installer if we're on Windows
-    - run: |
-        export WT_VERSION=`cat Cargo.toml | sed -n 's/^version = "\([^"]*\)".*/\1/p'`
-        "$WIX/bin/candle" -arch x64 -out target/wasmtime.wixobj ci/wasmtime.wxs
-        "$WIX/bin/light" -out dist/installer.msi target/wasmtime.wixobj -ext WixUtilExtension
-        rm dist/installer.wixpdb
-      if: matrix.build == 'x86_64-windows'
-
+    # Assemble release artifacts appropriate for this platform, then upload them
+    # unconditionally to this workflow's files so we have a copy of them.
+    - run: ./ci/build-tarballs.sh "${{ matrix.build }}" "${{ matrix.target }}"
     - uses: actions/upload-artifact@v1
       with:
         name: bins-${{ matrix.build }}
         path: dist
 
-  # Consumes all published artifacts from all the previous build steps, creates
-  # a bunch of tarballs for all of them, and then publishes the tarballs
-  # themselves as an artifact (for inspection) and then optionally creates
-  # github releases and/or tags for pushes.
-  publish:
-    name: Publish
-    needs: build
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-      with:
-        submodules: true
-    - run: rustup update stable && rustup default stable
-
-    - name: Download x86_64 macOS binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-macos
-    - name: Download x86_64 Linux binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-linux
-    - name: Download AArch64 Linux binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-aarch64-linux
-    - name: Download x86_64 Windows binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-windows
-    - name: Download x86_64 Windows MinGW binaries
-      uses: actions/download-artifact@v1
-      with:
-        name: bins-x86_64-mingw
-
-    - name: Calculate tag name
-      run: |
-        name=dev
-        if [[ $GITHUB_REF == refs/tags/v* ]]; then
-          name=${GITHUB_REF:10}
-        fi
-        echo ::set-output name=val::$name
-        echo TAG=$name >> $GITHUB_ENV
-      id: tagname
-
-    # Assemble all the build artifacts into tarballs and zip archives.
-    - name: Assemble tarballs
-      run: |
-        ./ci/build-tarballs.sh x86_64-linux
-        ./ci/build-tarballs.sh x86_64-windows .exe
-        ./ci/build-tarballs.sh x86_64-mingw .exe
-        ./ci/build-tarballs.sh x86_64-macos
-        ./ci/build-tarballs.sh aarch64-linux
-
-    # Upload all assembled tarballs as an artifact of the github action run, so
-    # that way even PRs can inspect the output.
-    - uses: actions/upload-artifact@v1
-      with:
-        name: tarballs
-        path: dist
-
     # ... and if this was an actual push (tag or `main`) then we publish a
     # new release. This'll automatically publish a tag release or update `dev`
     # with this `sha`
+    - run: cd .github/actions/github-release && npm install --production
     - name: Publish Release
       uses: ./.github/actions/github-release
       if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v'))
       with:
         files: "dist/*"
-        name: ${{ steps.tagname.outputs.val }}
         token: ${{ secrets.GITHUB_TOKEN }}
 
   cargo-audit:
diff --git a/ci/build-tarballs.sh b/ci/build-tarballs.sh
index 117f38d38e..c3f73af635 100755
--- a/ci/build-tarballs.sh
+++ b/ci/build-tarballs.sh
@@ -1,56 +1,86 @@
 #!/bin/bash
-# A small shell script invoked from CI on the final Linux builder which actually
-# assembles the release artifacts for a particular platform. This will take the
-# binary artifacts of previous builders and create associated tarballs to
-# publish to GitHub.
+# A small script used for assembling release tarballs for both the `wasmtime`
+# binary and the C API. This is executed with two arguments, mostly coming from
+# the CI matrix.
 #
-# The first argument of this is the "platform" name to put into the tarball, and
-# the second argument is the name of the github actions platform which is where
-# we source binaries from. The final third argument is ".exe" on Windows to
-# handle executable extensions right.
+# * The first argument is the name of the platform, used to name the release
+# * The second argument is the "target", if present, currently only for
+#   cross-compiles
 #
-# Usage: build-tarballs.sh PLATFORM [.exe]
-
-# where PLATFORM is e.g. x86_64-linux, aarch64-linux, ...
+# This expects the build to already be done and will assemble release artifacts
+# in `dist/`
 
 set -ex
 
 platform=$1
-exe=$2
+target=$2
 
 rm -rf tmp
 mkdir tmp
 mkdir -p dist
 
-mktarball() {
-  dir=$1
-  if [ "$exe" = "" ]; then
-    tar cJf dist/$dir.tar.xz -C tmp $dir
-  else
-    (cd tmp && zip -r ../dist/$dir.zip $dir)
-  fi
-}
-
-# Create the main tarball of binaries
-bin_pkgname=wasmtime-$TAG-$platform
-mkdir tmp/$bin_pkgname
-cp LICENSE README.md tmp/$bin_pkgname
-mv bins-$platform/wasmtime$exe tmp/$bin_pkgname
-chmod +x tmp/$bin_pkgname/wasmtime$exe
-mktarball $bin_pkgname
-
-if [ -f bins-$platform/installer.msi ]; then
-  mv bins-$platform/installer.msi dist/$bin_pkgname.msi
+tag=dev
+if [[ $GITHUB_REF == refs/tags/v* ]]; then
+  tag=${GITHUB_REF:10}
 fi
 
-# Create tarball of API libraries
-api_pkgname=wasmtime-$TAG-$platform-c-api
+bin_pkgname=wasmtime-$tag-$platform
+api_pkgname=wasmtime-$tag-$platform-c-api
+
 mkdir tmp/$api_pkgname
 mkdir tmp/$api_pkgname/lib
 mkdir tmp/$api_pkgname/include
+mkdir tmp/$bin_pkgname
 cp LICENSE README.md tmp/$api_pkgname
-mv bins-$platform/* tmp/$api_pkgname/lib
+cp LICENSE README.md tmp/$bin_pkgname
 cp crates/c-api/wasm-c-api/include/wasm.h tmp/$api_pkgname/include
 cp crates/c-api/include/{wasmtime,wasi}.h tmp/$api_pkgname/include
+
+fmt=tar
+if [ "$platform" = "x86_64-windows" ]; then
+  cp target/release/wasmtime.exe tmp/$bin_pkgname
+  cp target/release/{wasmtime.dll,wasmtime.lib,wasmtime.dll.lib} tmp/$api_pkgname/lib
+  fmt=zip
+
+  # Generate a `*.msi` installer for Windows as well
+  export WT_VERSION=`cat Cargo.toml | sed -n 's/^version = "\([^"]*\)".*/\1/p'`
+  "$WIX/bin/candle" -arch x64 -out target/wasmtime.wixobj ci/wasmtime.wxs
+  "$WIX/bin/light" -out dist/$bin_pkgname.msi target/wasmtime.wixobj -ext WixUtilExtension
+  rm dist/$bin_pkgname.wixpdb
+elif [ "$platform" = "x86_64-mingw" ]; then
+  cp target/x86_64-pc-windows-gnu/release/wasmtime.exe tmp/$bin_pkgname
+  cp target/x86_64-pc-windows-gnu/release/{wasmtime.dll,libwasmtime.a} tmp/$api_pkgname/lib
+  fmt=zip
+elif [ "$platform" = "x86_64-macos" ]; then
+  # Postprocess the macOS dylib a bit to have a more reasonable `LC_ID_DYLIB`
+  # directive than the default one that comes out of the linker when typically
+  # doing `cargo build`. For more info see #984
+  install_name_tool -id "@rpath/libwasmtime.dylib" target/release/libwasmtime.dylib
+  cp target/release/wasmtime tmp/$bin_pkgname
+  cp target/release/libwasmtime.{a,dylib} tmp/$api_pkgname/lib
+elif [ "$target" = "" ]; then
+  cp target/release/wasmtime tmp/$bin_pkgname
+  cp target/release/libwasmtime.{a,so} tmp/$api_pkgname/lib
+else
+  cp target/$target/release/wasmtime tmp/$bin_pkgname
+  cp target/$target/release/libwasmtime.{a,so} tmp/$api_pkgname/lib
+fi
+
+
+mktarball() {
+  dir=$1
+  if [ "$fmt" = "tar" ]; then
+    # this is a bit wonky, but the goal is to use `xz` with threaded compression
+    # to ideally get better performance with the `-T0` flag.
+    tar -cvf - -C tmp $dir | xz -9 -T0 > dist/$dir.tar.xz
+  else
+    # Note that this runs on Windows, and it looks like GitHub Actions doesn't
+    # have a `zip` tool there, so we use powershell
+    (cd tmp && powershell Compress-Archive $dir $dir.zip)
+    mv tmp/$dir.zip dist
+  fi
+}
+
 mktarball $api_pkgname
+mktarball $bin_pkgname
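For reference, a minimal sketch of running the new ci/build-tarballs.sh by hand, assuming a native release build (the wasmtime binary plus the C API library) already exists under target/release; the tag and platform values below are illustrative assumptions, not taken from the patch:

  # Hypothetical local invocation; the GITHUB_REF value and platform name are made up.
  export GITHUB_REF=refs/tags/v0.19.0   # anything else makes the script fall back to the `dev` tag
  ./ci/build-tarballs.sh x86_64-linux   # second argument (target) omitted for a native build
  ls dist/                              # wasmtime-v0.19.0-x86_64-linux.tar.xz plus the matching -c-api tarball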