Add Octave input request #199
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Kernel conformance CI: runs the Rust-based jupyter-kernel-test suite
# against a matrix of Jupyter kernels, publishes a markdown matrix, and
# releases the combined JSON report on pushes to main.
name: Kernel Conformance

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

# contents: write  -> needed by `gh release create` in the create-release job
# pull-requests: write -> needed by `gh pr comment` in conformance-matrix
permissions:
  contents: write
  pull-requests: write

# Cancel superseded runs of the same ref to save CI minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # One matrix job per kernel: install the kernel, build the conformance
  # test binary, run it, and upload a per-kernel JSON report artifact.
  test-kernel:
    name: Test ${{ matrix.kernel.name }}
    runs-on: ubuntu-latest
    timeout-minutes: 15
    strategy:
      fail-fast: false  # keep testing the other kernels when one fails
      matrix:
        kernel:
          - name: ipykernel
            install: pip install ipykernel && python -m ipykernel install --user
            kernel-name: python3
          - name: async-kernel
            install: |
              pip install async-kernel
              async-kernel -a async
              mkdir -p ~/.local/share/jupyter/kernels
              cp -r $pythonLocation/share/jupyter/kernels/async ~/.local/share/jupyter/kernels/
            kernel-name: async
          - name: evcxr
            install: |
              if [ -f ~/.cargo/bin/evcxr_jupyter ]; then
                echo "evcxr_jupyter found in cache"
              else
                cargo install --locked evcxr_jupyter
              fi
              evcxr_jupyter --install
            kernel-name: rust
          - name: deno
            install: |
              curl -fsSL https://deno.land/install.sh | sh
              echo "$HOME/.deno/bin" >> $GITHUB_PATH
              $HOME/.deno/bin/deno jupyter --install
            kernel-name: deno
          - name: ark
            install: |
              # Download Ark binary from GitHub releases
              curl -fsSL -o ark.zip https://github.com/posit-dev/ark/releases/download/0.1.238/ark-0.1.238-linux-x64.zip
              unzip -o ark.zip
              chmod +x ark
              sudo mv ark /usr/local/bin/
              # Install kernel
              ark --install
            kernel-name: ark
          - name: almond
            install: |
              curl -fLo cs https://github.com/coursier/coursier/releases/latest/download/coursier
              chmod +x cs
              ./cs launch --use-bootstrap almond -- --install
              rm -f cs
            kernel-name: scala
          - name: ijulia
            install: julia -e 'using Pkg; Pkg.add("IJulia"); using IJulia'
            kernel-name: julia-1.11
          - name: xeus-cling
            install: micromamba install -y xeus-cling -c conda-forge
            kernel-name: xcpp17
          - name: xeus-sql
            # Pin to 0.3.1 which includes fix for missing status in kernel_info_reply
            install: micromamba install -y "xeus-sql>=0.3.1" -c conda-forge
            kernel-name: xsql
          - name: gonb
            install: |
              go install github.com/janpfeifer/gonb@latest
              go install golang.org/x/tools/cmd/goimports@latest
              go install golang.org/x/tools/gopls@latest
              echo "$HOME/go/bin" >> $GITHUB_PATH
              $HOME/go/bin/gonb --install
            kernel-name: gonb
          - name: xeus-python
            install: micromamba install -y xeus-python -c conda-forge
            kernel-name: xpython
          - name: xeus-cpp
            install: micromamba install -y xeus-cpp -c conda-forge
            kernel-name: xcpp20
          - name: xeus-sqlite
            install: micromamba install -y xeus-sqlite -c conda-forge
            kernel-name: xsqlite
          - name: xeus-r
            install: micromamba install -y xeus-r -c conda-forge
            kernel-name: xr
          - name: xeus-lua
            install: micromamba install -y xeus-lua -c conda-forge
            kernel-name: xlua
          - name: xeus-haskell
            install: |
              curl -fsSL https://pixi.sh/install.sh | bash
              export PATH="$HOME/.pixi/bin:$PATH"
              git clone https://github.com/jupyter-xeus/xeus-haskell /tmp/xeus-haskell
              cd /tmp/xeus-haskell
              pixi run -e default prebuild
              pixi run -e default build
              pixi run -e default install
              # Copy kernelspec from Pixi env to user Jupyter directory
              mkdir -p ~/.local/share/jupyter/kernels
              find /tmp/xeus-haskell -path "*share/jupyter/kernels/xhaskell" -exec cp -r {} ~/.local/share/jupyter/kernels/ \; 2>/dev/null || true
              ls -la ~/.local/share/jupyter/kernels/ || true
            kernel-name: xhaskell
          - name: xeus-octave
            install: micromamba install -y xeus-octave -c conda-forge
            kernel-name: xoctave
          - name: ocaml-jupyter
            install: |
              eval $(opam env)
              opam install -y jupyter
              echo '#use "topfind";;' >> ~/.ocamlinit
              ocaml-jupyter-opam-genspec
              mkdir -p ~/.local/share/jupyter/kernels/ocaml-jupyter
              cp -r "$(opam var share)/jupyter"/* ~/.local/share/jupyter/kernels/ocaml-jupyter/
            kernel-name: ocaml-jupyter
    steps:
      - uses: actions/checkout@v4
      # Rust is always required: the conformance test suite itself is built
      # with cargo further below.
      - name: Install Rust
        uses: dtolnay/rust-toolchain@stable
      - name: Set up Python
        if: matrix.kernel.name == 'ipykernel' || matrix.kernel.name == 'ark' || matrix.kernel.name == 'async-kernel'
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Set up R
        if: matrix.kernel.name == 'ark'
        uses: r-lib/actions/setup-r@v2
        with:
          r-version: "4.4"
      - name: Set up Java
        if: matrix.kernel.name == 'almond'
        uses: actions/setup-java@v4
        with:
          distribution: 'temurin'
          java-version: '17'
      - name: Set up Julia
        if: matrix.kernel.name == 'ijulia'
        uses: julia-actions/setup-julia@v2
        with:
          version: '1.11'
      - name: Set up Go
        if: matrix.kernel.name == 'gonb'
        uses: actions/setup-go@v5
        with:
          go-version: '1.22'
      - name: Set up Mamba
        if: contains(fromJson('["xeus-cling", "xeus-sql", "xeus-python", "xeus-cpp", "xeus-sqlite", "xeus-r", "xeus-lua", "xeus-octave"]'), matrix.kernel.name)
        uses: mamba-org/setup-micromamba@v2
        with:
          micromamba-version: 'latest'
          environment-name: kernel
          init-shell: bash
          create-args: >-
            python=3.12
            jupyter_client
      - name: Set up OCaml
        if: matrix.kernel.name == 'ocaml-jupyter'
        uses: ocaml/setup-ocaml@v3
        with:
          ocaml-compiler: '5.2'
      - name: Install OCaml dependencies
        if: matrix.kernel.name == 'ocaml-jupyter'
        run: |
          sudo apt-get update
          # The ZeroMQ development package on Ubuntu is libzmq3-dev
          # ("libzmq5-dev" does not exist in the archive; libzmq5 is only
          # the runtime library package).
          sudo apt-get install -y libzmq3-dev
      - name: Install jupyter for Ark
        if: matrix.kernel.name == 'ark'
        run: python -m pip install jupyter
      - name: Cache cargo (test suite)
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/registry/
            ~/.cargo/git/
            target/
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: Cache evcxr
        if: matrix.kernel.name == 'evcxr'
        uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/evcxr*
            ~/.cargo/registry/
            ~/.cargo/git/
          key: ${{ runner.os }}-evcxr-0.21.1
          restore-keys: |
            ${{ runner.os }}-evcxr-
      - name: Install kernel
        run: ${{ matrix.kernel.install }}
      - name: Copy conda kernelspecs to user directory
        if: contains(fromJson('["xeus-cling", "xeus-sql", "xeus-python", "xeus-cpp", "xeus-sqlite", "xeus-r", "xeus-lua", "xeus-octave"]'), matrix.kernel.name)
        run: |
          echo "Looking for kernelspecs..."
          find $MAMBA_ROOT_PREFIX -name "kernel.json" 2>/dev/null || true
          find /home/runner -path "*/jupyter/kernels/*/kernel.json" 2>/dev/null || true
          mkdir -p ~/.local/share/jupyter/kernels
          # Copy from wherever the kernels are installed
          if [ -d "$MAMBA_ROOT_PREFIX/envs/kernel/share/jupyter/kernels" ]; then
            cp -r $MAMBA_ROOT_PREFIX/envs/kernel/share/jupyter/kernels/* ~/.local/share/jupyter/kernels/
          elif [ -d "$MAMBA_ROOT_PREFIX/share/jupyter/kernels" ]; then
            cp -r $MAMBA_ROOT_PREFIX/share/jupyter/kernels/* ~/.local/share/jupyter/kernels/
          fi
          ls -la ~/.local/share/jupyter/kernels/ || true
      - name: Install Xvfb for xeus-octave
        if: matrix.kernel.name == 'xeus-octave'
        run: sudo apt-get update && sudo apt-get install -y xvfb
      - name: Configure Octave environment
        if: matrix.kernel.name == 'xeus-octave'
        run: |
          # xeus-octave needs OCTAVE_HOME so Octave can find its .m scripts
          # (e.g., graphics_toolkit). The conda prefix contains all Octave files.
          # NOTE(review): kernelspecs above are found under
          # $MAMBA_ROOT_PREFIX/envs/kernel — confirm whether OCTAVE_HOME should
          # point at that env prefix instead of the root prefix.
          echo "OCTAVE_HOME=$MAMBA_ROOT_PREFIX" >> $GITHUB_ENV
          echo "OCTAVE_EXEC_PREFIX=$MAMBA_ROOT_PREFIX" >> $GITHUB_ENV
      - name: Build test suite
        run: cargo build --release
      - name: List available kernels
        run: ./target/release/jupyter-kernel-test --list-kernels
      # xeus-octave needs an X display (xvfb-run) and a longer timeout;
      # all other kernels run with defaults.
      - name: Run conformance tests
        run: |
          if [ "${{ matrix.kernel.name }}" = "xeus-octave" ]; then
            xvfb-run -a ./target/release/jupyter-kernel-test ${{ matrix.kernel.kernel-name }} \
              --timeout 30000 \
              --format json \
              --output ${{ matrix.kernel.name }}-report.json
          else
            ./target/release/jupyter-kernel-test ${{ matrix.kernel.kernel-name }} \
              --format json \
              --output ${{ matrix.kernel.name }}-report.json
          fi
        continue-on-error: true
      # Second run without --format json so the human-readable output lands
      # in the job log; failures here never fail the job.
      - name: Display results
        run: |
          if [ "${{ matrix.kernel.name }}" = "xeus-octave" ]; then
            xvfb-run -a ./target/release/jupyter-kernel-test ${{ matrix.kernel.kernel-name }} --timeout 30000 || true
          else
            ./target/release/jupyter-kernel-test ${{ matrix.kernel.kernel-name }} || true
          fi
      - name: Upload report
        uses: actions/upload-artifact@v4
        with:
          name: report-${{ matrix.kernel.name }}
          path: ${{ matrix.kernel.name }}-report.json
          if-no-files-found: warn
| conformance-matrix: | |
| name: Conformance Matrix | |
| needs: test-kernel | |
| runs-on: ubuntu-latest | |
| if: always() | |
| steps: | |
| - uses: actions/checkout@v4 | |
| - name: Download all reports | |
| uses: actions/download-artifact@v4 | |
| with: | |
| path: reports | |
| pattern: report-* | |
| merge-multiple: true | |
| - name: Generate detailed report | |
| run: | | |
| { | |
| echo "## Kernel Conformance Matrix" | |
| echo "" | |
| echo "| Kernel | Protocol | Tier 1 | Tier 2 | Tier 3 | Tier 4 | Total |" | |
| echo "|--------|----------|--------|--------|--------|--------|-------|" | |
| for report in reports/*-report.json; do | |
| if [ -f "$report" ]; then | |
| kernel=$(basename "$report" -report.json) | |
| protocol=$(jq -r '.protocol_version // "?"' "$report" 2>/dev/null || echo "?") | |
| tier1=$(jq '[.results[] | select(.category == "tier1_basic") | select(.result.status == "pass")] | length' "$report" 2>/dev/null || echo "?") | |
| tier1_total=$(jq '[.results[] | select(.category == "tier1_basic")] | length' "$report" 2>/dev/null || echo "?") | |
| tier2=$(jq '[.results[] | select(.category == "tier2_interactive") | select(.result.status == "pass")] | length' "$report" 2>/dev/null || echo "?") | |
| tier2_total=$(jq '[.results[] | select(.category == "tier2_interactive")] | length' "$report" 2>/dev/null || echo "?") | |
| tier3=$(jq '[.results[] | select(.category == "tier3_rich_output") | select(.result.status == "pass")] | length' "$report" 2>/dev/null || echo "?") | |
| tier3_total=$(jq '[.results[] | select(.category == "tier3_rich_output")] | length' "$report" 2>/dev/null || echo "?") | |
| tier4=$(jq '[.results[] | select(.category == "tier4_advanced") | select(.result.status == "pass")] | length' "$report" 2>/dev/null || echo "?") | |
| tier4_total=$(jq '[.results[] | select(.category == "tier4_advanced")] | length' "$report" 2>/dev/null || echo "?") | |
| total=$(jq '[.results[] | select(.result.status == "pass")] | length' "$report" 2>/dev/null || echo "?") | |
| total_tests=$(jq '.results | length' "$report" 2>/dev/null || echo "?") | |
| echo "| $kernel | $protocol | $tier1/$tier1_total | $tier2/$tier2_total | $tier3/$tier3_total | $tier4/$tier4_total | $total/$total_tests |" | |
| fi | |
| done | |
| echo "" | |
| echo "---" | |
| echo "" | |
| # Detailed failure breakdown per kernel | |
| for report in reports/*-report.json; do | |
| if [ -f "$report" ]; then | |
| kernel=$(basename "$report" -report.json) | |
| impl=$(jq -r '.implementation // "unknown"' "$report" 2>/dev/null) | |
| lang=$(jq -r '.language // "unknown"' "$report" 2>/dev/null) | |
| protocol=$(jq -r '.protocol_version // "unknown"' "$report" 2>/dev/null) | |
| failures=$(jq '[.results[] | select(.result.status != "pass" and .result.status != "unsupported")] | length' "$report" 2>/dev/null || echo "0") | |
| skipped=$(jq '[.results[] | select(.result.status == "unsupported")] | length' "$report" 2>/dev/null || echo "0") | |
| if [ "$failures" != "0" ] || [ "$skipped" != "0" ]; then | |
| echo "<details>" | |
| echo "<summary><strong>$kernel</strong> ($impl) - $failures failures, $skipped skipped</summary>" | |
| echo "" | |
| echo "**Language:** $lang | **Protocol:** $protocol" | |
| echo "" | |
| if [ "$failures" != "0" ]; then | |
| echo "#### Failures" | |
| echo "" | |
| echo "| Test | Message Type | Likely Source | Reason |" | |
| echo "|------|--------------|---------------|--------|" | |
| jq -r '.results[] | select(.result.status == "fail" or .result.status == "timeout") | "| \(.name) | `\(.message_type)` | \(.result.kind // "unknown") | \(.result.reason // "timeout") |"' "$report" 2>/dev/null | |
| echo "" | |
| fi | |
| if [ "$skipped" != "0" ]; then | |
| echo "#### Skipped (Not Implemented)" | |
| echo "" | |
| jq -r '.results[] | select(.result.status == "unsupported") | "- **\(.name)**: \(.description)"' "$report" 2>/dev/null | |
| echo "" | |
| fi | |
| echo "</details>" | |
| echo "" | |
| fi | |
| fi | |
| done | |
| # Cross-kernel comparison for failing tests | |
| echo "---" | |
| echo "" | |
| echo "### Test Results by Message Type" | |
| echo "" | |
| echo "| Test | Message Type |$(for r in reports/*-report.json; do kernel=$(basename "$r" -report.json); echo -n " $kernel |"; done)" | |
| echo "|------|--------------|$(for r in reports/*-report.json; do echo -n "--------|"; done)" | |
| # Get all unique test names | |
| all_tests=$(jq -r '.results[].name' reports/*-report.json 2>/dev/null | sort -u) | |
| for test in $all_tests; do | |
| msg_type=$(jq -r --arg t "$test" '.results[] | select(.name == $t) | .message_type' reports/*-report.json 2>/dev/null | head -1) | |
| echo -n "| $test | \`$msg_type\` |" | |
| for report in reports/*-report.json; do | |
| if [ -f "$report" ]; then | |
| status=$(jq -r --arg t "$test" '.results[] | select(.name == $t) | .result.status' "$report" 2>/dev/null) | |
| case "$status" in | |
| pass) echo -n " ✅ |" ;; | |
| fail) echo -n " ❌ |" ;; | |
| unsupported) echo -n " ⏭️ |" ;; | |
| timeout) echo -n " ⏱️ |" ;; | |
| partial_pass) echo -n " ⚠️ |" ;; | |
| *) echo -n " - |" ;; | |
| esac | |
| fi | |
| done | |
| echo "" | |
| done | |
| } | tee -a $GITHUB_STEP_SUMMARY > summary.md | |
| - name: Comment on PR | |
| if: github.event_name == 'pull_request' | |
| env: | |
| GH_TOKEN: ${{ github.token }} | |
| run: gh pr comment ${{ github.event.pull_request.number }} --body-file summary.md | |
| - name: Upload combined reports | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: conformance-reports | |
| path: reports/ | |
| if-no-files-found: warn | |
| create-release: | |
| name: Create Release | |
| needs: conformance-matrix | |
| runs-on: ubuntu-latest | |
| if: github.event_name == 'push' && github.ref == 'refs/heads/main' | |
| steps: | |
| - uses: actions/checkout@v4 | |
| - name: Download all reports | |
| uses: actions/download-artifact@v4 | |
| with: | |
| path: reports | |
| pattern: report-* | |
| merge-multiple: true | |
| - name: Bundle reports into matrix | |
| run: | | |
| node -e " | |
| const fs = require('fs'); | |
| const path = require('path'); | |
| const reportsDir = 'reports'; | |
| const reports = fs.readdirSync(reportsDir) | |
| .filter(f => f.endsWith('.json')) | |
| .map(f => { | |
| try { | |
| return JSON.parse(fs.readFileSync(path.join(reportsDir, f), 'utf8')); | |
| } catch (e) { | |
| console.error('Failed to parse', f, e); | |
| return null; | |
| } | |
| }) | |
| .filter(Boolean); | |
| const matrix = { | |
| generated_at: new Date().toISOString(), | |
| commit_sha: process.env.GITHUB_SHA, | |
| reports | |
| }; | |
| fs.writeFileSync('conformance-matrix.json', JSON.stringify(matrix, null, 2)); | |
| console.log('Created conformance-matrix.json with', reports.length, 'reports'); | |
| " | |
| - name: Create Release | |
| env: | |
| GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| DATE=$(date +%Y-%m-%d) | |
| TAG="conformance-${DATE}-${{ github.run_number }}" | |
| gh release create "$TAG" \ | |
| --title "Conformance Report $DATE" \ | |
| --notes "Automated conformance test results from commit ${{ github.sha }}" \ | |
| conformance-matrix.json |