# Workflow file captured from the GitHub Actions run page for
# "submission_mutex removed" (#28).

---
# CI pipeline for the NerdMiner firmware: native unit tests, embedded
# build verification, static analysis, and performance regression checks.
name: NerdMiner Test Suite

on:
  push:
    branches: [ main, develop, prerelease ]
  pull_request:
    branches: [ main, develop ]
jobs:
  # Host-side (no hardware) test suite run with PlatformIO's "native_test"
  # environment. Individual filtered runs below use `|| true` so a missing
  # filter match does not fail the job; the unfiltered run is the gate.
  native-tests:
    name: Native Tests
    runs-on: ubuntu-latest
    strategy:
      matrix:
        test-suite:
          - native_test
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Cache PlatformIO
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache/pip
            ~/.platformio/.cache
          key: ${{ runner.os }}-pio-${{ hashFiles('**/platformio.ini') }}
          restore-keys: |
            ${{ runner.os }}-pio-

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install PlatformIO Core
        run: pip install --upgrade platformio

      - name: Install project dependencies
        run: pio pkg install

      # Gate step: runs every native test; a failure here fails the job.
      - name: Run All Native Tests
        run: |
          echo "==========================================="
          echo "Running Native Test Suite"
          echo "==========================================="
          pio test -e native_test --verbose

      - name: Run SHA256 Tests
        run: |
          echo "Running SHA256 algorithm tests..."
          pio test -e native_test -f "*sha256*" --verbose || true

      - name: Run Stratum Protocol Tests
        run: |
          echo "Running Stratum protocol tests..."
          pio test -e native_test -f "*stratum*" --verbose || true

      - name: Run Mining Integration Tests
        run: |
          echo "Running mining integration tests..."
          pio test -e native_test -f "*mining*" --verbose || true

      - name: Generate Test Coverage Report
        run: |
          echo "Generating comprehensive test report..."
          pio test -e native_test --json-output-path native_test_results.json || true
          if [ -f native_test_results.json ]; then
            echo "Test results summary:"
            # Pretty-print if valid JSON; fall back to raw output otherwise.
            jq '.' native_test_results.json || cat native_test_results.json
          fi

      - name: Upload Native Test Results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: native-test-results-${{ github.sha }}
          path: |
            native_test_results.json
            .pio/build/native_test/
          retention-days: 30
embedded-tests:
name: Embedded Tests (ESP32-2432S028R)
runs-on: ubuntu-latest
needs: native-tests
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Cache PlatformIO
uses: actions/cache@v4
with:
path: |
~/.cache/pip
~/.platformio/.cache
key: ${{ runner.os }}-pio-embedded-${{ hashFiles('**/platformio.ini') }}
restore-keys: |
${{ runner.os }}-pio-embedded-
${{ runner.os }}-pio-
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: Install PlatformIO Core
run: pip install --upgrade platformio
- name: Install project dependencies
run: pio pkg install
- name: Build ESP32-2432S028R Test Environment
run: |
echo "Building ESP32-2432S028R test environment..."
pio run -e ESP32-2432S028R-test --verbose
- name: Check Test Compilation
run: |
echo "Verifying test compilation was successful..."
ls -la .pio/build/ESP32-2432S028R-test/
if [ -f .pio/build/ESP32-2432S028R-test/firmware.bin ]; then
echo "✅ ESP32-2432S028R test firmware compiled successfully"
echo "Firmware size: $(stat -c%s .pio/build/ESP32-2432S028R-test/firmware.bin) bytes"
else
echo "❌ ESP32-2432S028R test firmware compilation failed"
exit 1
fi
- name: Validate Test Binary
run: |
echo "Validating test binary structure..."
# Check if all our test files are included
if objdump -t .pio/build/ESP32-2432S028R-test/firmware.elf | grep -q "test_esp32_2432s028r_hardware"; then
echo "✅ Hardware tests included"
else
echo "⚠️ Hardware tests may not be included"
fi
if objdump -t .pio/build/ESP32-2432S028R-test/firmware.elf | grep -q "test_stratum_protocol"; then
echo "✅ Stratum protocol tests included"
else
echo "⚠️ Stratum protocol tests may not be included"
fi
- name: Generate Test Documentation
run: |
echo "# ESP32-2432S028R Test Build Report" > test_build_report.md
echo "" >> test_build_report.md
echo "## Build Information" >> test_build_report.md
echo "- **Date**: $(date)" >> test_build_report.md
echo "- **Commit**: ${{ github.sha }}" >> test_build_report.md
echo "- **Environment**: ESP32-2432S028R-test" >> test_build_report.md
echo "" >> test_build_report.md
if [ -f .pio/build/ESP32-2432S028R-test/firmware.bin ]; then
echo "## Firmware Details" >> test_build_report.md
echo "- **Size**: $(stat -c%s .pio/build/ESP32-2432S028R-test/firmware.bin) bytes" >> test_build_report.md
echo "- **Build Status**: ✅ Success" >> test_build_report.md
else
echo "- **Build Status**: ❌ Failed" >> test_build_report.md
fi
echo "" >> test_build_report.md
echo "## Test Coverage" >> test_build_report.md
echo "The following test suites are included in this build:" >> test_build_report.md
echo "- Hardware interface tests (GPIO, SPI, I2C, Display)" >> test_build_report.md
echo "- WiFi connectivity and manager tests" >> test_build_report.md
echo "- Stratum protocol validation" >> test_build_report.md
echo "- Hardware SHA256 acceleration tests" >> test_build_report.md
echo "- Mining workflow integration tests" >> test_build_report.md
echo "- Performance benchmarking tests" >> test_build_report.md
echo "- Memory management and leak detection" >> test_build_report.md
- name: Upload Embedded Test Artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: embedded-test-artifacts-${{ github.sha }}
path: |
.pio/build/ESP32-2432S028R-test/firmware.bin
.pio/build/ESP32-2432S028R-test/firmware.elf
test_build_report.md
retention-days: 30
code-quality:
name: Code Quality Analysis
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Cache PlatformIO
uses: actions/cache@v4
with:
path: |
~/.cache/pip
~/.platformio/.cache
key: ${{ runner.os }}-pio-quality-${{ hashFiles('**/platformio.ini') }}
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: Install PlatformIO Core
run: pip install --upgrade platformio
- name: Run Static Code Analysis
run: |
echo "Running PlatformIO code analysis..."
pio check --environment ESP32-2432S028R --verbose --json-output-path code_analysis.json || true
if [ -f code_analysis.json ]; then
echo "Code analysis completed. Results:"
cat code_analysis.json | jq '.' || cat code_analysis.json
fi
- name: Check Test File Structure
run: |
echo "Analyzing test file structure..."
echo "## Test Coverage Analysis" > test_coverage_report.md
echo "" >> test_coverage_report.md
# Count test files
test_files=$(find test/ -name "test_*.cpp" | wc -l)
echo "- **Total test files**: $test_files" >> test_coverage_report.md
# List test files
echo "" >> test_coverage_report.md
echo "### Test Files:" >> test_coverage_report.md
for file in test/test_*.cpp; do
if [ -f "$file" ]; then
lines=$(wc -l < "$file")
echo "- \`$(basename "$file")\` - $lines lines" >> test_coverage_report.md
fi
done
echo "" >> test_coverage_report.md
echo "### Test Fixtures:" >> test_coverage_report.md
for file in test/fixtures/*.h; do
if [ -f "$file" ]; then
lines=$(wc -l < "$file")
echo "- \`$(basename "$file")\` - $lines lines" >> test_coverage_report.md
fi
done
- name: Validate PlatformIO Configuration
run: |
echo "Validating PlatformIO configuration..."
pio project config --json-output-path project_config.json
if [ -f project_config.json ]; then
echo "Project configuration is valid"
echo "Available environments:"
cat project_config.json | jq -r '.environments | keys[]' || echo "Could not parse environments"
fi
- name: Upload Code Quality Results
uses: actions/upload-artifact@v4
if: always()
with:
name: code-quality-results-${{ github.sha }}
path: |
code_analysis.json
test_coverage_report.md
project_config.json
retention-days: 30
performance-check:
name: Performance Regression Check
runs-on: ubuntu-latest
needs: native-tests
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Cache PlatformIO
uses: actions/cache@v4
with:
path: |
~/.cache/pip
~/.platformio/.cache
key: ${{ runner.os }}-pio-perf-${{ hashFiles('**/platformio.ini') }}
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: Install PlatformIO Core
run: pip install --upgrade platformio
- name: Build Size Analysis
run: |
echo "Analyzing build sizes for all environments..."
echo "# Build Size Analysis" > size_analysis.md
echo "" >> size_analysis.md
echo "| Environment | Firmware Size | Flash Usage | RAM Usage |" >> size_analysis.md
echo "|-------------|---------------|-------------|-----------|" >> size_analysis.md
# Build key environments and analyze sizes
environments=("ESP32-2432S028R" "ESP32-2432S028R-test" "native_test")
for env in "${environments[@]}"; do
echo "Building environment: $env"
if pio run -e "$env" --silent > /dev/null 2>&1; then
if [ -f ".pio/build/$env/firmware.bin" ]; then
size=$(stat -c%s ".pio/build/$env/firmware.bin")
echo "| $env | $size bytes | TBD | TBD |" >> size_analysis.md
else
echo "| $env | Build failed | - | - |" >> size_analysis.md
fi
else
echo "| $env | Build failed | - | - |" >> size_analysis.md
fi
done
- name: Performance Benchmark (Native)
run: |
echo "Running performance benchmarks on native platform..."
# Run a subset of performance tests that work in native environment
timeout 300 pio test -e native_test -f "*performance*" --verbose || true
timeout 300 pio test -e native_test -f "*benchmark*" --verbose || true
- name: Upload Performance Results
uses: actions/upload-artifact@v4
if: always()
with:
name: performance-results-${{ github.sha }}
path: |
size_analysis.md
.pio/build/*/firmware.bin
retention-days: 30
test-summary:
name: Test Summary
runs-on: ubuntu-latest
needs: [native-tests, embedded-tests, code-quality, performance-check]
if: always()
steps:
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: test-artifacts
- name: Generate Test Summary
run: |
echo "# NerdMiner Test Suite Summary" > test_summary.md
echo "" >> test_summary.md
echo "**Commit:** ${{ github.sha }}" >> test_summary.md
echo "**Date:** $(date)" >> test_summary.md
echo "**Branch:** ${{ github.ref_name }}" >> test_summary.md
echo "" >> test_summary.md
echo "## Test Results" >> test_summary.md
echo "" >> test_summary.md
# Check job results
if [ "${{ needs.native-tests.result }}" = "success" ]; then
echo "✅ **Native Tests**: PASSED" >> test_summary.md
else
echo "❌ **Native Tests**: FAILED" >> test_summary.md
fi
if [ "${{ needs.embedded-tests.result }}" = "success" ]; then
echo "✅ **Embedded Tests**: PASSED" >> test_summary.md
else
echo "❌ **Embedded Tests**: FAILED" >> test_summary.md
fi
if [ "${{ needs.code-quality.result }}" = "success" ]; then
echo "✅ **Code Quality**: PASSED" >> test_summary.md
else
echo "❌ **Code Quality**: FAILED" >> test_summary.md
fi
if [ "${{ needs.performance-check.result }}" = "success" ]; then
echo "✅ **Performance Check**: PASSED" >> test_summary.md
else
echo "❌ **Performance Check**: FAILED" >> test_summary.md
fi
echo "" >> test_summary.md
echo "## Test Coverage" >> test_summary.md
echo "This test suite includes:" >> test_summary.md
echo "- **SHA256 Algorithm Tests**: Software implementation validation" >> test_summary.md
echo "- **Hardware Interface Tests**: GPIO, SPI, Display, Touch" >> test_summary.md
echo "- **Network Protocol Tests**: WiFi, Stratum, JSON parsing" >> test_summary.md
echo "- **Mining Integration Tests**: End-to-end mining workflow" >> test_summary.md
echo "- **Performance Benchmarks**: Hash rates, memory usage, display FPS" >> test_summary.md
echo "- **Error Handling Tests**: Edge cases and failure scenarios" >> test_summary.md
echo "" >> test_summary.md
echo "## Artifacts Generated" >> test_summary.md
echo "- Native test results and coverage reports" >> test_summary.md
echo "- ESP32-2432S028R test firmware binaries" >> test_summary.md
echo "- Code quality analysis reports" >> test_summary.md
echo "- Performance benchmarking data" >> test_summary.md
echo "- Build size analysis" >> test_summary.md
- name: Comment on PR (if applicable)
if: github.event_name == 'pull_request'
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
if (fs.existsSync('test_summary.md')) {
const summary = fs.readFileSync('test_summary.md', 'utf8');
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: summary
});
}
- name: Upload Final Test Summary
uses: actions/upload-artifact@v4
if: always()
with:
name: test-summary-${{ github.sha }}
path: |
test_summary.md
test-artifacts/
retention-days: 90