#!/bin/bash
# ============================================================================
# DarkForge Linux — Integration Test Runner
# ============================================================================
# Purpose: Run automated integration tests on an Arch Linux host.
# Generates a machine-readable report (JSON + human-readable summary)
# that can be fed back to the development process for fixing issues.
#
# Requirements:
#   - Arch Linux (x86_64) host
#   - Packages: qemu-full edk2-ovmf rust cargo base-devel git wget
#       sudo pacman -S qemu-full edk2-ovmf rust cargo base-devel git wget
#   - ~30GB free disk space
#   - Internet access (for package signing tests)
#
# Usage:
#   bash tests/run-tests.sh           # run all tests
#   bash tests/run-tests.sh --quick   # skip QEMU + long tests
#   bash tests/run-tests.sh --report  # generate report and exit
#
# Output:
#   tests/report.json — machine-readable test results
#   tests/report.txt  — human-readable summary
# ============================================================================
|
|
|
set -uo pipefail

# Resolve all paths relative to this script so the suite can be launched
# from any working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
REPORT_JSON="${SCRIPT_DIR}/report.json"
REPORT_TXT="${SCRIPT_DIR}/report.txt"
LOG_DIR="${SCRIPT_DIR}/logs"
QUICK_MODE=false

# Parse args — only --quick is recognized; anything else is ignored.
# NOTE(review): the file header documents a --report flag that is not
# handled here — confirm whether it should be implemented or de-documented.
for cli_arg in "$@"; do
  if [ "$cli_arg" = "--quick" ]; then
    QUICK_MODE=true
  fi
done

mkdir -p "${LOG_DIR}"
|
|
|
|
# --- Colors -----------------------------------------------------------------
|
|
RED='\033[0;31m'
|
|
GREEN='\033[0;32m'
|
|
YELLOW='\033[1;33m'
|
|
CYAN='\033[0;36m'
|
|
BOLD='\033[1m'
|
|
NC='\033[0m'
|
|
|
|
# --- Test infrastructure ----------------------------------------------------
|
|
PASS=0
|
|
FAIL=0
|
|
SKIP=0
|
|
TESTS=()
|
|
|
|
start_time=$(date +%s)
|
|
|
|
# Record one test result: append a JSON object to TESTS, bump the matching
# counter (PASS/FAIL/SKIP), and print a colored one-line status.
# Arguments:
#   $1 - test name (dotted identifier, e.g. "host.is_linux")
#   $2 - status: pass | fail | skip
#   $3 - optional detail string (embedded in the JSON "detail" field)
#   $4 - optional duration in whole seconds (default 0)
record_test() {
  local name="$1"
  local status="$2" # pass, fail, skip
  local detail="${3:-}"
  local duration="${4:-0}"

  # JSON-escape the detail: backslashes FIRST (the old sed pass escaped only
  # quotes, so a raw backslash produced invalid JSON), then quotes; newlines
  # become spaces. Pure parameter expansion also avoids the trailing space
  # the old echo|sed|tr pipeline appended to every detail.
  detail=${detail//\\/\\\\}
  detail=${detail//\"/\\\"}
  detail=${detail//$'\n'/ }

  TESTS+=("{\"name\":\"${name}\",\"status\":\"${status}\",\"detail\":\"${detail}\",\"duration_s\":${duration}}")

  case "$status" in
    # Plain arithmetic assignment instead of ((VAR++)): the increment form
    # returns status 1 when the pre-increment value is 0, which would trip
    # `set -e` if it were ever enabled for this script.
    pass) PASS=$((PASS + 1)); echo -e " ${GREEN}PASS${NC} ${name}" ;;
    fail) FAIL=$((FAIL + 1)); echo -e " ${RED}FAIL${NC} ${name}: ${detail}" ;;
    skip) SKIP=$((SKIP + 1)); echo -e " ${YELLOW}SKIP${NC} ${name}: ${detail}" ;;
  esac
}
|
|
|
|
# Run a command, time it, and record pass/fail through record_test.
# Usage: timed_test "test.name" command args...
# On failure the last 5 lines of combined output become the detail.
# Returns the wrapped command's exit status.
timed_test() {
  local name="$1"; shift
  local t0 t1 rc output
  # Declarations split from command substitutions so the substitution's
  # exit status is not masked by `local` (SC2155).
  t0=$(date +%s)
  output=$("$@" 2>&1)
  rc=$?
  t1=$(date +%s)
  local dur=$((t1 - t0))
  if [ "$rc" -eq 0 ]; then
    record_test "$name" "pass" "" "$dur"
  else
    # printf instead of echo: output beginning with "-" or containing
    # backslashes must not be interpreted as echo options/escapes.
    record_test "$name" "fail" "$(printf '%s\n' "$output" | tail -5 | tr '\n' ' ')" "$dur"
  fi
  return "$rc"
}
|
|
|
|
# ============================================================================
# TEST SUITE 1: Host Environment
# ============================================================================
echo -e "\n${BOLD}=== Test Suite 1: Host Environment ===${NC}\n"

# Everything below assumes Linux tooling (/proc, GNU coreutils, etc.).
case "$(uname -s)" in
  Linux) record_test "host.is_linux" "pass" ;;
  *)     record_test "host.is_linux" "fail" "Not Linux: $(uname -s)" ;;
esac

# /etc/arch-release marks an Arch system; other distros may still work.
if [ ! -f /etc/arch-release ]; then
  record_test "host.is_arch" "skip" "Not Arch Linux (tests may still work)"
else
  record_test "host.is_arch" "pass"
fi
|
|
|
|
# Verify each required host tool is on PATH.
REQUIRED_TOOLS="gcc g++ make git wget curl cargo rustc qemu-system-x86_64 sha256sum tar xz python3"
for tool in $REQUIRED_TOOLS; do
  if ! command -v "$tool" >/dev/null 2>&1; then
    # qemu is only needed for the boot suite, which --quick skips anyway.
    if [ "$QUICK_MODE" = true ] && [ "$tool" = "qemu-system-x86_64" ]; then
      record_test "host.tool.${tool}" "skip" "Quick mode"
    else
      record_test "host.tool.${tool}" "fail" "Not installed"
    fi
  else
    record_test "host.tool.${tool}" "pass"
  fi
done
|
|
|
|
# KVM acceleration needs the VMX (Intel) or SVM (AMD) CPU flag.
if ! grep -qE '(vmx|svm)' /proc/cpuinfo 2>/dev/null; then
  record_test "host.nested_virt" "skip" "No VMX/SVM — QEMU boot tests will be slower"
else
  record_test "host.nested_virt" "pass"
fi
|
|
|
|
# Check OVMF — search all known paths including 4m variant (newer Arch/edk2)
OVMF_PATH=""
ovmf_candidates=(
  /usr/share/edk2/x64/OVMF_CODE.4m.fd
  /usr/share/edk2/x64/OVMF_CODE.fd
  /usr/share/edk2-ovmf/x64/OVMF_CODE.4m.fd
  /usr/share/edk2-ovmf/x64/OVMF_CODE.fd
  /usr/share/OVMF/OVMF_CODE.4m.fd
  /usr/share/OVMF/OVMF_CODE.fd
  /usr/share/edk2/x64/OVMF.fd
  /usr/share/edk2-ovmf/x64/OVMF.fd
  /usr/share/ovmf/x64/OVMF.fd
  /usr/share/OVMF/OVMF.fd
  /usr/share/ovmf/OVMF.fd
)
for candidate in "${ovmf_candidates[@]}"; do
  if [ -f "$candidate" ]; then
    OVMF_PATH="$candidate"
    break
  fi
done

# Last resort: search with find (no action given, so -print applies to the
# whole -o expression and both names are reported).
if [ -z "$OVMF_PATH" ]; then
  OVMF_PATH=$(find /usr/share -name "OVMF_CODE*.fd" -o -name "OVMF.fd" 2>/dev/null | head -1)
fi

if [ -n "$OVMF_PATH" ]; then
  record_test "host.ovmf" "pass" "${OVMF_PATH}"
elif [ "$QUICK_MODE" = true ]; then
  record_test "host.ovmf" "skip" "Quick mode"
else
  record_test "host.ovmf" "fail" "OVMF not found — install edk2-ovmf"
fi
|
|
|
|
# Check GCC version (need 14+ for znver5)
GCC_VER=$(gcc -dumpversion 2>/dev/null | cut -d. -f1)
if [ -n "$GCC_VER" ] && [ "$GCC_VER" -ge 14 ]; then
  record_test "host.gcc_version" "pass" "GCC ${GCC_VER}"
elif [ -n "$GCC_VER" ]; then
  record_test "host.gcc_version" "fail" "GCC ${GCC_VER} — need 14+ for znver5"
else
  record_test "host.gcc_version" "fail" "GCC not found"
fi

# Check Rust version — fail when rustc is missing instead of recording an
# unconditional pass. The old code reported "Rust unknown" as a pass, so
# this check could never fail.
RUST_VER=$(rustc --version 2>/dev/null | awk '{print $2}')
if [ -n "$RUST_VER" ]; then
  record_test "host.rust_version" "pass" "Rust ${RUST_VER}"
else
  record_test "host.rust_version" "fail" "rustc not found"
fi
|
|
|
|
# ============================================================================
# TEST SUITE 2: dpack Build & Unit Tests
# ============================================================================
echo -e "\n${BOLD}=== Test Suite 2: dpack Build ===${NC}\n"

# Guard the cd: if the source tree is missing, record a failure instead of
# silently running cargo from whatever the current directory happens to be.
if cd "${PROJECT_ROOT}/src/dpack"; then
  # Build release; cargo's diagnostics (warnings/errors) go to stderr → log.
  t_start=$(date +%s)
  if cargo build --release 2>"${LOG_DIR}/dpack-build.log"; then
    t_end=$(date +%s)
    record_test "dpack.build" "pass" "" "$((t_end - t_start))"
  else
    t_end=$(date +%s)
    # Flatten the tail of the log and swap double quotes so it embeds
    # cleanly in the JSON detail.
    err=$(tail -5 "${LOG_DIR}/dpack-build.log" | tr '\n' ' ' | tr '"' "'")
    record_test "dpack.build" "fail" "${err}" "$((t_end - t_start))"
  fi
else
  record_test "dpack.build" "fail" "src/dpack not found under ${PROJECT_ROOT}"
fi
|
|
|
|
# Warning count from the build log. grep -c prints the count but exits
# non-zero when there are no matches or the log is missing, so fall back
# to 0 explicitly rather than letting the count stay empty.
warning_count=$(grep -c "^warning" "${LOG_DIR}/dpack-build.log" 2>/dev/null) || warning_count=0
if [ "$warning_count" -gt 0 ]; then
  record_test "dpack.no_warnings" "fail" "${warning_count} warning(s)"
else
  record_test "dpack.no_warnings" "pass"
fi
|
|
|
|
# Unit tests — capture BOTH stdout and stderr: cargo prints the
# "test result: ..." summary lines on stdout, which the old stderr-only
# redirect missed, so the failure detail grep below always came up empty.
t_start=$(date +%s)
if cargo test >"${LOG_DIR}/dpack-test.log" 2>&1; then
  t_end=$(date +%s)
  record_test "dpack.unit_tests" "pass" "" "$((t_end - t_start))"
else
  t_end=$(date +%s)
  # Summarize via the per-target result lines; swap quotes for JSON safety.
  err=$(grep "^test result" "${LOG_DIR}/dpack-test.log" | tr '"' "'")
  record_test "dpack.unit_tests" "fail" "${err}" "$((t_end - t_start))"
fi
|
|
|
|
# CLI smoke tests against the freshly built binary.
DPACK="${PROJECT_ROOT}/src/dpack/target/release/dpack"
if [ -x "$DPACK" ]; then
  # Basic CLI tests — quote "$DPACK" so a project path containing spaces
  # cannot word-split the command (the old unquoted form would).
  if "$DPACK" --version >/dev/null 2>&1; then
    record_test "dpack.cli.version" "pass"
  else
    record_test "dpack.cli.version" "fail" "dpack --version failed"
  fi

  if "$DPACK" --help >/dev/null 2>&1; then
    record_test "dpack.cli.help" "pass"
  else
    record_test "dpack.cli.help" "fail" "dpack --help failed"
  fi

  # Extended CLI tests with a throwaway config rooted in a temp dir; the
  # repo dirs are symlinked in so dpack sees the real package definitions.
  DPACK_TEST_DIR=$(mktemp -d /tmp/dpack-test-XXXXX)
  mkdir -p "${DPACK_TEST_DIR}"/{db,repos,sources,build}
  ln -sf "${PROJECT_ROOT}/src/repos/core" "${DPACK_TEST_DIR}/repos/core"
  ln -sf "${PROJECT_ROOT}/src/repos/extra" "${DPACK_TEST_DIR}/repos/extra"
  ln -sf "${PROJECT_ROOT}/src/repos/desktop" "${DPACK_TEST_DIR}/repos/desktop"
  ln -sf "${PROJECT_ROOT}/src/repos/gaming" "${DPACK_TEST_DIR}/repos/gaming"

  cat > "${DPACK_TEST_DIR}/dpack.conf" << DCONF
[paths]
db_dir = "${DPACK_TEST_DIR}/db"
repo_dir = "${DPACK_TEST_DIR}/repos"
source_dir = "${DPACK_TEST_DIR}/sources"
build_dir = "${DPACK_TEST_DIR}/build"

[[repos]]
name = "core"
path = "${DPACK_TEST_DIR}/repos/core"
priority = 0

[[repos]]
name = "extra"
path = "${DPACK_TEST_DIR}/repos/extra"
priority = 10

[[repos]]
name = "desktop"
path = "${DPACK_TEST_DIR}/repos/desktop"
priority = 20

[[repos]]
name = "gaming"
path = "${DPACK_TEST_DIR}/repos/gaming"
priority = 30
DCONF

  # Build the command as an ARRAY: the old space-joined string relied on
  # word-splitting and broke if either path contained whitespace.
  DPACK_CMD=("$DPACK" --config "${DPACK_TEST_DIR}/dpack.conf")

  if "${DPACK_CMD[@]}" list >/dev/null 2>&1; then
    record_test "dpack.cli.list" "pass"
  else
    record_test "dpack.cli.list" "fail" "Exit code $?"
  fi

  if "${DPACK_CMD[@]}" check >/dev/null 2>&1; then
    record_test "dpack.cli.check" "pass"
  else
    record_test "dpack.cli.check" "fail" "Exit code $?"
  fi

  if "${DPACK_CMD[@]}" search zlib 2>/dev/null | grep -q "zlib"; then
    record_test "dpack.cli.search" "pass"
  else
    record_test "dpack.cli.search" "fail" "zlib not found in search"
  fi

  if "${DPACK_CMD[@]}" info zlib 2>/dev/null | grep -qi "compression\|zlib"; then
    record_test "dpack.cli.info" "pass"
  else
    record_test "dpack.cli.info" "fail" "info zlib returned no useful output"
  fi

  rm -rf "${DPACK_TEST_DIR}"
else
  # Binary missing: record explicit skips so the report still lists the suite.
  for cli_check in version help list check search info; do
    record_test "dpack.cli.${cli_check}" "skip" "Binary not built"
  done
fi

cd "${PROJECT_ROOT}"
|
|
|
|
# ============================================================================
# TEST SUITE 3: Package Definitions
# ============================================================================
echo -e "\n${BOLD}=== Test Suite 3: Package Definitions ===${NC}\n"

# Count the package definition files (*.toml) shipped in each repository.
for repo in core extra desktop gaming; do
  repo_dir="${PROJECT_ROOT}/src/repos/${repo}"
  if [ ! -d "$repo_dir" ]; then
    record_test "repos.${repo}.count" "fail" "Directory missing"
    continue
  fi
  count=$(find "$repo_dir" -name "*.toml" | wc -l)
  record_test "repos.${repo}.count" "pass" "${count} packages"
done
|
|
|
|
# Validate TOML syntax (check required sections).
# Stream find results through a while/read loop instead of word-splitting a
# $(find ...) expansion, so paths containing whitespace survive; process
# substitution (not a pipe) keeps TOML_ERRORS in the current shell.
TOML_ERRORS=0
while IFS= read -r toml; do
  pkg_name=$(basename "$(dirname "$toml")")
  # Every package file must declare these three sections.
  for section in '\[package\]' '\[source\]' '\[build\]'; do
    if ! grep -q "$section" "$toml"; then
      record_test "repos.validate.${pkg_name}" "fail" "Missing ${section}"
      TOML_ERRORS=$((TOML_ERRORS + 1))
      break
    fi
  done
done < <(find "${PROJECT_ROOT}/src/repos" -name "*.toml" 2>/dev/null)

if [ "$TOML_ERRORS" -eq 0 ]; then
  total=$(find "${PROJECT_ROOT}/src/repos" -name "*.toml" 2>/dev/null | wc -l)
  record_test "repos.toml_validation" "pass" "All ${total} valid"
fi
|
|
|
|
# Dependency resolution check (all deps must resolve within the repo tree).
# The embedded Python builds the set of known package names (a directory
# <repo>/<name> containing <name>.toml) and then scans every run/build
# dependency list in every toml for names outside that set. It prints
# "OK:<count>" on success, or "MISSING:<names>" and exits non-zero.
# The heredoc delimiter is quoted ('PYEOF') so the Python source is passed
# verbatim; PROJECT_ROOT is handed over via the environment instead.
if command -v python3 >/dev/null 2>&1; then
  DEP_OUTPUT=$(PROJECT_ROOT="${PROJECT_ROOT}" python3 << 'PYEOF' 2>/dev/null
import os, re, sys
base = os.environ.get("PROJECT_ROOT", ".") + "/src/repos"
known = set()
for repo in ['core','extra','desktop','gaming']:
    d = os.path.join(base, repo)
    if not os.path.isdir(d): continue
    for p in os.listdir(d):
        if os.path.isdir(os.path.join(d,p)) and os.path.exists(os.path.join(d,p,f"{p}.toml")):
            known.add(p)
missing = set()
for repo in ['core','extra','desktop','gaming']:
    d = os.path.join(base, repo)
    if not os.path.isdir(d): continue
    for p in os.listdir(d):
        tf = os.path.join(d,p,f"{p}.toml")
        if not os.path.exists(tf): continue
        with open(tf) as f:
            content = f.read()
        for m in re.finditer(r'(?:run|build)\s*=\s*\[(.*?)\]', content):
            for dm in re.finditer(r'"([\w][\w.-]*)"', m.group(1)):
                if dm.group(1) not in known:
                    missing.add(dm.group(1))
if missing:
    print(f"MISSING:{','.join(sorted(missing))}")
    sys.exit(1)
else:
    print(f"OK:{len(known)}")
PYEOF
)
  # $? is the exit status of the command substitution above (i.e. python3's
  # exit code) — plain assignments propagate it.
  if [ $? -eq 0 ]; then
    record_test "repos.deps_resolve" "pass" "All dependencies resolve"
  else
    record_test "repos.deps_resolve" "fail" "${DEP_OUTPUT}"
  fi
else
  record_test "repos.deps_resolve" "skip" "python3 not available"
fi
|
|
|
|
# ============================================================================
# TEST SUITE 4: Script Validation
# ============================================================================
echo -e "\n${BOLD}=== Test Suite 4: Script Validation ===${NC}\n"

# Toolchain scripts — all executable.
# Quote "${PROJECT_ROOT}" before the glob (unquoted, a path with spaces would
# word-split) and skip the literal pattern bash leaves behind when the glob
# matches nothing — previously that produced a bogus "*" failure entry.
MISSING_SCRIPTS=0
SCRIPT_COUNT=0
for script in "${PROJECT_ROOT}"/toolchain/scripts/*.sh; do
  [ -e "$script" ] || continue
  SCRIPT_COUNT=$((SCRIPT_COUNT + 1))
  if [ ! -x "$script" ]; then
    record_test "toolchain.exec.$(basename "$script" .sh)" "fail" "Not executable"
    MISSING_SCRIPTS=$((MISSING_SCRIPTS + 1))
  fi
done
if [ "$MISSING_SCRIPTS" -eq 0 ]; then
  # Count taken from the same glob instead of parsing `ls` output.
  record_test "toolchain.all_executable" "pass" "${SCRIPT_COUNT} scripts"
fi

# Toolchain scripts — bash -n syntax check.
SYNTAX_ERRORS=0
for script in "${PROJECT_ROOT}"/toolchain/scripts/*.sh; do
  [ -e "$script" ] || continue
  if ! bash -n "$script" 2>/dev/null; then
    record_test "toolchain.syntax.$(basename "$script" .sh)" "fail" "Syntax error"
    SYNTAX_ERRORS=$((SYNTAX_ERRORS + 1))
  fi
done
if [ "$SYNTAX_ERRORS" -eq 0 ]; then
  record_test "toolchain.bash_syntax" "pass"
fi
|
|
|
|
#######################################
# Run `bash -n` syntax checks over a list of script files, recording one
# pass/fail per existing file. Factored out of three near-identical loops.
# Globals:   calls record_test
# Arguments:
#   $1 - record_test name prefix (e.g. "scripts.init")
#   $2 - filename suffix to strip for the test name ("" to keep it whole)
#   $@ - candidate paths; non-files (including unmatched globs) are skipped
#######################################
check_scripts_syntax() {
  local prefix="$1" suffix="$2"
  shift 2
  local script name
  for script in "$@"; do
    [ -f "$script" ] || continue
    if [ -n "$suffix" ]; then
      name=$(basename "$script" "$suffix")
    else
      name=$(basename "$script")
    fi
    if bash -n "$script" 2>/dev/null; then
      record_test "${prefix}.${name}" "pass"
    else
      record_test "${prefix}.${name}" "fail" "Syntax error"
    fi
  done
}

# Init/rc.d scripts — individual syntax checks
check_scripts_syntax "scripts.init" "" "${PROJECT_ROOT}"/configs/rc.d/*

# Installer scripts — syntax checks
check_scripts_syntax "scripts.install" ".sh" \
  "${PROJECT_ROOT}"/src/install/*.sh \
  "${PROJECT_ROOT}"/src/install/modules/*.sh

# ISO builder scripts — syntax checks
check_scripts_syntax "scripts.iso" ".sh" "${PROJECT_ROOT}"/src/iso/*.sh
|
|
|
|
# ============================================================================
# TEST SUITE 5: Kernel Config
# ============================================================================
echo -e "\n${BOLD}=== Test Suite 5: Kernel Config ===${NC}\n"

#######################################
# Validate a kernel .config file: required options built in, unwanted off.
# Globals:   calls record_test
# Arguments: $1 - path to the kernel config file
# Returns:   1 when the file is missing, 0 otherwise
#######################################
check_kernel_config() {
  local kconfig="$1"
  local opt

  if [ ! -f "$kconfig" ]; then
    record_test "kernel.config_exists" "fail" "kernel/config missing"
    return 1
  fi
  record_test "kernel.config_exists" "pass"

  # Critical options that must be built in (=y).
  for opt in CONFIG_EFI_STUB CONFIG_BLK_DEV_NVME CONFIG_PREEMPT CONFIG_R8169 CONFIG_EXT4_FS CONFIG_MODULES CONFIG_SMP CONFIG_AMD_IOMMU; do
    if grep -q "^${opt}=y" "$kconfig"; then
      record_test "kernel.${opt}" "pass"
    else
      record_test "kernel.${opt}" "fail" "Not set to =y"
    fi
  done

  # Options that must NOT be enabled. Kconfig normally writes a disabled
  # option as "# CONFIG_FOO is not set" (or omits it entirely) rather than
  # "=n", so the old literal "^OPT=n" match could never pass on a generated
  # .config. Treat "present as =y or =m" as the failure condition instead.
  for opt in CONFIG_BLUETOOTH CONFIG_WIRELESS CONFIG_DRM_NOUVEAU; do
    if grep -qE "^${opt}=(y|m)" "$kconfig"; then
      record_test "kernel.disable.${opt}" "fail" "Should be disabled (=n / not set)"
    else
      record_test "kernel.disable.${opt}" "pass"
    fi
  done
}

check_kernel_config "${PROJECT_ROOT}/kernel/config"
|
|
|
|
# ============================================================================
# TEST SUITE 6: Init System Files
# ============================================================================
echo -e "\n${BOLD}=== Test Suite 6: Init System ===${NC}\n"

# Static configuration files that must ship with the init system.
for f in rc.conf inittab fstab.template zprofile; do
  if [ ! -f "${PROJECT_ROOT}/configs/${f}" ]; then
    record_test "init.${f}" "fail" "Missing"
  else
    record_test "init.${f}" "pass"
  fi
done

# Daemon control scripts: must exist, be executable, and parse cleanly.
for daemon in eudev syslog dbus dhcpcd pipewire; do
  daemon_script="${PROJECT_ROOT}/configs/rc.d/${daemon}"
  if [ ! -x "$daemon_script" ]; then
    record_test "init.daemon.${daemon}" "fail" "Missing or not executable"
  elif bash -n "$daemon_script" 2>/dev/null; then
    record_test "init.daemon.${daemon}" "pass"
  else
    record_test "init.daemon.${daemon}" "fail" "Syntax error"
  fi
done
|
|
|
|
# ============================================================================
# TEST SUITE 7: Package Signing (network test)
# ============================================================================
if [ "$QUICK_MODE" = false ] && [ -x "$DPACK" ]; then
  echo -e "\n${BOLD}=== Test Suite 7: Package Signing ===${NC}\n"

  ZLIB_TOML="${PROJECT_ROOT}/src/repos/core/zlib/zlib.toml"
  if [ -f "$ZLIB_TOML" ]; then
    # Work on a backup so the signing run cannot corrupt the checked-in file.
    cp "$ZLIB_TOML" "${ZLIB_TOML}.bak"
    timed_test "sign.zlib" "$DPACK" sign zlib || true
    # Check if it got a real hash (not the "aaa..." placeholder)
    if grep -q 'sha256 = "aaa' "$ZLIB_TOML" 2>/dev/null; then
      record_test "sign.zlib_result" "fail" "Checksum still placeholder after signing"
    else
      record_test "sign.zlib_result" "pass"
    fi
    mv "${ZLIB_TOML}.bak" "$ZLIB_TOML"
  else
    # Previously nothing was recorded when the toml was absent, so the suite
    # silently vanished from the report; record an explicit skip instead.
    record_test "sign.zlib" "skip" "zlib.toml not found"
  fi
else
  echo -e "\n${BOLD}=== Test Suite 7: Package Signing (SKIPPED) ===${NC}\n"
  if [ "$QUICK_MODE" = true ]; then
    record_test "sign.zlib" "skip" "Quick mode"
  else
    # Previously this branch recorded nothing when dpack wasn't built,
    # leaving a gap in the report.
    record_test "sign.zlib" "skip" "dpack binary not built"
  fi
fi
|
|
|
|
# ============================================================================
# TEST SUITE 8: QEMU Boot Test (skipped in quick mode)
# ============================================================================
# Boots the built live ISO under OVMF for up to 60s and scrapes the serial
# console for kernel/userspace markers. Left structurally untouched; review
# notes are inline.
if [ "$QUICK_MODE" = false ] && [ -n "$OVMF_PATH" ]; then
  echo -e "\n${BOLD}=== Test Suite 8: QEMU Boot Test ===${NC}\n"

  ISO="${PROJECT_ROOT}/darkforge-live.iso"
  if [ -f "$ISO" ]; then
    echo " Testing ISO boot in QEMU (60s timeout)..."
    # Scratch install disk for the VM; removed after the run.
    QEMU_DISK=$(mktemp /tmp/darkforge-qemu-XXXXX.qcow2)
    qemu-img create -f qcow2 "$QEMU_DISK" 20G >/dev/null 2>&1

    # Use KVM acceleration only when /dev/kvm exists.
    KVM_FLAG=""
    [ -c /dev/kvm ] && KVM_FLAG="-enable-kvm"

    # Build OVMF flags — split CODE/VARS files need -drive, single .fd uses -bios
    OVMF_FLAGS=""
    if echo "$OVMF_PATH" | grep -q "OVMF_CODE"; then
      OVMF_VARS_TEMPLATE="$(dirname "$OVMF_PATH")/OVMF_VARS.fd"
      # Try 4m variant first
      if [ ! -f "$OVMF_VARS_TEMPLATE" ]; then
        OVMF_VARS_TEMPLATE="$(dirname "$OVMF_PATH")/OVMF_VARS.4m.fd"
      fi
      # Writable VARS copy; fall back to a zero-filled blob if no template
      # was found (dd size is 256K — TODO confirm this matches the 4m builds).
      OVMF_VARS_COPY="/tmp/darkforge-ovmf-vars.fd"
      cp "$OVMF_VARS_TEMPLATE" "$OVMF_VARS_COPY" 2>/dev/null || dd if=/dev/zero of="$OVMF_VARS_COPY" bs=256K count=1 2>/dev/null
      OVMF_FLAGS="-drive if=pflash,format=raw,readonly=on,file=${OVMF_PATH} -drive if=pflash,format=raw,file=${OVMF_VARS_COPY}"
    else
      OVMF_FLAGS="-bios ${OVMF_PATH}"
    fi

    # ${KVM_FLAG} and ${OVMF_FLAGS} are intentionally unquoted so they
    # word-split into separate qemu arguments. Serial output is capped at
    # 200 lines; stderr goes to its own log.
    timeout 60 qemu-system-x86_64 \
      ${KVM_FLAG} \
      -m 2G \
      -smp 2 \
      ${OVMF_FLAGS} \
      -cdrom "$ISO" \
      -drive file="$QEMU_DISK",format=qcow2,if=virtio \
      -nographic \
      -serial mon:stdio \
      -no-reboot \
      2>"${LOG_DIR}/qemu-stderr.log" | head -200 > "${LOG_DIR}/qemu-output.log" &
    QEMU_PID=$!

    # NOTE(review): $! after a backgrounded pipeline is the PID of its LAST
    # stage (head), not qemu itself — the kill below targets head; qemu's
    # lifetime is actually bounded by `timeout 60`. The sleep duplicates
    # that 60s bound. Confirm this is the intended shutdown path.
    sleep 60
    kill $QEMU_PID 2>/dev/null
    wait $QEMU_PID 2>/dev/null

    # Check if we got kernel boot messages
    if grep -qi "linux version\|darkforge\|kernel" "${LOG_DIR}/qemu-output.log" 2>/dev/null; then
      record_test "qemu.kernel_boots" "pass"
    else
      record_test "qemu.kernel_boots" "fail" "No kernel boot messages in serial output"
    fi

    # Check if we got to userspace
    if grep -qi "login:\|installer\|welcome\|darkforge" "${LOG_DIR}/qemu-output.log" 2>/dev/null; then
      record_test "qemu.reaches_userspace" "pass"
    else
      record_test "qemu.reaches_userspace" "fail" "Did not reach login prompt"
    fi

    rm -f "$QEMU_DISK"
  else
    record_test "qemu.iso_exists" "fail" "No ISO found — build it first with src/iso/build-iso.sh"
    record_test "qemu.kernel_boots" "skip" "No ISO"
    record_test "qemu.reaches_userspace" "skip" "No ISO"
  fi
else
  echo -e "\n${BOLD}=== Test Suite 8: QEMU Boot Test (SKIPPED) ===${NC}\n"
  record_test "qemu.kernel_boots" "skip" "Quick mode or no OVMF"
fi
|
|
|
|
# ============================================================================
# Generate Report
# ============================================================================
end_time=$(date +%s)
total_duration=$((end_time - start_time))
TOTAL=$((PASS + FAIL + SKIP))

# JSON report. The heredoc expands all shell substitutions; each TESTS entry
# is already a serialized JSON object, joined with commas via a temporary
# IFS in a subshell (the outer IFS is untouched). An empty TESTS array
# yields an empty (still valid) "tests" list.
cat > "$REPORT_JSON" << JSONEOF
{
  "project": "DarkForge Linux",
  "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
  "host": "$(uname -n) $(uname -r) $(uname -m)",
  "duration_s": ${total_duration},
  "summary": {
    "total": ${TOTAL},
    "pass": ${PASS},
    "fail": ${FAIL},
    "skip": ${SKIP}
  },
  "tests": [
$(IFS=,; echo "${TESTS[*]}")
  ]
}
JSONEOF
|
|
|
|
# Human-readable report: header block, then an optional FAILURES section.
cat > "$REPORT_TXT" << TXTEOF
================================================================================
DarkForge Linux — Integration Test Report
================================================================================
Date: $(date -u +"%Y-%m-%d %H:%M:%S UTC")
Host: $(uname -n) $(uname -r) $(uname -m)
Duration: ${total_duration}s

RESULTS: ${PASS} pass, ${FAIL} fail, ${SKIP} skip (${TOTAL} total)
================================================================================

TXTEOF

# Append failures to the text report, pulling name/detail back out of the
# serialized JSON fragments. All appends share one grouped redirect.
if [ "$FAIL" -gt 0 ]; then
  {
    echo "FAILURES:"
    echo ""
    for t in "${TESTS[@]}"; do
      case "$t" in
        *'"status":"fail"'*)
          name=$(echo "$t" | sed 's/.*"name":"\([^"]*\)".*/\1/')
          detail=$(echo "$t" | sed 's/.*"detail":"\([^"]*\)".*/\1/')
          echo " FAIL: ${name}"
          echo " ${detail}"
          echo ""
          ;;
      esac
    done
  } >> "$REPORT_TXT"
fi

{
  echo ""
  echo "Full results in: ${REPORT_JSON}"
} >> "$REPORT_TXT"
|
|
|
|
# Print summary. printf '%b' interprets the \033 escapes in the color
# variables exactly as `echo -e` did.
printf '\n'
printf '%b\n' "${BOLD}═══════════════════════════════════════════════${NC}"
printf '%b\n' " ${BOLD}Results:${NC} ${GREEN}${PASS} pass${NC}, ${RED}${FAIL} fail${NC}, ${YELLOW}${SKIP} skip${NC} (${TOTAL} total)"
printf '%b\n' " ${BOLD}Duration:${NC} ${total_duration}s"
printf '%b\n' " ${BOLD}Report:${NC} ${REPORT_TXT}"
printf '%b\n' " ${BOLD}JSON:${NC} ${REPORT_JSON}"
printf '%b\n' "${BOLD}═══════════════════════════════════════════════${NC}"

# Exit non-zero when any test failed so CI can gate on this script.
if [ "$FAIL" -eq 0 ]; then
  exit 0
else
  exit 1
fi
|