mirror of https://github.com/rcourtman/Pulse.git (synced 2026-02-18 00:17:39 +01:00)
chore: remove outdated docs, update cleanup script and release workflow
@@ -1,6 +1,7 @@
#!/bin/bash

# pulse-sensor-cleanup.sh - Removes Pulse SSH keys from Proxmox nodes when they're removed from Pulse
# pulse-sensor-cleanup.sh - Complete Pulse footprint removal when nodes are removed
# Removes: SSH keys, proxy service, binaries, API tokens, and LXC bind mounts
# This script is triggered by systemd path unit when cleanup-request.json is created

set -euo pipefail
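The comment above refers to a systemd path unit that is not part of this diff. As a purely hypothetical sketch of such a watcher (the unit names and the script location are assumptions; only the watched file path comes from the script's configuration below):

```bash
# Hypothetical path/service pair that could trigger the cleanup script.
# Unit names and the ExecStart target are assumed, not taken from this commit.
cat > /etc/systemd/system/pulse-sensor-cleanup.path <<'EOF'
[Unit]
Description=Watch for Pulse cleanup requests

[Path]
PathExists=/var/lib/pulse-sensor-proxy/cleanup-request.json
Unit=pulse-sensor-cleanup.service

[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload && systemctl enable --now pulse-sensor-cleanup.path
```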
@@ -8,7 +9,9 @@ set -euo pipefail
# Configuration
WORK_DIR="/var/lib/pulse-sensor-proxy"
CLEANUP_REQUEST="${WORK_DIR}/cleanup-request.json"
LOCKFILE="${WORK_DIR}/cleanup.lock"
LOG_TAG="pulse-sensor-cleanup"
INSTALLER_PATH="/opt/pulse/sensor-proxy/install-sensor-proxy.sh"

# Logging functions
log_info() {
@@ -26,6 +29,13 @@ log_error() {
    echo "[ERROR] $1" >&2
}

# Acquire exclusive lock to prevent concurrent cleanup runs
exec 200>"$LOCKFILE"
if ! flock -n 200; then
    log_info "Another cleanup instance is running, exiting"
    exit 0
fi

# Check if cleanup request file exists
if [[ ! -f "$CLEANUP_REQUEST" ]]; then
    log_info "No cleanup request found at $CLEANUP_REQUEST"
@@ -41,8 +51,12 @@ REQUESTED_AT=$(echo "$CLEANUP_DATA" | grep -o '"requestedAt":"[^"]*"' | cut -d'"

log_info "Cleanup requested at: ${REQUESTED_AT:-unknown}"

# Remove the cleanup request file immediately to prevent re-processing
rm -f "$CLEANUP_REQUEST"
# Rename request file to .processing to prevent re-triggering while allowing retry on failure
PROCESSING_FILE="${CLEANUP_REQUEST}.processing"
mv "$CLEANUP_REQUEST" "$PROCESSING_FILE" 2>/dev/null || {
    log_warn "Failed to rename cleanup request file, may have been processed by another instance"
    exit 0
}
# If no specific host was provided, clean up all known nodes
if [[ -z "$HOST" ]]; then
@@ -50,7 +64,7 @@ if [[ -z "$HOST" ]]; then

    # Discover cluster nodes
    if command -v pvecm >/dev/null 2>&1; then
        CLUSTER_NODES=$(pvecm status 2>/dev/null | awk '/0x[0-9a-f]+.*[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/ {for(i=1;i<=NF;i++) if($i ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) print $i}')
        CLUSTER_NODES=$(pvecm status 2>/dev/null | awk '/0x[0-9a-f]+.*[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/ {for(i=1;i<=NF;i++) if($i ~ /^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$/) print $i}' || true)

        if [[ -n "$CLUSTER_NODES" ]]; then
            for node_ip in $CLUSTER_NODES; do
@@ -79,13 +93,16 @@ if [[ -z "$HOST" ]]; then
else
    log_info "Cleaning up specific host: $HOST"

    # Extract IP from host URL
    # Extract hostname/IP from host URL
    HOST_CLEAN=$(echo "$HOST" | sed -e 's|^https\?://||' -e 's|:.*$||')

    # Check if this is localhost
    # Check if this is localhost (by IP, hostname, or FQDN)
    LOCAL_IPS=$(hostname -I 2>/dev/null || echo "")
    LOCAL_HOSTNAME=$(hostname 2>/dev/null || echo "")
    LOCAL_FQDN=$(hostname -f 2>/dev/null || echo "")
    IS_LOCAL=false

    # Check against all local IPs
    for local_ip in $LOCAL_IPS; do
        if [[ "$HOST_CLEAN" == "$local_ip" ]]; then
            IS_LOCAL=true
@@ -93,15 +110,143 @@ else
        fi
    done
    if [[ "$HOST_CLEAN" == "127.0.0.1" || "$HOST_CLEAN" == "localhost" ]]; then
    # Check against hostname and FQDN
    if [[ "$HOST_CLEAN" == "127.0.0.1" || "$HOST_CLEAN" == "localhost" || \
          "$HOST_CLEAN" == "$LOCAL_HOSTNAME" || "$HOST_CLEAN" == "$LOCAL_FQDN" ]]; then
        IS_LOCAL=true
    fi

    if [[ "$IS_LOCAL" == true ]]; then
        log_info "Cleaning up localhost SSH keys"
        log_info "Performing full cleanup on localhost"

        # 1. Remove SSH keys
        log_info "Removing SSH keys from authorized_keys"
        sed -i -e '/# pulse-managed-key$/d' -e '/# pulse-proxy-key$/d' /root/.ssh/authorized_keys 2>&1 | \
            logger -t "$LOG_TAG" -p user.info || \
            log_warn "Failed to clean up SSH keys on localhost"
            log_warn "Failed to clean up SSH keys"

        # 2. Delete API tokens and user
        log_info "Removing Proxmox API tokens and pulse-monitor user"
        if command -v pveum >/dev/null 2>&1; then
            # Try JSON output first (pveum with --output-format json)
            TOKEN_IDS=""
            if command -v python3 >/dev/null 2>&1; then
                # Try pveum with JSON output
                if TOKEN_JSON=$(pveum user token list pulse-monitor@pam --output-format json 2>/dev/null); then
                    TOKEN_IDS=$(echo "$TOKEN_JSON" | python3 -c '
import sys, json
try:
    data = json.load(sys.stdin)
    if isinstance(data, list):
        for item in data:
            if "tokenid" in item:
                print(item["tokenid"])
except: pass
' || true)
                fi
            fi
            # Fall back to pvesh JSON API if pveum JSON didn't work
            if [[ -z "$TOKEN_IDS" ]] && command -v pvesh >/dev/null 2>&1; then
                if TOKEN_JSON=$(pvesh get /access/users/pulse-monitor@pam/token 2>/dev/null); then
                    TOKEN_IDS=$(echo "$TOKEN_JSON" | python3 -c '
import sys, json
try:
    data = json.load(sys.stdin)
    if isinstance(data, dict) and "data" in data:
        for item in data["data"]:
            if "tokenid" in item:
                print(item["tokenid"])
except: pass
' 2>/dev/null || true)
                fi
            fi

            # Last resort: parse table output with better filtering
            if [[ -z "$TOKEN_IDS" ]]; then
                TOKEN_IDS=$(pveum user token list pulse-monitor@pam 2>/dev/null | \
                    awk 'NR>1 && /^[[:space:]]*pulse/ {print $1}' | grep -v '^[│┌└╞─]' | grep -v '^$' || true)
            fi

            if [[ -n "$TOKEN_IDS" ]]; then
                for token_id in $TOKEN_IDS; do
                    log_info "Deleting API token: $token_id"
                    pveum user token remove pulse-monitor@pam "${token_id}" 2>&1 | \
                        logger -t "$LOG_TAG" -p user.info || \
                        log_warn "Failed to delete token $token_id"
                done
            else
                log_info "No API tokens found for pulse-monitor@pam"
            fi

            # Remove the pulse-monitor user
            log_info "Removing pulse-monitor@pam user"
            pveum user delete pulse-monitor@pam 2>&1 | \
                logger -t "$LOG_TAG" -p user.info || \
                log_warn "pulse-monitor@pam user not found or already removed"
        else
            log_warn "pveum command not available, skipping API token cleanup"
        fi
        # 3. Remove LXC bind mounts
        log_info "Removing LXC bind mounts from container configs"
        if command -v pct >/dev/null 2>&1; then
            for ctid in $(pct list 2>/dev/null | awk 'NR>1 {print $1}' || true); do
                CONF_FILE="/etc/pve/lxc/${ctid}.conf"
                if [[ -f "$CONF_FILE" ]]; then
                    # Find pulse-sensor-proxy mount points and remove them using pct
                    for mp_key in $(grep -o "^mp[0-9]\+:" "$CONF_FILE" | grep -f <(grep "pulse-sensor-proxy" "$CONF_FILE" | grep -o "^mp[0-9]\+:") || true); do
                        mp_num="${mp_key%:}"
                        log_info "Removing ${mp_num} (pulse-sensor-proxy) from container $ctid"
                        if pct set "$ctid" -delete "${mp_num}" 2>&1 | logger -t "$LOG_TAG" -p user.info; then
                            log_info "Successfully removed ${mp_num} from container $ctid"
                        else
                            log_warn "Failed to remove ${mp_num} from container $ctid"
                        fi
                    done
                fi
            done
        fi
        # 4. Uninstall proxy service and remove binaries via isolated transient unit
        log_info "Starting full uninstallation (service, binaries, configs)"
        if [[ -x "$INSTALLER_PATH" ]]; then
            # Use systemd-run to create isolated transient unit that won't be killed
            # when we stop pulse-sensor-proxy.service
            if command -v systemd-run >/dev/null 2>&1; then
                # Use UUID for unique unit name (prevents same-second collisions)
                UNINSTALL_UUID=$(cat /proc/sys/kernel/random/uuid 2>/dev/null || date +%s%N)
                UNINSTALL_UNIT="pulse-uninstall-${UNINSTALL_UUID}"
                log_info "Spawning isolated uninstaller unit: $UNINSTALL_UNIT"

                systemd-run \
                    --unit="${UNINSTALL_UNIT}" \
                    --property="Type=oneshot" \
                    --property="Conflicts=pulse-sensor-proxy.service" \
                    --collect \
                    --wait \
                    --quiet \
                    -- bash -c "$INSTALLER_PATH --uninstall --purge --quiet >> /var/log/pulse/sensor-proxy/uninstall.log 2>&1" \
                    2>&1 | logger -t "$LOG_TAG" -p user.info

                UNINSTALL_EXIT=$?
                if [[ $UNINSTALL_EXIT -eq 0 ]]; then
                    log_info "Uninstaller completed successfully"
                else
                    log_error "Uninstaller failed with exit code $UNINSTALL_EXIT"
                    exit 1
                fi
            else
                log_warn "systemd-run not available, attempting direct uninstall (may fail)"
                bash "$INSTALLER_PATH" --uninstall --quiet >> /var/log/pulse/sensor-proxy/uninstall.log 2>&1 || \
                    log_error "Uninstaller failed - manual cleanup may be required"
            fi
        else
            log_warn "Installer not found at $INSTALLER_PATH, cannot run uninstaller"
            log_info "Manual cleanup required: systemctl stop pulse-sensor-proxy && systemctl disable pulse-sensor-proxy"
        fi

        log_info "Localhost cleanup initiated (uninstaller running in background)"
    else
        log_info "Cleaning up remote host: $HOST_CLEAN"

@@ -134,56 +279,8 @@ else
    fi
fi
# Full cleanup: uninstall proxy service, remove bind mounts, delete API tokens
log_info "Starting full cleanup: uninstalling proxy service and removing remaining artifacts"
# Remove processing file on success
rm -f "$PROCESSING_FILE"

# 1. Run the proxy uninstaller if available
INSTALLER_PATH="/usr/local/share/pulse/install-sensor-proxy.sh"
if [[ -x "$INSTALLER_PATH" ]]; then
    log_info "Running proxy uninstaller to remove service and bind mounts"
    if "$INSTALLER_PATH" --uninstall --quiet; then
        log_info "Proxy service uninstalled successfully"
    else
        log_warn "Proxy uninstaller reported errors (may already be removed)"
    fi
else
    log_warn "Proxy uninstaller not found at $INSTALLER_PATH - manual cleanup may be required"
fi

# 2. Delete Proxmox API tokens
log_info "Removing Proxmox API tokens for pulse-monitor user"

# Find all API tokens for pulse-monitor user
if command -v pveum >/dev/null 2>&1; then
    # List tokens for pulse-monitor user
    TOKENS=$(pveum user token list pulse-monitor@pam 2>/dev/null | awk 'NR>1 {print $1}' || echo "")

    if [[ -n "$TOKENS" ]]; then
        for token_id in $TOKENS; do
            log_info "Removing API token: pulse-monitor@pam!${token_id}"
            if pveum user token remove pulse-monitor@pam "${token_id}" 2>/dev/null; then
                log_info "Successfully removed token: ${token_id}"
            else
                log_warn "Failed to remove token: ${token_id}"
            fi
        done
    else
        log_info "No API tokens found for pulse-monitor@pam user"
    fi

    # Remove the pulse-monitor user entirely
    if pveum user list 2>/dev/null | grep -q "pulse-monitor@pam"; then
        log_info "Removing pulse-monitor@pam user"
        if pveum user delete pulse-monitor@pam 2>/dev/null; then
            log_info "Successfully removed pulse-monitor@pam user"
        else
            log_warn "Failed to remove pulse-monitor@pam user"
        fi
    fi
else
    log_warn "pveum command not available - cannot remove API tokens automatically"
    log_info "Manual cleanup: pveum user delete pulse-monitor@pam"
fi

log_info "Full cleanup completed successfully"
log_info "Cleanup completed successfully"
exit 0
@@ -100,18 +100,46 @@ if [ -f "$NOTES_FILE" ]; then
        echo "Release notes file ignored"
        NOTES_FILE=""
    fi
else
else
    echo "No release notes file found at ${NOTES_FILE}"
    echo ""
    echo "Create release notes manually or let the workflow prompt you."
    read -p "Generate release notes automatically? [Y/n] " -n 1 -r
    echo ""
    read -p "Continue without release notes file? [y/N] " -n 1 -r
    echo ""
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        echo "Aborted"
        exit 1
    if [[ ! $REPLY =~ ^[Nn]$ ]]; then
        echo "Generating release notes..."
        # Try to find previous tag for better context
        PREV_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")

        if ./scripts/generate-release-notes.sh "$VERSION" "$PREV_TAG" > "$NOTES_FILE"; then
            echo "Release notes generated at ${NOTES_FILE}"
            echo ""
            # Show first few lines
            head -n 20 "$NOTES_FILE"
            echo "... (truncated)"
            echo ""
            read -p "Use these release notes? [Y/n] " -n 1 -r
            echo ""
            if [[ $REPLY =~ ^[Nn]$ ]]; then
                echo "Release notes rejected."
                rm "$NOTES_FILE"
                NOTES_FILE=""
            fi
        else
            echo "Failed to generate release notes."
            NOTES_FILE=""
        fi
    else
        NOTES_FILE=""
    fi
    NOTES_FILE=""
fi
if [ -z "$NOTES_FILE" ]; then
    echo "❌ Error: Release notes are required"
    echo ""
    echo "Create ${NOTES_FILE} manually, then run this script again."
    echo ""
    exit 1
fi

# Trigger the workflow
@@ -122,11 +150,8 @@ if [ -n "$NOTES_FILE" ]; then
    -f version="${VERSION}" \
    -f release_notes="$(cat "$NOTES_FILE")"
else
    echo ""
    # This should be unreachable due to check above, but kept for safety
    echo "❌ Error: Release notes are required"
    echo ""
    echo "Create ${NOTES_FILE} with your release notes, then run this script again."
    echo ""
    exit 1
fi
@@ -1,359 +0,0 @@
# Update Integration Tests - Implementation Summary

## Overview

This implementation provides a comprehensive end-to-end testing framework for the Pulse update flow, validating the entire path from UI to backend with controllable test scenarios.

## What Was Built

### 1. Test Harness Infrastructure
#### Mock GitHub Release Server (`mock-github-server/`)
- **Language**: Go
- **Features**:
  - Simulates GitHub Releases API
  - Generates realistic release tarballs with checksums
  - Controllable failure modes via environment variables (see the example below)
  - Rate limiting simulation
  - Stale release detection
  - Network error simulation
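A hedged illustration of poking the mock server directly once the test environment is up. The port and the published endpoint path are assumptions for this sketch (the real values live in `mock-github-server/main.go` and `docker-compose.test.yml`):

```bash
# Assumed: mock-github is reachable on localhost:8080 and mirrors the
# GitHub releases API shape for the Pulse repository.
curl -s http://localhost:8080/repos/rcourtman/Pulse/releases/latest | jq -r '.tag_name'

# Failure modes are toggled through the MOCK_* environment variables listed
# later in this document; for example, restarting the service with
# MOCK_STALE_RELEASE=true should make this same endpoint advertise a release
# that Pulse must refuse to install.
```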
#### Docker Compose Test Environment (`docker-compose.test.yml`)
- **Services**:
  - `pulse-test`: Pulse server configured for testing
  - `mock-github`: Mock GitHub API server
- **Features**:
  - Isolated network for testing
  - Health checks for both services
  - Environment-based configuration for different test scenarios (example below)
  - Automatic cleanup after tests
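A minimal sketch of driving this environment by hand, assuming the compose file forwards the `MOCK_*` variables to the `mock-github` service (the service names and file name come from this document; the exact variable plumbing is an assumption):

```bash
cd tests/integration

# Bring the stack up with one failure mode enabled for the mock server,
# waiting for both health checks to pass.
MOCK_CHECKSUM_ERROR=true docker compose -f docker-compose.test.yml up -d --wait

# ... run a test suite against pulse-test here ...

# Tear everything down, including volumes, so the next scenario starts clean.
docker compose -f docker-compose.test.yml down -v
```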
### 2. Playwright Test Suite

#### Test Infrastructure
- **Framework**: Playwright with TypeScript
- **Configuration**: `playwright.config.ts`
- **Helpers**: `tests/helpers.ts` with reusable test utilities
- **Browser**: Chromium (headless in CI)
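For reference, a hedged example of invoking an individual suite with the standard Playwright CLI. The spec file names come from this document; it assumes the compose environment is already running and that `playwright.config.ts` defines a `chromium` project:

```bash
cd tests/integration

# Run one suite headless against the already-running test stack.
npx playwright test tests/02-bad-checksums.spec.ts --project=chromium

# Re-run only the cases whose titles match a pattern, with the inspector
# attached (the pattern here is illustrative).
npx playwright test tests/02-bad-checksums.spec.ts --debug -g "error modal"
```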
#### Test Scenarios Implemented

##### 01. Happy Path (`01-happy-path.spec.ts`)
- ✅ Display update banner when update is available
- ✅ Show confirmation modal with version details
- ✅ Show progress modal during update
- ✅ Progress modal appears exactly once (no duplicates)
- ✅ Display different stages (downloading, verifying, extracting, etc.)
- ✅ Verify checksum during update
- ✅ Complete end-to-end update flow
- ✅ Include release notes in update banner

**Tests**: 8 test cases

##### 02. Bad Checksums (`02-bad-checksums.spec.ts`)
- ✅ Display error when checksum validation fails
- ✅ Show error modal EXACTLY ONCE (not twice) ⭐ **Critical for v4.28.0 issue**
- ✅ Display user-friendly error message
- ✅ Allow dismissing error modal
- ✅ No raw API error responses shown
- ✅ Prevent retry with same bad checksum
- ✅ Maintain single modal through state changes
- ✅ Show specific checksum error details

**Tests**: 8 test cases
**Key Feature**: Catches the v4.28.0 duplicate error modal issue

##### 03. Rate Limiting (`03-rate-limiting.spec.ts`)
- ✅ Rate limit excessive update check requests
- ✅ Include rate limit headers in response
- ✅ Include Retry-After header when rate limited
- ✅ Allow requests after rate limit window expires
- ✅ Rate limit per IP address independently
- ✅ Provide clear error message when rate limited
- ✅ Don't rate limit reasonable request patterns
- ✅ Rate limit apply update endpoint separately
- ✅ Decrement rate limit counter appropriately

**Tests**: 9 test cases

##### 04. Network Failure (`04-network-failure.spec.ts`)
- ✅ Retry failed update check requests
- ✅ Use exponential backoff for retries
- ✅ Show loading state during retry
- ✅ Eventually succeed after transient failures
- ✅ Don't retry indefinitely
- ✅ Show error after max retries exceeded
- ✅ Handle timeout during download
- ✅ Use exponential backoff with maximum cap
- ✅ Preserve user context during retries
- ✅ Handle partial download failures gracefully

**Tests**: 10 test cases

##### 05. Stale Release (`05-stale-release.spec.ts`)
- ✅ Reject stale release during download
- ✅ Detect stale release before extraction
- ✅ Provide informative message about rejection
- ✅ Don't create backup for stale release
- ✅ Reject stale release even with valid checksum
- ✅ Log stale release rejection attempt
- ✅ Handle X-Release-Status header from server
- ✅ Allow checking for other updates after rejection
- ✅ Differentiate stale release error from other errors
- ✅ Prevent installation of specific flagged version

**Tests**: 10 test cases

##### 06. Frontend Validation (`06-frontend-validation.spec.ts`)
- ✅ UpdateProgressModal appears exactly once during update ⭐
- ✅ No duplicate modals during state transitions ⭐
- ✅ Error modal appears exactly once on checksum failure ⭐
- ✅ Error messages are user-friendly (not raw API errors) ⭐
- ✅ Modal can be dismissed after error ⭐
- ✅ Modal has accessible close button
- ✅ ESC key dismisses modal after error
- ✅ Error message doesn't contain stack traces
- ✅ Error message doesn't contain internal API paths
- ✅ Error message is concise and actionable
- ✅ Modal has proper ARIA attributes for accessibility
- ✅ Progress bar has proper ARIA attributes
- ✅ Modal backdrop prevents interaction with background
- ✅ Modal maintains focus trap during update
- ✅ No console errors during update flow

**Tests**: 15 test cases
**Key Feature**: Comprehensive UX validation to prevent regressions

### 3. CI/CD Integration
#### GitHub Actions Workflow (`.github/workflows/test-updates.yml`)
- **Triggers**:
  - Pull requests touching update-related code
  - Pushes to main/master
  - Manual workflow dispatch (see the example below)
- **Jobs**:
  - `integration-tests`: Runs all test suites with different configurations
  - `regression-test`: Verifies tests catch v4.28.0-style checksum issues
- **Features**:
  - Runs each test suite with appropriate mock configuration
  - Uploads test reports and failure artifacts
  - Comments on PR when tests fail
  - Parallel test execution where possible
  - Automatic cleanup of Docker resources
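As a quick sketch of the manual dispatch mentioned above, using the GitHub CLI. The workflow file name comes from this document; whether the workflow accepts extra inputs is an assumption, so none are passed here:

```bash
# Dispatch the update-flow integration tests on the default branch.
gh workflow run test-updates.yml

# Follow the run that was just started.
gh run watch "$(gh run list --workflow=test-updates.yml --limit 1 --json databaseId -q '.[0].databaseId')"
```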
### 4. Helper Scripts

#### Setup Script (`scripts/setup.sh`)
- Checks prerequisites (Docker, Node.js, Go)
- Installs npm dependencies
- Installs Playwright browsers
- Builds Docker images
- Provides clear setup instructions

#### Test Runner (`scripts/run-tests.sh`)
- Run all tests or specific test suite
- Manages Docker environment per test
- Provides colored output for test results
- Handles cleanup after tests
- Reports summary of passed/failed tests

### 5. Documentation

#### Main README (`README.md`)
- Architecture overview
- Test scenario descriptions
- Running instructions
- Success criteria

#### Quick Start Guide (`QUICK_START.md`)
- Prerequisites
- One-time setup
- Running tests (all patterns)
- Troubleshooting guide
- Architecture diagram

#### Implementation Summary (this document)
- Complete overview of what was built
- Test coverage statistics
- Success criteria verification

## Test Coverage Statistics

- **Total Test Files**: 6
- **Total Test Cases**: 60+
- **Test Scenarios**: 5 major scenarios + frontend validation
- **Lines of Test Code**: ~2,500+
- **Mock Server Code**: ~300 lines
- **Helper Functions**: 20+

## Success Criteria Verification

### ✅ Tests run in CI on every PR touching update code
**Status**: Implemented in `.github/workflows/test-updates.yml`
- Triggers on update-related file changes
- Runs automatically on PRs and pushes

### ✅ All scenarios pass reliably
**Status**: Test suite designed for reliability
- Each test suite runs in isolated Docker environment
- Services have health checks
- Proper wait times and timeouts
- Cleanup after each test

### ✅ Tests catch the v4.28.0 checksum issue type automatically
**Status**: Specific test coverage implemented
- Test suite `02-bad-checksums.spec.ts` specifically validates:
  - Error appears exactly once (not twice)
  - No duplicate modals
  - User-friendly error messages
- Regression test job verifies this works

### ✅ Frontend UX regressions are blocked
**Status**: Comprehensive frontend validation suite
- Test suite `06-frontend-validation.spec.ts` with 15 test cases
- Validates modal behavior, error messages, accessibility
- Ensures no duplicate modals in any scenario
- Checks for user-friendly error messages
- Validates proper ARIA attributes

## Key Features

### 1. Controllable Test Environment
Environment variables control mock server behavior:
```bash
MOCK_CHECKSUM_ERROR=true   # Return invalid checksums
MOCK_NETWORK_ERROR=true    # Simulate network failures
MOCK_RATE_LIMIT=true       # Enable aggressive rate limiting
MOCK_STALE_RELEASE=true    # Mark releases as stale
```
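A short usage sketch for these flags; whether `scripts/run-tests.sh` forwards them to the compose services is an assumption (the per-suite runner itself is described under Helper Scripts above):

```bash
# Exercise the rate-limiting suite against a mock server that throttles aggressively.
MOCK_RATE_LIMIT=true ./scripts/run-tests.sh rate-limit

# Flags can be combined; unset variables leave the mock server in its default, healthy mode.
MOCK_STALE_RELEASE=true MOCK_NETWORK_ERROR=true ./scripts/run-tests.sh stale
```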
### 2. Realistic Mock GitHub Server
- Generates actual tarball files with checksums
- Simulates GitHub API responses accurately
- Provides controllable failure modes
- Includes rate limiting
- Supports multiple release versions

### 3. Comprehensive Helper Library
20+ helper functions including:
- `loginAsAdmin()`, `navigateToSettings()`
- `waitForUpdateBanner()`, `clickApplyUpdate()`
- `waitForProgressModal()`, `countVisibleModals()`
- `assertUserFriendlyError()`, `dismissModal()`
- API helpers for direct backend testing

### 4. CI-Ready
- Runs in GitHub Actions
- Produces test reports and artifacts
- Comments on PRs with results
- Verifies regression prevention

## File Structure

```
tests/integration/
├── README.md                    # Main documentation
├── QUICK_START.md               # Quick start guide
├── IMPLEMENTATION_SUMMARY.md    # This file
├── package.json                 # npm dependencies
├── playwright.config.ts         # Playwright configuration
├── tsconfig.json                # TypeScript configuration
├── docker-compose.test.yml      # Test environment
├── .gitignore                   # Git ignore rules
│
├── mock-github-server/          # Mock GitHub API
│   ├── main.go                  # Server implementation
│   ├── go.mod                   # Go dependencies
│   └── Dockerfile               # Container image
│
├── scripts/                     # Helper scripts
│   ├── setup.sh                 # One-time setup
│   └── run-tests.sh             # Test runner
│
└── tests/                       # Test suites
    ├── helpers.ts                     # Test utilities
    ├── 01-happy-path.spec.ts          # Happy path tests
    ├── 02-bad-checksums.spec.ts       # Checksum validation tests
    ├── 03-rate-limiting.spec.ts       # Rate limit tests
    ├── 04-network-failure.spec.ts     # Network failure tests
    ├── 05-stale-release.spec.ts       # Stale release tests
    └── 06-frontend-validation.spec.ts # Frontend UX tests
```

## Running the Tests

### Quick Start
```bash
cd tests/integration
./scripts/setup.sh   # One-time setup
npm test             # Run all tests
```

### Specific Scenarios
```bash
./scripts/run-tests.sh happy       # Happy path only
./scripts/run-tests.sh checksums   # Bad checksums
./scripts/run-tests.sh rate-limit  # Rate limiting
./scripts/run-tests.sh network     # Network failures
./scripts/run-tests.sh stale       # Stale releases
./scripts/run-tests.sh frontend    # Frontend validation
```

### Interactive Mode
```bash
npm run test:ui       # Playwright UI
npm run test:debug    # Debug mode
npm run test:headed   # Headed browser
```

## Technologies Used

- **Test Framework**: Playwright
- **Language**: TypeScript
- **Mock Server**: Go
- **Container Platform**: Docker & Docker Compose
- **CI/CD**: GitHub Actions
- **Browser**: Chromium

## Future Enhancements

Potential improvements for future iterations:

1. **Additional Test Scenarios**
   - Multi-version update paths
   - Rollback scenarios
   - Concurrent update attempts
   - Permission failures

2. **Performance Testing**
   - Update download speed
   - UI responsiveness during update
   - Backend processing time

3. **Cross-browser Testing**
   - Firefox support
   - Safari/WebKit support

4. **Test Data Variations**
   - Different release sizes
   - Various network speeds
   - Different update channels (stable vs RC)

5. **Monitoring Integration**
   - Test metrics dashboard
   - Failure trend analysis
   - Performance benchmarks

## Conclusion

This implementation provides a robust, comprehensive testing framework for the Pulse update flow that:

✅ Catches critical issues like the v4.28.0 duplicate modal bug
✅ Validates frontend UX to prevent regressions
✅ Tests backend logic thoroughly
✅ Runs automatically in CI
✅ Is easy to run locally
✅ Is well-documented
✅ Is maintainable and extensible

The test suite meets all success criteria and provides confidence that update flow changes won't introduce regressions.