From 4f17fb2eeb0628069fbbdcd152c347a3dd13f587 Mon Sep 17 00:00:00 2001 From: Mohammad Mahdi Baghbani Pourvahid Date: Sat, 19 Jul 2025 08:41:50 +0000 Subject: [PATCH 1/3] refactor: modularity for artifact downloader --- scripts/github/download-artifacts.sh | 1193 ++------------------------ scripts/utils/github/artifact.sh | 52 ++ scripts/utils/github/cli.sh | 59 ++ scripts/utils/github/fs.sh | 26 + scripts/utils/github/gh.sh | 39 + scripts/utils/github/log.sh | 41 + scripts/utils/github/video.sh | 26 + 7 files changed, 335 insertions(+), 1101 deletions(-) create mode 100755 scripts/utils/github/artifact.sh create mode 100755 scripts/utils/github/cli.sh create mode 100755 scripts/utils/github/fs.sh create mode 100755 scripts/utils/github/gh.sh create mode 100755 scripts/utils/github/log.sh create mode 100755 scripts/utils/github/video.sh diff --git a/scripts/github/download-artifacts.sh b/scripts/github/download-artifacts.sh index 1aff2501..c95e74e5 100755 --- a/scripts/github/download-artifacts.sh +++ b/scripts/github/download-artifacts.sh @@ -1,1126 +1,117 @@ -#!/bin/bash - -# download-artifacts.sh -# -# Downloads and processes video artifacts from GitHub Actions test workflows. -# Generates AVIF thumbnails. -# -# Requirements: gh, jq, ffmpeg, unzip, zip +#!/usr/bin/env bash set -euo pipefail -# Global variables -readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -readonly ARTIFACTS_DIR="site/static/artifacts" -readonly IMAGES_DIR="site/static/images" -readonly LOG_FILE="/tmp/artifact-download-$(date +%Y%m%d-%H%M%S).log" -declare -a TEMP_DIRS=() - -# Enhanced logging functions -log() { - local timestamp level msg - level="$1" - shift - msg="$*" - timestamp=$(date +'%Y-%m-%d %H:%M:%S') - printf "[%s] %-7s %s\n" "$timestamp" "$level" "$msg" >>"$LOG_FILE" - printf "[%s] %-7s %s\n" "$timestamp" "$level" "$msg" -} -error() { log "ERROR" "$*" >&2; } -info() { log "INFO" "$*"; } -debug() { [[ "${DEBUG:-0}" == "1" ]] && log "DEBUG" "$*" || true; } -warn() { log "WARN" "$*" >&2; } -success() { log "SUCCESS" "$*"; } - -# Timer functions for operation timing -start_timer() { - timer_start=$(date +%s) -} - -end_timer() { - local end_time=$(date +%s) - local duration=$((end_time - timer_start)) - local operation="$1" - info "Operation '$operation' completed in ${duration}s" -} - -# Get human readable file size in KiB format -human_size() { - # If no argument is provided or if it's not a number, return '0 KiB' - local size - size="${1:-0}" - if ! [[ "$size" =~ ^[0-9]+$ ]]; then - echo "0 KiB" - return - fi - - if ((size < 1024)); then - echo "${size} B" - elif ((size < 1048576)); then - # Convert to KiB with one decimal place - local kib=$(echo "scale=1; $size/1024" | bc) - echo "${kib} KiB" - elif ((size < 1073741824)); then - # Convert to MiB with one decimal place - local mib=$(echo "scale=1; $size/1048576" | bc) - echo "${mib} MiB" +# ----------------------------------------------------------------------------------- +# Function: resolve_script_dir +# Purpose : Resolves the absolute path of the script's directory, handling symlinks. +# Returns : +# Exports SOURCE, SCRIPT_DIR +# Note : This function relies on BASH_SOURCE, so it must be used in a Bash shell. 
+# ----------------------------------------------------------------------------------- +resolve_script_dir() { + local source="${BASH_SOURCE[0]}" + + # Follow symbolic links until we get the real file location + while [ -L "${source}" ]; do + # Get the directory path where the symlink is located + dir="$(cd -P "$(dirname "${source}")" >/dev/null 2>&1 && pwd)" + # Use readlink to get the target the symlink points to + source="$(readlink "${source}")" + # If the source was a relative symlink, convert it to an absolute path + [[ "${source}" != /* ]] && source="${dir}/${source}" + done + + # After resolving symlinks, retrieve the directory of the final source + SCRIPT_DIR="$(cd -P "$(dirname "${source}")" >/dev/null 2>&1 && pwd)" + + # Exports + export SOURCE="${source}" + export SCRIPT_DIR="${SCRIPT_DIR}" +} + +# ----------------------------------------------------------------------------------- +# Function: initialize_environment +# Purpose : +# 1) Resolve the script's directory. +# 2) Change into that directory plus an optional subdirectory (if provided). +# 3) Export ENV_ROOT as the new working directory. +# +# Arguments: +# 1) $1 - Relative or absolute path to a subdirectory (optional). +# If omitted or empty, defaults to '.' (the same directory as resolve_script_dir). +# +# Usage Example: +# initialize_environment # Uses the script's directory +# initialize_environment "dev" # Changes to script's directory + "/dev" +# ----------------------------------------------------------------------------------- +initialize_environment() { + # Resolve script's directory + resolve_script_dir + + # Local variables + local subdir + # Check if a subdirectory argument was passed; default to '.' if not + subdir="${1:-.}" + + # Attempt to change into the resolved directory + the subdirectory + if cd "${SCRIPT_DIR}/${subdir}"; then + ENV_ROOT="$(pwd)" + export ENV_ROOT else - # Convert to GiB with one decimal place - local gib=$(echo "scale=1; $size/1073741824" | bc) - echo "${gib} GiB" + printf "Error: %s\n" "Failed to change directory to '${SCRIPT_DIR}/${subdir}'." >&2 && exit 1 fi } -# Cleanup function -cleanup() { - local exit_code=$? - info "Cleaning up temporary directories..." - for dir in "${TEMP_DIRS[@]}"; do - if [[ -d "$dir" ]]; then - rm -rf "$dir" - debug "Removed temporary directory: $dir" - fi - done - if [[ $exit_code -ne 0 ]]; then - error "Script failed with exit code $exit_code" - fi - exit "$exit_code" -} - -# Check required tools -check_dependencies() { - local missing_deps=() - for cmd in gh jq ffmpeg unzip zip; do - if ! command -v "$cmd" &>/dev/null; then - missing_deps+=("$cmd") - fi - done - - if [[ ${#missing_deps[@]} -gt 0 ]]; then - error "Missing required dependencies: ${missing_deps[*]}" - error "Please install these tools before running this script." - exit 1 - fi -} - -# Sanitizes workflow names for filesystem usage -sanitize_name() { +require() { local name="$1" - echo "$name" | sed -E 's/\.(yml|yaml)$//' | tr '[:upper:]' '[:lower:]' -} - -# Generates AVIF thumbnail from first frame -# Uses AV1 codec with still-picture optimization -generate_thumbnail() { - local video="$1" - local thumbnail="${video%.*}.avif" - - if ! 
ffmpeg -hide_banner -loglevel error -i "$video" \ - -vf "select=eq(n\,0),scale=640:-1" -vframes 1 -c:v libaom-av1 -still-picture 1 "$thumbnail" 2>/dev/null; then - return 1 - fi - - printf "%s" "$thumbnail" -} - -# Enhanced process_video with progress logging -process_video() { - local input="$1" - local dir - dir="$(dirname "$input")" - local new_name="$dir/recording.mp4" - local original_size - - start_timer - original_size=$(stat -f%z "$input" 2>/dev/null || stat -c%s "$input") - info "Processing video: $input ($(human_size $original_size))" - - # Rename to consistent filename - if ! mv "$input" "$new_name"; then - error "Failed to rename $input to $new_name" - return 1 - fi - debug "Renamed $input to $new_name" - - # Generate thumbnail - info "Generating thumbnail for $new_name" - local thumbnail_file - if ! thumbnail_file=$(generate_thumbnail "$new_name"); then - error "Failed to generate thumbnail" - return 1 - fi - local thumb_size - thumb_size=$(stat -f%z "$thumbnail_file" 2>/dev/null || stat -c%s "$thumbnail_file") - success "Generated thumbnail at $thumbnail_file ($(human_size $thumb_size))" - - end_timer "Video processing" -} - -# Enhanced fetch_workflow_artifacts with more detailed logging -fetch_workflow_artifacts() { - local workflow="$1" - local workflow_name - workflow_name=$(sanitize_name "$workflow") - info "Processing workflow: $workflow_name" - start_timer - - # Get the latest run for this workflow - local runs_json - debug "Fetching workflow runs for $workflow_name" - runs_json=$(gh api "repos/cs3org/ocm-test-suite/actions/workflows/$workflow/runs?per_page=20" \ - --jq ".workflow_runs[] | select(.head_sha == \"${COMMIT_SHA}\" or .head_sha == \"${COMMIT_SHA:0:7}\")" | head -n 1) || { - error "Failed to fetch runs for workflow $workflow" - return 1 - } - - if [[ -z "$runs_json" ]]; then - warn "No runs found for workflow $workflow with commit ${COMMIT_SHA}, trying latest run instead" - runs_json=$(gh api "repos/cs3org/ocm-test-suite/actions/workflows/$workflow/runs?per_page=1" \ - --jq ".workflow_runs[0]") || { - error "Failed to fetch latest run for workflow $workflow" - return 1 - } - fi - - # Get run ID and additional information - local run_id run_status run_conclusion - run_id=$(echo "$runs_json" | jq -r '.id') - run_status=$(echo "$runs_json" | jq -r '.status') - run_conclusion=$(echo "$runs_json" | jq -r '.conclusion') - info "Found run ID: $run_id (Status: $run_status, Conclusion: $run_conclusion) for commit ${COMMIT_SHA}" - - # Get artifacts with count information - local artifacts_json artifact_count - debug "Fetching artifacts for run $run_id" - artifacts_json=$(gh api "repos/cs3org/ocm-test-suite/actions/runs/$run_id/artifacts") || { - error "Failed to fetch artifacts for run $run_id" - return 1 - } - artifact_count=$(echo "$artifacts_json" | jq '.total_count') - info "Found $artifact_count artifacts for run $run_id" - - # Use a temporary file to track counters across subshells - local counter_file - counter_file=$(mktemp) - echo "0 0 0" >"$counter_file" # processed downloaded videos - - # Process each artifact - echo "$artifacts_json" | jq -r '.artifacts[] | "\(.id) \(.name) \(.size_in_bytes // 0)"' | while read -r id name size; do - # Read current counters - read -r processed_count downloaded_count video_count <"$counter_file" - processed_count=$((processed_count + 1)) - echo "$processed_count $downloaded_count $video_count" >"$counter_file" - - info "Downloading artifact $name (ID: $id, Size: $(human_size ${size:-0})) [$processed_count/$artifact_count]" - 
- # Create a temporary directory for this artifact - local tmp_dir - tmp_dir=$(mktemp -d) - TEMP_DIRS+=("$tmp_dir") - - # Download with progress indication and error checking - debug "Downloading to temporary directory: $tmp_dir" - if ! gh api "repos/cs3org/ocm-test-suite/actions/artifacts/$id/zip" \ - -H "Accept: application/vnd.github+json" >"$tmp_dir/artifact.zip"; then - error "Failed to download artifact $id" - rm -rf "$tmp_dir" - continue - fi - - # Update downloaded counter - read -r processed_count downloaded_count video_count <"$counter_file" - downloaded_count=$((downloaded_count + 1)) - echo "$processed_count $downloaded_count $video_count" >"$counter_file" - - # Get actual downloaded size and verify - local downloaded_size - downloaded_size=$(stat -f%z "$tmp_dir/artifact.zip" 2>/dev/null || stat -c%s "$tmp_dir/artifact.zip" 2>/dev/null || echo 0) - if [[ $downloaded_size -eq 0 ]]; then - error "Downloaded artifact $id is empty" - rm -rf "$tmp_dir" - continue - fi - debug "Downloaded size: $(human_size ${downloaded_size:-0})" - - # Extract with size information and error checking - local target_dir="$ARTIFACTS_DIR/$workflow_name" - mkdir -p "$target_dir" - debug "Extracting to $target_dir" - if ! unzip -o "$tmp_dir/artifact.zip" -d "$target_dir" 2>/dev/null; then - error "Failed to extract artifact $id" - rm -rf "$tmp_dir" - continue - fi - - # List extracted files for debugging - debug "Extracted files in $target_dir:" - find "$target_dir" -type f -exec ls -l {} \; 2>/dev/null | while read -r line; do - debug " $line" - done - - # Process videos with enhanced logging - while IFS= read -r -d '' video; do - if ! process_video "$video"; then - error "Failed to process video: $video" - continue - fi - # Update video counter - read -r processed_count downloaded_count video_count <"$counter_file" - video_count=$((video_count + 1)) - echo "$processed_count $downloaded_count $video_count" >"$counter_file" - info "Successfully processed video $video_count from artifact $name" - done < <(find "$target_dir" -type f -name "*.mp4" ! -name "recording.mp4" -print0) - - # Cleanup - rm -rf "$tmp_dir" - for i in "${!TEMP_DIRS[@]}"; do - if [[ ${TEMP_DIRS[i]} = "$tmp_dir" ]]; then - unset 'TEMP_DIRS[i]' - break - fi - done - done - - # Read final counter values - read -r processed_count downloaded_count video_count <"$counter_file" - rm -f "$counter_file" - - info "Artifact processing summary for $workflow_name:" - info "- Processed artifacts: $processed_count" - info "- Successfully downloaded: $downloaded_count" - info "- Videos processed: $video_count" - - end_timer "Workflow artifact processing" - return 0 -} - -# Fetches workflow status -fetch_workflow_status() { - local workflow="$1" - local status_json - - status_json=$(gh api "repos/cs3org/ocm-test-suite/actions/workflows/$workflow/runs?branch=main&per_page=1" \ - --jq '{ - name: .workflow_runs[0].name, - status: .workflow_runs[0].status, - conclusion: .workflow_runs[0].conclusion - }') || { - error "Failed to fetch status for workflow $workflow" - return 1 - } - - echo "$status_json" -} - -# Generates two key files: -# 1. manifest.json: Maps workflows to their video/thumbnail files -# 2. workflow-status.json: Current status of all test workflows -generate_manifest() { - info "Generating artifact manifest..." 
- local manifest="$ARTIFACTS_DIR/manifest.json" - local status_file="$ARTIFACTS_DIR/workflow-status.json" - local temp_status_file="/tmp/temp_status_$$.json" - local temp_manifest_file="/tmp/temp_manifest_$$.json" - local counter_file - counter_file=$(mktemp) - - # Initialize empty JSON files and counter - echo "{}" >"$temp_status_file" - echo '{"videos": []}' >"$temp_manifest_file" - echo "0" >"$counter_file" # Initialize video counter - - # Process each workflow type - for workflow in "${workflow_files[@]}"; do - # Get workflow status - local status - status=$(fetch_workflow_status "$workflow") - if [[ -n "$status" ]]; then - jq --arg name "$workflow" --argjson status "$status" \ - '. + {($name): $status}' "$temp_status_file" >"${temp_status_file}.tmp" && - mv "${temp_status_file}.tmp" "$temp_status_file" - fi - - # Get workflow artifacts - local workflow_name - workflow_name=$(sanitize_name "$workflow") - local workflow_dir="$ARTIFACTS_DIR/$workflow_name" - - if [[ -d "$workflow_dir" ]]; then - debug "Processing artifacts for $workflow_name" - # Find all processed MP4 videos and their thumbnails - while IFS= read -r -d '' video; do - local rel_video="${video#site/static/}" - local thumbnail="${video%.mp4}.avif" - local rel_thumbnail="${thumbnail#site/static/}" - - if [[ -f "$video" && -f "$thumbnail" ]]; then - # Update video counter - local current_count - read -r current_count <"$counter_file" - current_count=$((current_count + 1)) - echo "$current_count" >"$counter_file" - - debug "Found video/thumbnail pair: $rel_video, $rel_thumbnail" - # Add to manifest - jq --arg wf "$workflow_name" \ - --arg video "$rel_video" \ - --arg thumb "$rel_thumbnail" \ - '.videos += [{"workflow": $wf, "video": $video, "thumbnail": $thumb}]' \ - "$temp_manifest_file" >"${temp_manifest_file}.tmp" && - mv "${temp_manifest_file}.tmp" "$temp_manifest_file" - else - warn "Missing video or thumbnail for $workflow_name: $video" - fi - done < <(find "$workflow_dir" -type f -name "recording.mp4" -print0) - else - debug "No artifacts directory found for $workflow_name" - fi - done - - # Get final video count - local total_videos - read -r total_videos <"$counter_file" - rm -f "$counter_file" - - # Move the final files to their destinations - mv "$temp_status_file" "$status_file" - mv "$temp_manifest_file" "$manifest" - - info "Generated manifest with $total_videos video entries" - - if [[ ! -f "$manifest" || ! -f "$status_file" ]]; then - error "Failed to generate manifest files" - return 1 - fi - - info "Manifest files generated:" - info "- Status file: $status_file" - info "- Manifest file: $manifest" - - # Debug output of manifest contents - if [[ "${DEBUG:-0}" == "1" ]]; then - debug "Manifest contents:" - jq '.' "$manifest" - debug "Status file contents:" - jq '.' "$status_file" - fi -} - -# Create required directories with error checking -create_required_directories() { - info "Creating required directories..." - local -a required_dirs=( - "site" - "site/static" - "$ARTIFACTS_DIR" - "$IMAGES_DIR" - "$ARTIFACTS_DIR/bundles" - ) - - for dir in "${required_dirs[@]}"; do - if ! 
mkdir -p "$dir" 2>/dev/null; then - error "Failed to create directory: $dir" - error "Current permissions: $(ls -ld "$(dirname "$dir")" 2>/dev/null || echo 'Cannot read parent directory')" - return 1 - fi - debug "Created directory: $dir" - done - success "All required directories created successfully" -} - -# Create a combined zip file of all test artifacts -create_combined_zip() { - info "Creating combined zip file of all test artifacts..." - local base_dir="$ARTIFACTS_DIR/bundles" - local zip_file="$base_dir/ocm-tests-all.zip" - - # Create parent directories - ensure_directory "$base_dir" "bundle" || return 1 - - # Create temporary directory for files - local temp_dir - temp_dir=$(create_temp_dir) - local files_dir="$temp_dir/files" - mkdir -p "$files_dir" - debug "Created temporary directory for files: $files_dir" - - # Initialize counter - local counter_file - counter_file=$(create_counter_file) - - # Copy all workflow artifacts to temp directory - for workflow in "${workflow_files[@]}"; do - local workflow_name - workflow_name=$(sanitize_name "$workflow") - local workflow_dir="$ARTIFACTS_DIR/$workflow_name" - copy_workflow_videos "$workflow_dir" "$files_dir" "$counter_file" "$workflow_name" - done - - # Get final count - local found_files - read -r found_files <"$counter_file" - - if [[ $found_files -eq 0 ]]; then - warn "No files found to zip" - cleanup_temp_resources "$temp_dir" "$counter_file" - return 0 - fi - - info "Creating zip file with $found_files videos..." - - # Create the zip bundle - create_zip_bundle "$files_dir" "$zip_file" "combined" || { - cleanup_temp_resources "$temp_dir" "$counter_file" - return 1 - } - - cleanup_temp_resources "$temp_dir" "$counter_file" - return 0 -} - -# Create platform-specific zip bundles -create_platform_bundles() { - info "Creating platform-specific zip bundles..." - local base_dir="$ARTIFACTS_DIR/bundles" - - # Create parent directories - ensure_directory "$base_dir" "bundle" || return 1 - - # Define platform combinations - declare -A platforms=( - ["nextcloud"]="nc" - ["owncloud"]="oc" - ["sciencemesh"]="sm" - ["seafile"]="sf" - ["ocis"]="ocis" - ["cernbox"]="cb" - ) - - for platform in "${!platforms[@]}"; do - local platform_code="${platforms[$platform]}" - local temp_dir - temp_dir=$(create_temp_dir) - - info "Processing $platform tests..." - debug "Using temporary directory: $temp_dir" - - # Initialize counter - local counter_file - counter_file=$(create_counter_file) - - # Find workflows containing the platform code - for workflow in "${workflow_files[@]}"; do - if [[ "$workflow" =~ $platform_code ]]; then - local workflow_name - workflow_name=$(sanitize_name "$workflow") - local workflow_dir="$ARTIFACTS_DIR/$workflow_name" - copy_workflow_videos "$workflow_dir" "$temp_dir" "$counter_file" "$workflow_name" - fi - done - - # Get final count - local found_files - read -r found_files <"$counter_file" - - if [[ $found_files -eq 0 ]]; then - warn "No files found for $platform bundle" - cleanup_temp_resources "$temp_dir" "$counter_file" - continue - fi - - # Create zip file for this platform - local zip_file="$base_dir/ocm-tests-$platform.zip" - - # Create the zip bundle - create_zip_bundle "$temp_dir" "$zip_file" "$platform" || { - cleanup_temp_resources "$temp_dir" "$counter_file" - continue - } - - cleanup_temp_resources "$temp_dir" "$counter_file" - done -} - -# Create test-type specific bundles -create_test_type_bundles() { - info "Creating test-type specific bundles..." 
- local base_dir="$ARTIFACTS_DIR/bundles" - - # Create parent directories - ensure_directory "$base_dir" "bundle" || return 1 - - # Define test types - declare -a types=("login" "share" "invite") - - for type in "${types[@]}"; do - debug "=== Test Type Bundle Debug ===" - debug "Starting processing for type: $type" - debug "Current shell PID: $$" - - local temp_dir - temp_dir=$(create_temp_dir) - - info "Processing $type tests..." - debug "Using temporary directory: $temp_dir" - - # Initialize counter - local counter_file - counter_file=$(create_counter_file) - - # Find workflows of this type - for workflow in "${workflow_files[@]}"; do - if [[ "$workflow" =~ ^$type- ]]; then - debug "Processing workflow: $workflow (Shell PID: $$)" - local workflow_name - workflow_name=$(sanitize_name "$workflow") - local workflow_dir="$ARTIFACTS_DIR/$workflow_name" - copy_workflow_videos "$workflow_dir" "$temp_dir" "$counter_file" "$workflow_name" - fi - done - - # Get final count - local found_files - read -r found_files <"$counter_file" - debug "Final count from file: $found_files" - - if [[ $found_files -eq 0 ]]; then - warn "No files found for $type bundle" - cleanup_temp_resources "$temp_dir" "$counter_file" - continue - fi - - # Create zip file for this test type - local zip_file="$base_dir/ocm-tests-$type.zip" - - # Create the zip bundle - create_zip_bundle "$temp_dir" "$zip_file" "$type tests" || { - cleanup_temp_resources "$temp_dir" "$counter_file" - continue - } - - cleanup_temp_resources "$temp_dir" "$counter_file" - done -} - -# Create result-specific bundles based on workflow status -create_result_bundles() { - info "Creating result-specific bundles..." - debug "=== Result Bundle Debug ===" - debug "Starting result bundle processing" - debug "Current shell PID: $$" - local base_dir="$ARTIFACTS_DIR/bundles" - - # Create parent directories - ensure_directory "$base_dir" "bundle" || return 1 - - local status_file="$ARTIFACTS_DIR/workflow-status.json" - - # Create temp directories for success/failure - local success_dir - success_dir=$(create_temp_dir) - local failed_dir - failed_dir=$(create_temp_dir) - - # Initialize counters - local success_counter - success_counter=$(create_counter_file) - local failed_counter - failed_counter=$(create_counter_file) - - # Process each workflow based on its status - jq -r 'to_entries[] | "\(.key) \(.value.conclusion)"' "$status_file" | while read -r workflow status; do - local workflow_name - workflow_name=$(sanitize_name "$workflow") - local workflow_dir="$ARTIFACTS_DIR/$workflow_name" - - if [[ -d "$workflow_dir" ]]; then - local target_dir counter_file - if [[ "$status" == "success" ]]; then - target_dir="$success_dir" - counter_file="$success_counter" - else - target_dir="$failed_dir" - counter_file="$failed_counter" - fi - - copy_workflow_videos "$workflow_dir" "$target_dir" "$counter_file" "$workflow_name" - fi - done - - # Get final counts - local success_count failed_count - read -r success_count <"$success_counter" - read -r failed_count <"$failed_counter" - - # Create success/failure zip files - for result in "success" "failed"; do - local source_dir count counter_file - if [[ "$result" == "success" ]]; then - source_dir="$success_dir" - count=$success_count - counter_file="$success_counter" - else - source_dir="$failed_dir" - count=$failed_count - counter_file="$failed_counter" - fi - - if [[ $count -eq 0 ]]; then - warn "No files found for $result bundle" - continue - fi - - local zip_file="$base_dir/ocm-tests-$result.zip" - - # Create the zip 
bundle - create_zip_bundle "$source_dir" "$zip_file" "$result tests" || continue - done - - # Cleanup temp resources - cleanup_temp_resources "$success_dir" "$success_counter" - cleanup_temp_resources "$failed_dir" "$failed_counter" -} - -# Create category-specific bundles based on workflow types -create_category_bundles() { - info "Creating category-specific bundles..." - debug "=== Category Bundle Debug ===" - debug "Starting category bundle processing" - debug "Current shell PID: $$" - local base_dir="$ARTIFACTS_DIR/bundles" - - # Add explicit directory creation and validation - debug "Ensuring bundle directory exists: $base_dir" - if ! mkdir -p "$base_dir"; then - error "Failed to create bundle directory: $base_dir" - error "Parent directory permissions: $(ls -ld "$(dirname "$base_dir")")" - return 1 - fi - debug "Bundle directory permissions: $(ls -ld "$base_dir")" - - # Define test categories and their workflow patterns - declare -A categories=( - ["auth"]="login-" - ["share-link"]="share-link-" - ["share-with"]="share-with-" - ["sciencemesh"]="invite-" - ) - - for category in "${!categories[@]}"; do - local pattern="${categories[$category]}" - local temp_dir - temp_dir=$(mktemp -d) - TEMP_DIRS+=("$temp_dir") - - info "Processing $category category tests..." - debug "Using temporary directory: $temp_dir" - debug "Temporary directory permissions: $(ls -ld "$temp_dir")" - - # Find workflows matching this category's pattern - for workflow in "${workflow_files[@]}"; do - if [[ "$workflow" =~ ^$pattern ]]; then - local workflow_name - workflow_name=$(sanitize_name "$workflow") - local workflow_dir="$ARTIFACTS_DIR/$workflow_name" - - if [[ -d "$workflow_dir" ]]; then - mkdir -p "$temp_dir/$workflow_name" - find "$workflow_dir" -name "recording.mp4" -exec cp {} "$temp_dir/$workflow_name/" \; - fi - fi - done - - # Create zip file for this category using absolute paths - local zip_file="$base_dir/ocm-tests-$category.zip" - local zip_dir="$(dirname "$zip_file")" - - # Get absolute paths - local abs_temp_dir - abs_temp_dir=$(cd "$temp_dir" && pwd) - local abs_zip_file - abs_zip_file=$(cd "$zip_dir" && pwd)/$(basename "$zip_file") - - debug "Creating zip file from: $abs_temp_dir" - debug "Creating zip file to: $abs_zip_file" - debug "Current working directory: $(pwd)" - - if (cd "$abs_temp_dir" && zip -r "$abs_zip_file" .); then - if [[ -f "$abs_zip_file" ]]; then - local zip_size - zip_size=$(stat -f%z "$abs_zip_file" 2>/dev/null || stat -c%s "$abs_zip_file") - success "Created $category category bundle: $zip_file ($(human_size ${zip_size:-0}))" - else - error "Failed to create zip file for $category" - error "Absolute temp dir: $abs_temp_dir" - error "Absolute zip file: $abs_zip_file" - error "Current directory: $(pwd)" - error "Temp directory contents: $(ls -la "$temp_dir")" - fi - else - error "Failed to create zip file for $category" - error "Absolute temp dir: $abs_temp_dir" - error "Absolute zip file: $abs_zip_file" - error "Current directory: $(pwd)" - error "Temp directory contents: $(ls -la "$temp_dir")" - fi - - rm -rf "$temp_dir" - for i in "${!TEMP_DIRS[@]}"; do - if [[ ${TEMP_DIRS[i]} = "$temp_dir" ]]; then - unset 'TEMP_DIRS[i]' - break - fi - done - done -} - -# Create bundle sizes JSON file -generate_bundle_sizes() { - info "Generating bundle sizes JSON file..." 
- local base_dir="$ARTIFACTS_DIR/bundles" - local sizes_file="$ARTIFACTS_DIR/bundle-sizes.json" - local temp_sizes_file="/tmp/temp_sizes_$$.json" - - echo "{}" >"$temp_sizes_file" - - # Process each bundle file - while IFS= read -r -d '' bundle; do - local bundle_name - bundle_name=$(basename "$bundle") - local bundle_size - bundle_size=$(stat -f%z "$bundle" 2>/dev/null || stat -c%s "$bundle") - local human_bundle_size - human_bundle_size=$(human_size "$bundle_size") - - # Add to JSON - jq --arg name "$bundle_name" \ - --arg size "$human_bundle_size" \ - --arg bytes "$bundle_size" \ - '. + {($name): {"size": $size, "bytes": $bytes}}' "$temp_sizes_file" >"${temp_sizes_file}.tmp" && - mv "${temp_sizes_file}.tmp" "$temp_sizes_file" - - debug "Added size for bundle: $bundle_name ($human_bundle_size)" - done < <(find "$base_dir" -type f -name "ocm-tests-*.zip" -print0) - - # Move the final file to its destination - mv "$temp_sizes_file" "$sizes_file" - - if [[ -f "$sizes_file" ]]; then - success "Generated bundle sizes file: $sizes_file" - debug "Bundle sizes file contents:" - debug "$(cat "$sizes_file")" + local path="$MODULES_DIR/$name" + if [[ -f $path ]]; then + # shellcheck source=/dev/null + source "$path" else - error "Failed to generate bundle sizes file" - return 1 - fi -} - -# Common helper functions to reduce duplication - -# Ensures a directory exists and logs permissions -ensure_directory() { - local dir="$1" - local dir_type="$2" - - debug "Ensuring $dir_type directory exists: $dir" - if ! mkdir -p "$dir"; then - error "Failed to create $dir_type directory: $dir" - error "Parent directory permissions: $(ls -ld "$(dirname "$dir")" 2>/dev/null || echo 'Cannot read parent directory')" - return 1 - fi - debug "$dir_type directory permissions: $(ls -ld "$dir")" - return 0 -} - -# Creates a temporary directory and adds it to TEMP_DIRS array -create_temp_dir() { - local temp_dir - temp_dir=$(mktemp -d) - TEMP_DIRS+=("$temp_dir") - echo "$temp_dir" -} - -# Creates and initializes a counter file -create_counter_file() { - local initial_value="${1:-0}" - local counter_file - counter_file=$(mktemp) - echo "$initial_value" >"$counter_file" - echo "$counter_file" -} - -# Increments a counter file and returns new value -increment_counter() { - local counter_file="$1" - local current_count - read -r current_count <"$counter_file" - current_count=$((current_count + 1)) - echo "$current_count" >"$counter_file" - echo "$current_count" -} - -# Common function to create zip files -create_zip_bundle() { - local source_dir="$1" - local zip_file="$2" - local bundle_type="$3" - - local zip_dir="$(dirname "$zip_file")" - - # Get absolute paths - local abs_source_dir - abs_source_dir=$(cd "$source_dir" && pwd) - local abs_zip_file - abs_zip_file=$(cd "$zip_dir" && pwd)/$(basename "$zip_file") - - debug "Creating zip file from: $abs_source_dir" - debug "Creating zip file to: $abs_zip_file" - debug "Current working directory: $(pwd)" - - if (cd "$abs_source_dir" && zip -r "$abs_zip_file" .); then - if [[ -f "$abs_zip_file" ]]; then - local zip_size - zip_size=$(stat -f%z "$abs_zip_file" 2>/dev/null || stat -c%s "$abs_zip_file") - success "Created $bundle_type bundle: $zip_file ($(human_size ${zip_size:-0}))" - return 0 - else - error "Failed to create zip file for $bundle_type" - error "Absolute source dir: $abs_source_dir" - error "Absolute zip file: $abs_zip_file" - error "Current directory: $(pwd)" - error "Source directory contents: $(ls -la "$source_dir")" - return 1 - fi - else - error "Failed to 
create zip file for $bundle_type" - error "Absolute source dir: $abs_source_dir" - error "Absolute zip file: $abs_zip_file" - error "Current directory: $(pwd)" - error "Source directory contents: $(ls -la "$source_dir")" - return 1 - fi -} - -# Common function to copy workflow videos -copy_workflow_videos() { - local workflow_dir="$1" - local target_dir="$2" - local counter_file="$3" - local workflow_name="$4" - - if [[ ! -d "$workflow_dir" ]]; then - return 0 + echo "Module '$name' not found in $MODULES_DIR" >&2 + exit 1 fi - - mkdir -p "$target_dir/$workflow_name" - - while IFS= read -r -d '' video; do - if cp "$video" "$target_dir/$workflow_name/"; then - increment_counter "$counter_file" >/dev/null - debug "Copied $video to $target_dir/$workflow_name" - else - error "Failed to copy $video to $target_dir/$workflow_name" - fi - done < <(find "$workflow_dir" -name "recording.mp4" -print0) -} - -# Common function to cleanup temp resources -cleanup_temp_resources() { - local temp_dir="$1" - local counter_file="$2" - - [[ -f "$counter_file" ]] && rm -f "$counter_file" - [[ -d "$temp_dir" ]] && rm -rf "$temp_dir" - - for i in "${!TEMP_DIRS[@]}"; do - if [[ ${TEMP_DIRS[i]} = "$temp_dir" ]]; then - unset 'TEMP_DIRS[i]' - break - fi - done } -# Enhanced main function with summary statistics +# ----------------------------------------------------------------------------------- +# Function: main +# Purpose: Main function to manage the flow of the script. +# ----------------------------------------------------------------------------------- main() { - # Set up error handling with line numbers - set -E - trap 'error "Error on line $LINENO. Command: $BASH_COMMAND"' ERR - trap cleanup EXIT + initialize_environment "../.." - info "Starting script in directory: $(pwd)" - info "Script directory: $SCRIPT_DIR" + MODULES_DIR="${ENV_ROOT}/scripts/utils/github" - # Check dependencies with more detailed logging - info "Checking dependencies..." - check_dependencies - success "All dependencies found" + # Foundation + require log.sh + require fs.sh - # Ensure COMMIT_SHA is set - if [[ -z "${COMMIT_SHA:-}" ]]; then - info "COMMIT_SHA not set, attempting to determine it" - COMMIT_SHA=$(git rev-parse HEAD 2>/dev/null || true) + # Domain logic + require cli.sh + require gh.sh + require video.sh + require artifact.sh - if [[ -z "${COMMIT_SHA}" ]]; then - info "Git rev-parse failed, trying GitHub API" - COMMIT_SHA=$(gh api repos/cs3org/ocm-test-suite/commits/main --jq '.sha' 2>/dev/null || true) + parse_cli "$@" - if [[ -z "${COMMIT_SHA}" ]]; then - error "Could not determine COMMIT_SHA. Please set it manually or ensure you're in a git repository." - exit 1 - fi - fi - export COMMIT_SHA - info "Using commit SHA: ${COMMIT_SHA}" - fi + info "Repository : $REPO" + info "Commit SHA : $COMMIT_SHA" + info "Output dir : $OUTDIR" + info "Workflows (${#WORKFLOWS[@]}) : ${WORKFLOWS[*]}" - info "Downloading artifacts for commit: ${COMMIT_SHA}" + _timer_start + ensure_dir "$OUTDIR" + WORKDIR=$(mk_tmp) + info "Temporary dir : $WORKDIR" + _timer_end "Bootstrap" - # Get workflow files from the local .github/workflows directory - info "Looking for workflow files in .github/workflows..." - if [[ ! 
-d ".github/workflows" ]]; then - error "Workflows directory not found: .github/workflows" - error "Current directory contents: $(ls -la)" - exit 1 - fi - - if [[ -z ${WORKFLOWS_CSV:-} ]]; then - shopt -s nullglob - WORKFLOW_LIST=( - .github/workflows/login-*.yml - .github/workflows/share-link-*.yml - .github/workflows/share-with-*.yml - .github/workflows/invite-link-*.yml - ) - shopt -u extglob - # Basename‐only - for i in "${!WORKFLOW_LIST[@]}"; do - WORKFLOW_LIST[$i]=$(basename "${WORKFLOW_LIST[$i]}") - done - else - IFS=',' read -r -a WORKFLOW_LIST <<<"${WORKFLOWS_CSV}" - fi - - declare -a workflow_files=("${WORKFLOW_LIST[@]}") - - if [[ ${#workflow_files[@]} -eq 0 ]]; then - error "No workflow files found!" - error "Contents of .github/workflows: $(ls -la .github/workflows)" - exit 1 - fi - - info "Found ${#workflow_files[@]} workflow files" - - # Log all found workflows by type with counts - info "=== Found Workflows ===" - declare -a login_files=() - declare -a share_files=() - declare -a invite_files=() - - for wf in "${workflow_files[@]}"; do - if [[ "$wf" =~ ^login- ]]; then - login_files+=("$wf") - elif [[ "$wf" =~ ^share- ]]; then - share_files+=("$wf") - elif [[ "$wf" =~ ^invite- ]]; then - invite_files+=("$wf") - fi - done - - info "Login workflows (${#login_files[@]}):" - for wf in "${login_files[@]}"; do - info " - $wf" - done - - info "Share workflows (${#share_files[@]}):" - for wf in "${share_files[@]}"; do - info " - $wf" - done - - info "Invite workflows (${#invite_files[@]}):" - for wf in "${invite_files[@]}"; do - info " - $wf" - done - - # Count of expected workflow types - login_count=0 - share_count=0 - invite_count=0 - - info "Found ${#workflow_files[@]} relevant workflows" - - # Process and categorize workflows - for workflow in "${workflow_files[@]}"; do - # First increment the counters - case "$workflow" in - login-*) - login_count=$((login_count + 1)) - info "Processing login workflow: $workflow" - ;; - share-*) - share_count=$((share_count + 1)) - info "Processing share workflow: $workflow" - ;; - invite-*) - invite_count=$((invite_count + 1)) - info "Processing invite workflow: $workflow" - ;; - *) - warn "Unexpected workflow pattern found: $workflow" - continue - ;; - esac - - # Then process the artifacts - if ! fetch_workflow_artifacts "$workflow"; then - error "Failed to process workflow: $workflow" - continue - fi + for wf in "${WORKFLOWS[@]}"; do + process_workflow "$REPO" "$wf" "$COMMIT_SHA" "$OUTDIR" done - # Verify we processed the expected number of workflows - total=$((login_count + share_count + invite_count)) - - info "=== Workflow Count Summary ===" - info "Login workflows: $login_count" - info "Share workflows: $share_count" - info "Invite workflows: $invite_count" - info "Total processed: $total / ${#workflow_files[@]}" - - if ((total < ${#workflow_files[@]})); then - warn "Not every workflow produced artifacts - continuing anyway." 
- fi - - # Generate manifest - generate_manifest - - # Create all zip bundles - create_combined_zip - create_platform_bundles - create_test_type_bundles - create_result_bundles - create_category_bundles - - # Generate bundle sizes JSON - generate_bundle_sizes - - # Debug output - info "Contents of artifacts directory:" - ls -R "$ARTIFACTS_DIR" - - # Add summary at the end - info "=== Final Summary ===" - info "Total workflows processed: $total" - info "- Login workflows: $login_count" - info "- Share workflows: $share_count" - info "- Invite workflows: $invite_count" - - # Add disk usage information - local artifacts_size - artifacts_size=$(du -sh "$ARTIFACTS_DIR" 2>/dev/null | cut -f1) - info "Total artifacts size: $artifacts_size" - - info "Log file location: $LOG_FILE" - success "Script completed successfully" + success "All workflows processed. Artifacts at: $OUTDIR" } main "$@" diff --git a/scripts/utils/github/artifact.sh b/scripts/utils/github/artifact.sh new file mode 100755 index 00000000..fcbd9185 --- /dev/null +++ b/scripts/utils/github/artifact.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +# Requires: gh, jq, unzip + +set -euo pipefail + +# Convert workflow filename to a lowercase slug (basename without extension) +_slugify() { + local _x=$1 + _x="${_x##*/}" + _x="${_x%.*}" + echo "${_x,,}" +} + +# Fetch, extract, and post-process artifacts for one workflow. +# Arguments: +process_workflow() { + local _repo=$1 _workflow=$2 _sha=$3 _out=$4 + + local _slug _run_id + _slug=$(_slugify "${_workflow}") + + # Resolve the most relevant run ID + _run_id=$(gh_get_run_id "${_repo}" "${_workflow}" "${_sha}" || true) + if [[ -z ${_run_id} || ${_run_id} == "null" ]]; then + warn "No run found for ${_workflow} @ ${_sha} | skipping" + return 0 + fi + + info "Workflow ${_workflow}: using run-id ${_run_id}" + ensure_dir "${_out}/${_slug}" + + # Temporary location for zips + local _tmp + _tmp=$(mktemp -d) + trap 'rm -rf -- "${_tmp}"' RETURN + + # Download & unzip every artifact + gh_get_artifacts "${_repo}" "${_run_id}" | while read -r _id _name _size; do + info "↳ downloading artifact ${_name} ($(hr_size ${_size}))" + local _zip + _zip=$(gh_download_artifact "${_repo}" "${_id}" "${_tmp}") + unzip -q -o "${_zip}" -d "${_out}/${_slug}" + done + + # Post-process all raw videos (skip ones already renamed) + find "${_out}/${_slug}" -type f -name "*.mp4" ! 
-name "recording.mp4" -print0 | + while IFS= read -r -d '' _vid; do + process_video "${_vid}" + done + + success "Finished processing ${_workflow}" +} \ No newline at end of file diff --git a/scripts/utils/github/cli.sh b/scripts/utils/github/cli.sh new file mode 100755 index 00000000..34a79cf9 --- /dev/null +++ b/scripts/utils/github/cli.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash + +# shellcheck shell=bash disable=SC2034,SC2155 +set -euo pipefail + +parse_cli() { + # defaults + REPO="" + COMMIT_SHA="" + OUTDIR="site/static/artifacts" + VERBOSE=0 + WORKFLOWS_CSV="" + + # getopt (POSIX long‑only) + local _TEMP + _TEMP=$(getopt -o '' -l repo:,commit:,workflows:,outdir:,verbose,help -- "$@") || { + echo "Try --help" >&2; return 1; } + eval set -- "${_TEMP}" + + while true; do + case "$1" in + --repo) REPO="$2"; shift 2;; + --commit) COMMIT_SHA="$2"; shift 2;; + --workflows) WORKFLOWS_CSV="$2"; shift 2;; + --outdir) OUTDIR="$2"; shift 2;; + --verbose) VERBOSE=1; shift;; + --help) + cat <&2; exit 1;; + esac + done + + # derive defaults when unset + [[ -z $REPO ]] && REPO=$(git remote get-url origin 2>/dev/null | sed -E 's#.*/([^/]+/[^/.]+)(\.git)?#\1#') + [[ -z $COMMIT_SHA ]] && COMMIT_SHA=$(git rev-parse HEAD 2>/dev/null) + + # expose as readonly + readonly REPO COMMIT_SHA OUTDIR VERBOSE + + # workflow list as array + if [[ -n $WORKFLOWS_CSV ]]; then + IFS=',' read -r -a WORKFLOWS <<< "$WORKFLOWS_CSV" + else + shopt -s nullglob + WORKFLOWS=( .github/workflows/{login,share-link,share-with,invite-link}-*.yml ) + for i in "${!WORKFLOWS[@]}"; do WORKFLOWS[$i]=$(basename "${WORKFLOWS[$i]}"); done + shopt -u nullglob + fi + readonly -a WORKFLOWS +} diff --git a/scripts/utils/github/fs.sh b/scripts/utils/github/fs.sh new file mode 100755 index 00000000..70f6e27b --- /dev/null +++ b/scripts/utils/github/fs.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +# shellcheck shell=bash disable=SC2034 +set -euo pipefail + +# Ensure a directory exists or create if absent +ensure_dir() { + local _dir=$1 + [[ -d "$_dir" ]] || mkdir -p -- "$_dir" +} + +# Create one global workspace that is wiped automatically +mk_tmp() { + WORKDIR=$(mktemp -d) + trap 'rm -rf -- "$WORKDIR"' EXIT + echo "$WORKDIR" +} + +# Human readable byte size KiB/MiB/GiB with one decimal +hr_size() { + local _bytes=${1:-0} + (( _bytes < 1024 )) && { printf "%d B" "${_bytes}"; return; } + (( _bytes < 1048576 )) && { printf "%.1f KiB" "$(bc -l <<< "${_bytes}/1024")"; return; } + (( _bytes < 1073741824 )) && { printf "%.1f MiB" "$(bc -l <<< "${_bytes}/1048576")"; return; } + printf "%.1f GiB" "$(bc -l <<< "${_bytes}/1073741824")" +} \ No newline at end of file diff --git a/scripts/utils/github/gh.sh b/scripts/utils/github/gh.sh new file mode 100755 index 00000000..b1c45c68 --- /dev/null +++ b/scripts/utils/github/gh.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +# Requires: GitHub CLI (`gh`) authenticated via token or `gh auth login`. + +set -euo pipefail + +# Return the JSON blob for the most relevant workflow run. +# Usage: gh_get_run [] +# If is provided, tries to match first 7/40‑char prefix. +# Falls back to the latest run when nothing matches. +gh_get_run() { + local _repo=$1 _workflow=$2 _sha=${3:-} + local _api="repos/${_repo}/actions/workflows/${_workflow}/runs?per_page=20" + local _filter='.workflow_runs[0]' + if [[ -n $_sha ]]; then + _filter=".workflow_runs[] | select(.head_sha|startswith(\"${_sha}\")) | ." 
+ fi + gh api "${_api}" --jq "${_filter}" +} + +# Echo run‑ID only (helper) +gh_get_run_id() { gh_get_run "$@" | jq -r '.id'; } + +# List artifacts for a run. +# Yields: " " +gh_get_artifacts() { + local _repo=$1 _run=$2 + gh api "repos/${_repo}/actions/runs/${_run}/artifacts" \ + --jq '.artifacts[] | "\(.id) \(.name) \(.size_in_bytes // 0)"' +} + +# Download artifact ZIP to a directory and echo the destination path. +# Usage: gh_download_artifact +gh_download_artifact() { + local _repo=$1 _art_id=$2 _dest_dir=$3 + ensure_dir "${_dest_dir}" + local _out="${_dest_dir}/artifact-${_art_id}.zip" + gh api "repos/${_repo}/actions/artifacts/${_art_id}/zip" >"${_out}" + echo "${_out}" +} \ No newline at end of file diff --git a/scripts/utils/github/log.sh b/scripts/utils/github/log.sh new file mode 100755 index 00000000..f7e532fe --- /dev/null +++ b/scripts/utils/github/log.sh @@ -0,0 +1,41 @@ +#!/usr/bin/env bash + +# shellcheck shell=bash disable=SC2034,SC2155 +set -euo pipefail + +# colour palette +readonly _CLR_RESET="\033[0m" +readonly _CLR_INFO="\033[1;34m" # bright blue +readonly _CLR_WARN="\033[1;33m" # bright yellow +readonly _CLR_ERROR="\033[1;31m" # bright red +readonly _CLR_SUCCESS="\033[1;32m" # bright green +readonly _CLR_DEBUG="\033[1;90m" # dim gray + +# core logger +_log() { + local _level=$1; shift + local _msg="$*" + local _stamp + _stamp=$(date +"%Y-%m-%d %H:%M:%S") + + local _clr_var="_CLR_${_level^^}" # INFO _CLR_INFO + # shellcheck disable=SC2086 + printf "%s %b%-7s%b %s\n" "${_stamp}" "${!_clr_var}" "${_level}" "${_CLR_RESET}" "${_msg}" +} + +info() { _log INFO "$*"; } +warn() { _log WARN "$*"; } +error() { _log ERROR "$*"; } +success() { _log SUCCESS "$*"; } + +debug() { + [[ ${VERBOSE:-0} -eq 1 ]] && _log DEBUG "$*" || true +} + +# timing helpers +_timer_start() { TIMER_START=${SECONDS:-0}; } +_timer_end() { + local _op="$1" + local _elapsed=$(( SECONDS - TIMER_START )) + info "${_op} finished in ${_elapsed}s" +} diff --git a/scripts/utils/github/video.sh b/scripts/utils/github/video.sh new file mode 100755 index 00000000..0375033a --- /dev/null +++ b/scripts/utils/github/video.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# Requires: ffmpeg (with libaom‑av1) + +set -euo pipefail + +# Internal: create one AVIF thumbnail next to the video. +_make_thumbnail() { + local _video=$1 + local _thumb="${_video%.*}.avif" + ffmpeg -v quiet -i "${_video}" -vf scale=640:-1 -frames:v 1 \ + -c:v libaom-av1 -still-picture 1 "${_thumb}" + echo "${_thumb}" +} + +# Public API: process a freshly extracted *.mp4 +# 1. renames to recording.mp4 in place +# 2. 
generates thumbnail +process_video() { + local _input=$1 + local _dir + _dir=$(dirname "${_input}") + local _target="${_dir}/recording.mp4" + mv -f -- "${_input}" "${_target}" + _make_thumbnail "${_target}" >/dev/null + success "Processed video => ${_target}" +} From 304cd3aff0ec79302011fd2847219c8e4630ba66 Mon Sep 17 00:00:00 2001 From: Mohammad Mahdi Baghbani Pourvahid Date: Sat, 19 Jul 2025 12:11:35 +0000 Subject: [PATCH 2/3] fix: document structure --- scripts/github/download-artifacts.sh | 36 +++++------ scripts/utils/common.sh | 82 ++++++++++++++++++++++++ scripts/utils/{github => common}/fs.sh | 0 scripts/utils/{github => common}/log.sh | 3 +- scripts/utils/github.sh | 84 +++++++++++++++++++++++++ 5 files changed, 183 insertions(+), 22 deletions(-) create mode 100755 scripts/utils/common.sh rename scripts/utils/{github => common}/fs.sh (100%) rename scripts/utils/{github => common}/log.sh (93%) create mode 100755 scripts/utils/github.sh diff --git a/scripts/github/download-artifacts.sh b/scripts/github/download-artifacts.sh index c95e74e5..193062da 100755 --- a/scripts/github/download-artifacts.sh +++ b/scripts/github/download-artifacts.sh @@ -64,13 +64,14 @@ initialize_environment() { } require() { - local name="$1" - local path="$MODULES_DIR/$name" - if [[ -f $path ]]; then + local modules="${1}" + local name="${2}" + local path="${modules}/${name}" + if [[ -f ${path} ]]; then # shellcheck source=/dev/null - source "$path" + source "${path}" else - echo "Module '$name' not found in $MODULES_DIR" >&2 + echo "Module '${name}' not found in ${modules}" >&2 exit 1 fi } @@ -81,37 +82,30 @@ require() { # ----------------------------------------------------------------------------------- main() { initialize_environment "../.." - - MODULES_DIR="${ENV_ROOT}/scripts/utils/github" - # Foundation - require log.sh - require fs.sh + require "${ENV_ROOT}/scripts/utils" common.sh # Domain logic - require cli.sh - require gh.sh - require video.sh - require artifact.sh + require "${ENV_ROOT}/scripts/utils" github.sh parse_cli "$@" - info "Repository : $REPO" - info "Commit SHA : $COMMIT_SHA" - info "Output dir : $OUTDIR" + info "Repository : ${REPO}" + info "Commit SHA : ${COMMIT_SHA}" + info "Output dir : ${OUTDIR}" info "Workflows (${#WORKFLOWS[@]}) : ${WORKFLOWS[*]}" _timer_start - ensure_dir "$OUTDIR" + ensure_dir "${OUTDIR}" WORKDIR=$(mk_tmp) - info "Temporary dir : $WORKDIR" + info "Temporary dir : ${WORKDIR}" _timer_end "Bootstrap" for wf in "${WORKFLOWS[@]}"; do - process_workflow "$REPO" "$wf" "$COMMIT_SHA" "$OUTDIR" + process_workflow "${REPO}" "${wf}" "${COMMIT_SHA}" "${OUTDIR}" done - success "All workflows processed. Artifacts at: $OUTDIR" + success "All workflows processed. Artifacts at: ${OUTDIR}" } main "$@" diff --git a/scripts/utils/common.sh b/scripts/utils/common.sh new file mode 100755 index 00000000..cd9e8a2e --- /dev/null +++ b/scripts/utils/common.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# ----------------------------------------------------------------------------------- +# Function: resolve_script_dir +# Purpose : Resolves the absolute path of the script's directory, handling symlinks. +# Returns : +# Exports SOURCE, SCRIPT_DIR +# Note : This function relies on BASH_SOURCE, so it must be used in a Bash shell. 
+# ----------------------------------------------------------------------------------- +resolve_script_dir() { + local source="${BASH_SOURCE[0]}" + + # Follow symbolic links until we get the real file location + while [ -L "${source}" ]; do + # Get the directory path where the symlink is located + dir="$(cd -P "$(dirname "${source}")" >/dev/null 2>&1 && pwd)" + # Use readlink to get the target the symlink points to + source="$(readlink "${source}")" + # If the source was a relative symlink, convert it to an absolute path + [[ "${source}" != /* ]] && source="${dir}/${source}" + done + + # After resolving symlinks, retrieve the directory of the final source + SCRIPT_DIR="$(cd -P "$(dirname "${source}")" >/dev/null 2>&1 && pwd)" + + # Exports + export SOURCE="${source}" + export SCRIPT_DIR="${SCRIPT_DIR}" +} + +# ----------------------------------------------------------------------------------- +# Function: initialize_environment +# Purpose : +# 1) Resolve the script's directory. +# 2) Change into that directory plus an optional subdirectory (if provided). +# 3) Export ENV_ROOT as the new working directory. +# +# Arguments: +# 1) $1 - Relative or absolute path to a subdirectory (optional). +# If omitted or empty, defaults to '.' (the same directory as resolve_script_dir). +# +# Usage Example: +# initialize_environment # Uses the script's directory +# initialize_environment "dev" # Changes to script's directory + "/dev" +# ----------------------------------------------------------------------------------- +initialize_environment() { + # Resolve script's directory + resolve_script_dir + + # Local variables + local subdir + # Check if a subdirectory argument was passed; default to '.' if not + subdir="${1:-.}" + + # Attempt to change into the resolved directory + the subdirectory + if cd "${SCRIPT_DIR}/${subdir}"; then + ENV_ROOT="$(pwd)" + export ENV_ROOT + else + printf "Error: %s\n" "Failed to change directory to '${SCRIPT_DIR}/${subdir}'." >&2 && exit 1 + fi +} + +require() { + local modules="${1}" + local name="${2}" + local path="${modules}/${name}" + if [[ -f ${path} ]]; then + # shellcheck source=/dev/null + source "${path}" + else + echo "Module '${name}' not found in ${modules}" >&2 + exit 1 + fi +} + +initialize_environment "../.." + +require "${ENV_ROOT}/scripts/utils/common" fs.sh +require "${ENV_ROOT}/scripts/utils/common" log.sh diff --git a/scripts/utils/github/fs.sh b/scripts/utils/common/fs.sh similarity index 100% rename from scripts/utils/github/fs.sh rename to scripts/utils/common/fs.sh diff --git a/scripts/utils/github/log.sh b/scripts/utils/common/log.sh similarity index 93% rename from scripts/utils/github/log.sh rename to scripts/utils/common/log.sh index f7e532fe..38399aed 100755 --- a/scripts/utils/github/log.sh +++ b/scripts/utils/common/log.sh @@ -18,7 +18,8 @@ _log() { local _stamp _stamp=$(date +"%Y-%m-%d %H:%M:%S") - local _clr_var="_CLR_${_level^^}" # INFO _CLR_INFO + # example: INFO _CLR_INFO + local _clr_var="_CLR_${_level^^}" # shellcheck disable=SC2086 printf "%s %b%-7s%b %s\n" "${_stamp}" "${!_clr_var}" "${_level}" "${_CLR_RESET}" "${_msg}" } diff --git a/scripts/utils/github.sh b/scripts/utils/github.sh new file mode 100755 index 00000000..05bb97d6 --- /dev/null +++ b/scripts/utils/github.sh @@ -0,0 +1,84 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# ----------------------------------------------------------------------------------- +# Function: resolve_script_dir +# Purpose : Resolves the absolute path of the script's directory, handling symlinks. 
+# Returns : +# Exports SOURCE, SCRIPT_DIR +# Note : This function relies on BASH_SOURCE, so it must be used in a Bash shell. +# ----------------------------------------------------------------------------------- +resolve_script_dir() { + local source="${BASH_SOURCE[0]}" + + # Follow symbolic links until we get the real file location + while [ -L "${source}" ]; do + # Get the directory path where the symlink is located + dir="$(cd -P "$(dirname "${source}")" >/dev/null 2>&1 && pwd)" + # Use readlink to get the target the symlink points to + source="$(readlink "${source}")" + # If the source was a relative symlink, convert it to an absolute path + [[ "${source}" != /* ]] && source="${dir}/${source}" + done + + # After resolving symlinks, retrieve the directory of the final source + SCRIPT_DIR="$(cd -P "$(dirname "${source}")" >/dev/null 2>&1 && pwd)" + + # Exports + export SOURCE="${source}" + export SCRIPT_DIR="${SCRIPT_DIR}" +} + +# ----------------------------------------------------------------------------------- +# Function: initialize_environment +# Purpose : +# 1) Resolve the script's directory. +# 2) Change into that directory plus an optional subdirectory (if provided). +# 3) Export ENV_ROOT as the new working directory. +# +# Arguments: +# 1) $1 - Relative or absolute path to a subdirectory (optional). +# If omitted or empty, defaults to '.' (the same directory as resolve_script_dir). +# +# Usage Example: +# initialize_environment # Uses the script's directory +# initialize_environment "dev" # Changes to script's directory + "/dev" +# ----------------------------------------------------------------------------------- +initialize_environment() { + # Resolve script's directory + resolve_script_dir + + # Local variables + local subdir + # Check if a subdirectory argument was passed; default to '.' if not + subdir="${1:-.}" + + # Attempt to change into the resolved directory + the subdirectory + if cd "${SCRIPT_DIR}/${subdir}"; then + ENV_ROOT="$(pwd)" + export ENV_ROOT + else + printf "Error: %s\n" "Failed to change directory to '${SCRIPT_DIR}/${subdir}'." >&2 && exit 1 + fi +} + +require() { + local modules="${1}" + local name="${2}" + local path="${modules}/${name}" + if [[ -f ${path} ]]; then + # shellcheck source=/dev/null + source "${path}" + else + echo "Module '${name}' not found in ${modules}" >&2 + exit 1 + fi +} + +initialize_environment "../.." + +require "${ENV_ROOT}/scripts/utils/github" gh.sh +require "${ENV_ROOT}/scripts/utils/github" cli.sh +require "${ENV_ROOT}/scripts/utils/github" video.sh +require "${ENV_ROOT}/scripts/utils/github" artifact.sh From 197d6938cf0510f90cae0a1dba4b3c3a73f81f98 Mon Sep 17 00:00:00 2001 From: Mohammad Mahdi Baghbani Pourvahid Date: Sat, 19 Jul 2025 13:04:13 +0000 Subject: [PATCH 3/3] =?UTF-8?q?fix:=20select=20the=20single=20most=20relev?= =?UTF-8?q?ant=20workflow=E2=80=91run=20JSON=20blob?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- scripts/utils/github/gh.sh | 52 ++++++++++++++++++++++++++++++++------ 1 file changed, 44 insertions(+), 8 deletions(-) diff --git a/scripts/utils/github/gh.sh b/scripts/utils/github/gh.sh index b1c45c68..e29e6295 100755 --- a/scripts/utils/github/gh.sh +++ b/scripts/utils/github/gh.sh @@ -3,18 +3,54 @@ set -euo pipefail -# Return the JSON blob for the most relevant workflow run. +# Select the single most relevant workflow‑run JSON blob. +# +# Cascade (stop on the first non‑empty match): +# 1. Exact head_sha match (if supplied, 7/40‑char prefix OK). +# 2. 
Newest completed run on the current git branch. +# 3. Newest completed run on main. +# 4. Newest completed run overall (safety net). +# # Usage: gh_get_run [] -# If is provided, tries to match first 7/40‑char prefix. -# Falls back to the latest run when nothing matches. gh_get_run() { local _repo=$1 _workflow=$2 _sha=${3:-} - local _api="repos/${_repo}/actions/workflows/${_workflow}/runs?per_page=20" - local _filter='.workflow_runs[0]' - if [[ -n $_sha ]]; then - _filter=".workflow_runs[] | select(.head_sha|startswith(\"${_sha}\")) | ." + local _api_base="repos/${_repo}/actions/workflows/${_workflow}/runs" + local _per_page="per_page=50" + local _status="status=completed" + + # 1. exact head_sha (prefix) + if [[ -n ${_sha} ]]; then + local _match + _match=$(gh api "${_api_base}?${_status}&${_per_page}" \ + --jq ".workflow_runs[] | select(.head_sha|startswith(\"${_sha}\"))" \ + | head -n1 || true) + [[ -n ${_match} ]] && { echo "${_match}"; return 0; } fi - gh api "${_api}" --jq "${_filter}" + + # Detect current branch (empty if not in a git repo). + local _branch + _branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "") + + # Helper: fetch newest run for a given branch slug. + _get_branch_run() { + local _b=$1 + gh api "${_api_base}?branch=${_b}&${_status}&per_page=1" --jq '.workflow_runs[0]' 2>/dev/null || true + } + + # 2. newest run on current branch + if [[ -n ${_branch} && ${_branch} != "HEAD" ]]; then + local _cur + _cur=$(_get_branch_run "${_branch}") + [[ -n ${_cur} && ${_cur} != "null" ]] && { echo "${_cur}"; return 0; } + fi + + # 3. newest run on main + local _main + _main=$(_get_branch_run "main") + [[ -n ${_main} && ${_main} != "null" ]] && { echo "${_main}"; return 0; } + + # 4. newest run overall (fallback) + gh api "${_api_base}?${_status}&per_page=1" --jq '.workflow_runs[0]' } # Echo run‑ID only (helper)
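
For reference, a minimal invocation sketch for the refactored downloader, using the flags that parse_cli defines in scripts/utils/github/cli.sh. The workflow filenames below are illustrative only; --repo and --commit fall back to the current git remote and HEAD when omitted, and --outdir defaults to site/static/artifacts.

    # Hypothetical example: fetch recordings for two workflows of one commit.
    # Workflow names are placeholders, not taken from the patch.
    scripts/github/download-artifacts.sh \
        --repo cs3org/ocm-test-suite \
        --commit "$(git rev-parse HEAD)" \
        --workflows login-example.yml,share-with-example.yml \
        --outdir site/static/artifacts \
        --verbose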