Fading Coder

One Final Commit for the Last Sprint

Home > Tech > Content

Automated Debian Mirror Performance Testing Suite

Tech 2

The following Bash utility performs multi-dimensional analysis of Debian package repository performance, measuring TCP handshake latency, total transfer time, and HTTP throughput across multiple geographic endpoints.

#!/bin/bash

# Multi-metric benchmark for Debian repository selection
# Evaluates endpoints based on reliability, latency, and bandwidth

# ==================== Configuration ====================

# Each entry is "DisplayName|BaseURL|CategoryTag"; fields are split on '|'
# by the driver loop before being passed to measure_endpoint.
REPO_ENDPOINTS=(
    "Tsinghua University|https://mirrors.tuna.tsinghua.edu.cn/debian|academic"
    "USTC|https://mirrors.ustc.edu.cn/debian|academic"
    "CERNET|https://mirrors.cernet.edu.cn/debian|academic"
    "SJTU|https://mirror.sjtu.edu.cn/debian|academic"
    "Tencent Cloud|http://mirrors.tencent.com/debian|commercial"
    "Alibaba Cloud|https://mirrors.aliyun.com/debian|commercial"
    "NetEase|http://mirrors.163.com/debian|commercial"
    "Sohu|http://mirrors.sohu.com/debian|commercial"
    "LZU|http://mirror.lzu.edu.cn/debian|academic"
    "HIT|http://run.hit.edu.cn/debian|academic"
    "BIT|http://mirror.bit.edu.cn/debian|academic"
    "DLUT|http://mirror.dlut.edu.cn/debian|academic"
    "BJTU|http://mirror.bjtu.edu.cn/debian|academic"
    "XMU|http://mirrors.xmu.edu.cn/debian|academic"
)

# Small metadata file fetched from each mirror as the probe target
# (Debian 12 Bookworm; change the dist component for other releases).
PROBE_PATH="dists/bookworm/main/binary-amd64/Release"
# Per-request curl --max-time, in seconds.
CONNECTION_TIMEOUT=10
# Number of probe requests per endpoint; results are averaged.
SAMPLES=3

# ANSI color escape sequences used with `echo -e` throughout.
C_RED='\033[0;31m'
C_GREEN='\033[0;32m'
C_YELLOW='\033[1;33m'
C_BLUE='\033[0;34m'
C_RESET='\033[0m'

# ==================== IPv6 capability probe ====================
# A global-scope inet6 address on any interface indicates usable IPv6;
# HAS_IPV6 is consulted by the driver loop to skip IPv6-only entries.
HAS_IPV6=false
ip addr show | grep -q "inet6.*global" && HAS_IPV6=true

if [ "$HAS_IPV6" = true ]; then
    echo -e "${C_BLUE}IPv6 connectivity detected${C_RESET}"
else
    echo -e "${C_YELLOW}IPv6 unavailable, using IPv4 only${C_RESET}"
fi

# ==================== Functions ====================

show_banner() {
    # Print the run header (title plus current timestamp) followed by a
    # blank separator line.
    local rule="==================================================="
    printf '%s\n' "$rule"
    printf '  Debian Mirror Performance Benchmark\n'
    printf '  Timestamp: %s\n' "$(date '+%Y-%m-%d %H:%M:%S')"
    printf '%s\n\n' "$rule"
}

measure_endpoint() {
    # Probe one repository endpoint SAMPLES times and emit exactly one
    # machine-readable record on stdout:
    #   provider|endpoint|category|success_count|avg_latency_s|avg_speed_kbps
    # All human-readable progress output goes to stderr so that callers
    # capturing with $(...) receive ONLY the record line. (Previously the
    # progress text was also written to stdout and leaked into the results
    # file, corrupting the report's sort and parse stages.)
    #
    # Arguments:
    #   $1 - provider display name
    #   $2 - mirror base URL
    #   $3 - category tag (academic/commercial)
    # Globals read: SAMPLES, CONNECTION_TIMEOUT, PROBE_PATH, C_* colors
    local provider=$1
    local endpoint=$2
    local category=$3

    echo -e "${C_BLUE}Benchmarking ${provider} (${category})...${C_RESET}" >&2

    local successful=0
    local total_latency=0
    local total_throughput=0
    local i metrics conn_time total_time speed http_code

    for ((i = 1; i <= SAMPLES; i++)); do
        metrics=$(curl -sL -w "%{time_connect},%{time_total},%{speed_download},%{http_code}" \
                  -o /dev/null --max-time "${CONNECTION_TIMEOUT}" \
                  "${endpoint}/${PROBE_PATH}" 2>/dev/null)

        # Split curl's comma-separated -w output in one builtin read
        # instead of spawning `cut` four times per sample.
        IFS=',' read -r conn_time total_time speed http_code <<< "$metrics"

        if [ "$http_code" = "200" ]; then
            successful=$((successful + 1))
            # bc handles the fractional seconds/bytes arithmetic.
            total_latency=$(echo "$total_latency + $total_time" | bc)
            total_throughput=$(echo "$total_throughput + $speed" | bc)
            echo -e "  Sample ${i}: ${C_GREEN}OK${C_RESET} (connect: ${conn_time}s, total: ${total_time}s)" >&2
        else
            echo -e "  Sample ${i}: ${C_RED}FAIL${C_RESET} (HTTP ${http_code:-timeout})" >&2
        fi

        # Brief pause so repeated probes don't hammer the mirror.
        sleep 0.5
    done

    local avg_latency avg_speed_kbps
    if [ "$successful" -gt 0 ]; then
        avg_latency=$(echo "scale=4; $total_latency / $successful" | bc)
        avg_speed_kbps=$(echo "scale=2; ($total_throughput / $successful) / 1024" | bc)
        echo -e "  Summary: ${C_GREEN}${successful}/${SAMPLES} successful${C_RESET}, ${avg_latency}s avg latency, ${avg_speed_kbps} KB/s" >&2
        echo "${provider}|${endpoint}|${category}|${successful}|${avg_latency}|${avg_speed_kbps}"
    else
        echo -e "  Summary: ${C_RED}All attempts failed${C_RESET}" >&2
        # Sentinel latency 999.999 ranks unreachable mirrors last.
        echo "${provider}|${endpoint}|${category}|0|999.999|0"
    fi
    echo >&2
}

generate_report() {
    # Render the ranked benchmark table and, for the top-ranked reachable
    # mirror, a ready-to-paste sources.list snippet.
    # $1 - path to the raw results file of pipe-delimited records
    local datafile=$1
    local ranked
    ranked=$(mktemp)

    # Rank: most successes first, then lowest latency, then highest speed.
    sort -t'|' -k4,4nr -k5,5n -k6,6nr "$datafile" > "$ranked"

    echo "==================================================="
    echo "  Benchmark Results"
    echo "==================================================="
    echo

    echo -e "${C_GREEN}Ranked by reliability and performance:${C_RESET}"
    echo "---------------------------------------------------"

    local provider endpoint category success latency throughput
    while IFS='|' read -r provider endpoint category success latency throughput; do
        if [ "$success" -gt 0 ]; then
            printf "%-18s | Success: %d/%d | Latency: %.3fs | Speed: %6.2f KB/s | %s\n" \
                   "$provider" "$success" "$SAMPLES" "$latency" "$throughput" "$category"
        else
            printf "%-18s | Unreachable\n" "$provider"
        fi
    done < "$ranked"

    echo
    # The first line of the ranked file is the best candidate.
    local best_name best_url best_type best_succ best_lat best_spd
    IFS='|' read -r best_name best_url best_type best_succ best_lat best_spd < "$ranked"

    if [ "$best_succ" -gt 0 ]; then
        echo -e "${C_GREEN}Recommended Mirror: ${best_name} (${best_type})${C_RESET}"
        echo "  URL: ${best_url}"
        echo "  Average Latency: ${best_lat}s"
        echo "  Average Speed: ${best_spd} KB/s"
        echo
        echo "Configuration for Debian 12 (Bookworm):"
        echo "---------------------------------------------------"
        local pocket
        for pocket in bookworm bookworm-updates bookworm-backports; do
            echo "deb ${best_url} ${pocket} main contrib non-free non-free-firmware"
        done
        echo "deb http://security.debian.org/debian-security bookworm-security main contrib non-free non-free-firmware"
    else
        echo -e "${C_RED}No reachable mirrors detected. Check network configuration.${C_RESET}"
    fi

    rm -f "$ranked"
}

cleanup_temp() {
    # EXIT-trap handler: remove benchmark scratch files left in /tmp.
    rm -f -- /tmp/mirror_benchmark_*.txt
}

# ==================== Execution ====================

# Fail fast if any required external tool is missing.
for cmd in curl bc ip; do
    if ! command -v "$cmd" &> /dev/null; then
        echo -e "${C_RED}Error: ${cmd} is required but not installed.${C_RESET}"
        exit 1
    fi
done

trap cleanup_temp EXIT

show_banner

RESULTS_FILE=$(mktemp /tmp/mirror_benchmark_XXXXXX.txt)

for entry in "${REPO_ENDPOINTS[@]}"; do
    IFS='|' read -r name url type <<< "$entry"

    # Skip IPv6-only entries when the host has no global IPv6 address.
    if [ "$HAS_IPV6" = false ] && [[ "$name" == *"IPv6"* ]]; then
        continue
    fi

    result=$(measure_endpoint "$name" "$url" "$type")
    # Keep only pipe-delimited record lines. The captured output may also
    # contain human-readable progress text (none of which contains '|');
    # previously the entire blob was appended verbatim, polluting the
    # results file and corrupting generate_report's sort/parse.
    printf '%s\n' "$result" | grep -F '|' >> "$RESULTS_FILE"
done

generate_report "$RESULTS_FILE"

echo
echo "==================================================="

Configuration Parameters

  • REPO_ENDPOINTS: Array defining repository providers, URLs, and classification tags
  • PROBE_PATH: Specific file path accessed for download tests (default targets Debian 12 Bookworm; modify to dists/bullseye/main/binary-amd64/Release for Debian 11)
  • SAMPLES: Number of iterative tests per endpoint to ensure statistical relevance
  • CONNECTION_TIMEOUT: Maximum seconds to wait for TCP/HTTP establishment

Execution Instructions

Save the script to mirror_benchmark.sh, set execution permissions, and invoke directly:

chmod +x mirror_benchmark.sh
./mirror_benchmark.sh

System Requirements

The automation requires standard Unix utilities:

  • curl: HTTP client for metric collection
  • bc: Arbitrary precision calculator for statistical averaging
  • ip: Network interface enumeration for protocol capability detection

Install missing dependencies via:

sudo apt update && sudo apt install -y curl bc iproute2

Related Articles

Understanding Strong and Weak References in Java

Strong References Strong references are the most prevalent type of object referencing in Java. When an object has a strong reference pointing to it, the garbage collector will not reclaim its memory. F...

Comprehensive Guide to SSTI Explained with Payload Bypass Techniques

Introduction Server-Side Template Injection (SSTI) is a vulnerability in web applications where user input is improperly handled within the template engine and executed on the server. This exploit can r...

Implement Image Upload Functionality for Django Integrated TinyMCE Editor

Django’s Admin panel is highly user-friendly, and pairing it with TinyMCE, an effective rich text editor, simplifies content management significantly. Combining the two is particularly useful for bloggi...

Leave a Comment

Anonymous

◎Feel free to join the discussion and share your thoughts.