MySQL Stress Test
#!/bin/bash

# Configuration
DB_DRIVER="mysql"
DB_NAME="loadtest"
DB_USER="root"
DB_PASSWORD="Password"
DB_PORT="3306"
DB_HOST="host"
TABLES_COUNT=64
TABLE_SIZE=10000000
DURATION=300
REPORT_INTERVAL=3
THREADS_LIST=(4 8 16 32 64 128 256)
DELAY_BETWEEN_TESTS=60
PROGRESS_INTERVAL=100

# Create output directory
OUTPUT_DIR="sysbench_results_$(date +%Y%m%d_%H%M%S)"
mkdir -p "$OUTPUT_DIR"

# Result file with timestamp
RESULT_FILE="${OUTPUT_DIR}/sysbench_summary_$(date +%Y%m%d_%H%M%S).csv"

# Write CSV header
echo "Threads,Queries per sec,Transactions per sec,95th percentile (ms),Avg Query Time (ms),Start Time,End Time" > "$RESULT_FILE"

# Main test loop
for threads in "${THREADS_LIST[@]}"; do
    echo "----------------------------------------------------------------"
    echo "Starting test with $threads threads"
    echo "----------------------------------------------------------------"

    # Get start time
    test_start=$(date +%s)

    # Output file for this test
    LOG_FILE="${OUTPUT_DIR}/mysql8.0_1st_${threads}_$(date +%Y%m%d_%H%M%S).log"

    # Format start time
    std_start_time=$(date +%Y%m%d_%H:%M:%S)

    # Start sysbench in background
    sysbench /usr/local/share/sysbench/tests/include/oltp_legacy/oltp.lua \
        --db-driver="$DB_DRIVER" \
        --mysql-db="$DB_NAME" \
        --mysql-user="$DB_USER" \
        --mysql-password="$DB_PASSWORD" \
        --mysql-port="$DB_PORT" \
        --mysql-host="$DB_HOST" \
        --oltp-tables-count="$TABLES_COUNT" \
        --oltp-table-size="$TABLE_SIZE" \
        --time="$DURATION" \
        --max-requests=0 \
        --threads="$threads" \
        --report-interval="$REPORT_INTERVAL" \
        --forced-shutdown=1 \
        run > "$LOG_FILE" 2>&1 &
    SYSBENCH_PID=$!

    # Progress monitoring
    while ps -p $SYSBENCH_PID > /dev/null; do
        elapsed=$(( $(date +%s) - test_start ))
        if (( elapsed % PROGRESS_INTERVAL == 0 )); then
            echo "[$(date +%H:%M:%S)] Current threads: $threads, Elapsed: $elapsed/$DURATION seconds"
        fi
        sleep 1
    done

    # Format end time
    std_end_time=$(date +%Y%m%d_%H:%M:%S)

    # Get end time
    test_end=$(date +%s)

    # Wait for sysbench to complete
    wait $SYSBENCH_PID

    # Parse results from log file
    queries_per_sec=$(grep "queries:" "$LOG_FILE" | awk '{print $3}' | tr -d '(')
    transactions_per_sec=$(grep "transactions:" "$LOG_FILE" | awk '{print $3}' | tr -d '(')
    percentile_95=$(grep "95th percentile:" "$LOG_FILE" | awk '{print $3}')
    avg_query_time=$(grep "avg:" "$LOG_FILE" | awk '{print $2}')

    # Output results to CSV
    echo "$threads,$queries_per_sec,$transactions_per_sec,$percentile_95,$avg_query_time,$std_start_time,$std_end_time" >> "$RESULT_FILE"

    echo "Completed test with $threads threads"
    echo "Results:"
    echo "Threads: $threads, Queries (per sec): $queries_per_sec, Transactions (per sec): $transactions_per_sec, 95th percentile: $percentile_95, Avg Query Time (ms): $avg_query_time, Start Time: $std_start_time, End Time: $std_end_time"
    echo ""

    # Delay between tests if not the last one
    if [[ "$threads" != "${THREADS_LIST[-1]}" ]]; then
        echo "Waiting $DELAY_BETWEEN_TESTS seconds before next test..."
        sleep "$DELAY_BETWEEN_TESTS"
    fi
done

echo "----------------------------------------------------------------"
echo "All tests completed. Results saved to:"
echo "Summary: $RESULT_FILE"
echo "Detailed logs in: $OUTPUT_DIR/"
echo "----------------------------------------------------------------"
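The run loop above assumes the loadtest schema has already been populated; sysbench's run phase does not create tables. A minimal sketch of the one-time prepare step, reusing the same oltp_legacy script path and the table count/size configured above (host, user, and password are the same placeholders as in the script):

#!/bin/bash
# One-time data load before running the benchmark loop above.
# Assumption: sysbench 1.x with the legacy oltp.lua script at the same path.
sysbench /usr/local/share/sysbench/tests/include/oltp_legacy/oltp.lua \
    --db-driver=mysql \
    --mysql-db=loadtest \
    --mysql-user=root \
    --mysql-password=Password \
    --mysql-port=3306 \
    --mysql-host=host \
    --oltp-tables-count=64 \
    --oltp-table-size=10000000 \
    prepare
# Replace "prepare" with "cleanup" to drop the test tables afterwards.

With 64 tables of 10 million rows each, the prepare phase can take a long time and a lot of disk space, so it is worth running once and reusing the data set across the thread-count sweep.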
Redis Stress Test
#!/bin/bash

# Configuration
REDIS_HOST="host"
REDIS_PORT=6379
REQUESTS=100000
CONCURRENCY_LEVELS=(20 50 100 200)
PASSWORD="password"
REPEATS=3
OUTPUT_CSV="redis_benchmark_results.csv"
LOG_DIR="benchmark_logs_$(date +%Y%m%d)"

# Create log directory
mkdir -p "$LOG_DIR"

# Initialize CSV with a UTF-8 BOM so spreadsheet tools detect the encoding
echo -ne "\xEF\xBB\xBFMetric,20 conc test 1,20 conc test 2,20 conc test 3,20 conc avg,50 conc test 1,50 conc test 2,50 conc test 3,50 conc avg,100 conc test 1,100 conc test 2,100 conc test 3,100 conc avg,200 conc test 1,200 conc test 2,200 conc test 3,200 conc avg\n" > "$OUTPUT_CSV"

# Define all metrics that appear in the redis-benchmark log
METRICS=(
    "PING_INLINE"
    "PING_MBULK"
    "SET"
    "GET"
    "INCR"
    "LPUSH"
    "RPUSH"
    "LPOP"
    "RPOP"
    "SADD"
    "HSET"
    "SPOP"
    "ZADD"
    "ZPOPMIN"
    "XADD"
    "MSET (10 keys)"
    "LPUSH (needed to benchmark LRANGE)"
    "LRANGE_100 (first 100 elements)"
    "LRANGE_300 (first 300 elements)"
    "LRANGE_500 (first 500 elements)"
    "LRANGE_600 (first 600 elements)"
)

# Initialize results array
declare -A results

# Extract the throughput figure for one metric from a benchmark log
extract_metric() {
    local output_file=$1
    local metric="$2"

    echo "  DEBUG: Extracting [$metric] from $output_file" >&2

    # Find the section header for this metric
    section_line=$(grep -n -m 1 "====== $metric ======" "$output_file" | cut -d: -f1)
    if [ -z "$section_line" ]; then
        echo "  WARNING: Could not find section for [$metric]" >&2
        echo "-"
        return
    fi
    echo "  DEBUG: Found section at line $section_line" >&2

    # Take the first throughput summary after the section header
    throughput=$(tail -n +"$section_line" "$output_file" | \
        grep -m 1 "throughput summary:" | \
        awk '{print $3}' | \
        tr -d '\r')

    if [ -z "$throughput" ]; then
        echo "  WARNING: Could not extract throughput for [$metric]" >&2
        echo "-"
        return
    fi

    echo "  DEBUG: Extracted throughput: $throughput" >&2
    echo "$throughput"
}

# Function to calculate average
calculate_average() {
    local values=("$@")
    local sum=0
    local count=0

    for value in "${values[@]}"; do
        if [[ "$value" != "-" ]] && [[ "$value" =~ ^[0-9]+(\.[0-9]+)?$ ]]; then
            sum=$(echo "$sum + $value" | bc)
            ((count++))
        fi
    done

    if ((count > 0)); then
        printf "%.2f" "$(echo "scale=2; $sum / $count" | bc)"
    else
        echo "-"
    fi
}

# Function to write results to CSV
write_csv() {
    for metric in "${METRICS[@]}"; do
        # Convert metric name for display (replace underscores with spaces)
        display_metric=$(echo "$metric" | tr '_' ' ')

        # Start the CSV line
        line="$display_metric"

        for concurrency in "${CONCURRENCY_LEVELS[@]}"; do
            # Collect values for this concurrency level
            values=()
            for ((repeat=1; repeat<=REPEATS; repeat++)); do
                key="${metric}_${concurrency}_${repeat}"
                values+=("${results[$key]}")
            done

            # Calculate average
            avg=$(calculate_average "${values[@]}")
            results["${metric}_${concurrency}_avg"]="$avg"

            # Add to CSV line
            for value in "${values[@]}"; do
                line+=",$value"
            done
            line+=",$avg"
        done

        # Write to CSV
        echo "$line" >> "$OUTPUT_CSV"
    done
}

# Main benchmarking loop
for concurrency in "${CONCURRENCY_LEVELS[@]}"; do
    for ((repeat=1; repeat<=REPEATS; repeat++)); do
        echo "Running benchmark with concurrency $concurrency, iteration $repeat..."

        start_time=$(date +"%Y%m%d_%H%M%S")
        output_file="${LOG_DIR}/${repeat}_${concurrency}_${start_time}.log"

        # Run benchmark with timeout and cluster mode
        timeout 300 redis-benchmark \
            -h "$REDIS_HOST" \
            -p "$REDIS_PORT" \
            -a "$PASSWORD" \
            -n "$REQUESTS" \
            -c "$concurrency" \
            --cluster > "$output_file" 2>&1

        # Rename the log so it records both start and end timestamps
        end_time=$(date +"%Y%m%d_%H%M%S")
        log_file="${LOG_DIR}/${repeat}_${concurrency}_${start_time}__${end_time}.log"
        mv "$output_file" "$log_file"

        # Extract all metrics
        for metric in "${METRICS[@]}"; do
            value=$(extract_metric "$log_file" "$metric")
            results["${metric}_${concurrency}_${repeat}"]="$value"
            echo "  $metric: $value"
        done

        sleep 10  # Cooldown between runs
    done
done

# Calculate and write results
write_csv

echo -e "\nBenchmark completed. Results saved to $OUTPUT_CSV"
echo "Individual logs saved in $LOG_DIR/"

# Show final CSV output
echo -e "\nCSV Results Preview:"
column -t -s, "$OUTPUT_CSV" | head -n 20
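extract_metric depends on the "====== <COMMAND> ======" section headers and the "throughput summary:" line that recent redis-benchmark builds (roughly Redis 6.2 and later) print in their default human-readable output. Before launching the full concurrency/repeat matrix, it can save time to confirm your build emits that format. A minimal smoke-test sketch, reusing the same host, port, and password placeholders as the script above (the /tmp path and the restriction to SET/GET are illustrative choices, not part of the original script):

#!/bin/bash
# Quick format check: run only SET and GET with a small request count,
# then list the section headers and throughput lines the parser relies on.
# Add --cluster if the target is a Redis Cluster, as in the main script.
redis-benchmark -h host -p 6379 -a password -n 1000 -c 20 -t set,get \
    > /tmp/rb_smoke.log 2>&1
grep -E "^====== |throughput summary:" /tmp/rb_smoke.log

If the grep prints nothing, the redis-benchmark version in use reports results in an older format, and the extraction logic would need to be adapted before trusting the CSV output.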