linux-image-manager/scripts/analyze/system/dd_optimal_bs_test.sh

#!/bin/bash
# Rough script, but a good base to optimize later. See http://blog.tdg5.com/tuning-dd-block-size/
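#
# Usage sketch (the path below is only an example):
#   ./dd_optimal_bs_test.sh /mnt/target/dd_obs_testfile
# The optional first argument is the path of the temporary test file; place it on
# the filesystem whose optimal dd output block size you want to measure.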
# Since we're dealing with dd, abort if any errors occur
set -e
TEST_FILE=${1:-dd_obs_testfile}
# Remember whether the test file already existed so we only delete files we create;
# written as a conditional list so a missing file does not trip `set -e`.
[ -e "$TEST_FILE" ] && TEST_FILE_EXISTS=0 || TEST_FILE_EXISTS=1
TEST_FILE_SIZE=134217728  # 128 MiB
# Header
PRINTF_FORMAT="%8s : %s\n"
printf "$PRINTF_FORMAT" 'block size' 'transfer rate'
# Block sizes of 512b 1K 2K 4K 8K 16K 32K 64K 128K 256K 512K 1M 2M 4M 8M 16M 32M 64M
for BLOCK_SIZE in 512 1024 2048 4096 8192 16384 32768 65536 131072 262144 524288 1048576 2097152 4194304 8388608 16777216 33554432 67108864
do
# Calculate number of segments required to copy
COUNT=$(($TEST_FILE_SIZE / $BLOCK_SIZE))
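# e.g. 134217728 / 512 = 262144 blocks, while 134217728 / 67108864 = 2 blocks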
if [ $COUNT -le 0 ]; then
echo "Block size of $BLOCK_SIZE estimated to require $COUNT blocks, aborting further tests."
break
fi
# Write the test file with the specified block size; the `2>&1 1>/dev/null`
# redirections capture dd's STDERR (where it reports its statistics) in DD_RESULT
DD_RESULT=$(dd if=/dev/zero of="$TEST_FILE" bs="$BLOCK_SIZE" count="$COUNT" 2>&1 1>/dev/null)
# Show dd's raw output (useful when debugging the rate extraction below)
echo "$DD_RESULT"
# Extract the transfer rate from dd's STDERR output
TRANSFER_RATE=$(echo $DD_RESULT | \grep --only-matching -E '[0-9.]+ ([MGk]?B|bytes)/s(ec)?')
# Clean up the test file, but only if we created it
if [ "$TEST_FILE_EXISTS" -ne 0 ]; then rm "$TEST_FILE"; fi
# Output the result
printf "$PRINTF_FORMAT" "$BLOCK_SIZE" "$TRANSFER_RATE"
done
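
# Note: dd writes through the page cache here, so the rates above largely reflect
# memory speed rather than the device itself. One possible refinement (part of the
# "optimize later" pass mentioned above) is adding conv=fsync to the dd invocation,
# or dropping caches between runs as root:
#   sync && echo 3 > /proc/sys/vm/drop_caches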