Alerts when files exceed size thresholds that could impact performance
{
"hooks": {
"postToolUse": {
"script": "./.claude/hooks/file-size-warning-monitor.sh",
"matchers": [
"write",
"edit"
]
}
}
}#!/usr/bin/env bash
# Read the tool input from stdin
INPUT=$(cat)
TOOL_NAME=$(echo "$INPUT" | jq -r '.tool_name')
FILE_PATH=$(echo "$INPUT" | jq -r '.tool_input.file_path // .tool_input.path // ""')
if [ -z "$FILE_PATH" ]; then
exit 0
fi
# Check if file exists and is a regular file
if [ ! -f "$FILE_PATH" ]; then
exit 0
fi
echo "📏 Checking file size for: $(basename "$FILE_PATH")" >&2
# Get file size in bytes (cross-platform)
get_file_size() {
  local file="$1"

  # Try different stat formats for cross-platform compatibility
  if stat -f%z "$file" 2>/dev/null; then
    # macOS/BSD
    return 0
  elif stat -c%s "$file" 2>/dev/null; then
    # Linux/GNU
    return 0
  elif [ -f "$file" ]; then
    # Fallback: use wc for text files (less accurate for binary)
    wc -c < "$file" 2>/dev/null || echo "0"
  else
    echo "0"
  fi
}
# Convert bytes to human-readable format
format_size() {
  local bytes="$1"

  if [ "$bytes" -lt 1024 ]; then
    echo "${bytes}B"
  elif [ "$bytes" -lt 1048576 ]; then
    echo "$((bytes / 1024))KB"
  elif [ "$bytes" -lt 1073741824 ]; then
    echo "$((bytes / 1048576))MB"
  else
    echo "$((bytes / 1073741824))GB"
  fi
}
# Get file extension
get_file_extension() {
  local file="$1"
  echo "${file##*.}" | tr '[:upper:]' '[:lower:]'
}
# Define file type categories and their thresholds
get_size_threshold() {
  local extension="$1"

  case "$extension" in
    # Source code files - should be relatively small
    js|jsx|ts|tsx|py|rb|go|rs|java|cpp|c|h|hpp|php|cs)
      echo "500000" # 500KB
      ;;
    # Data/config files
    json|xml|yaml|yml|toml|ini|conf)
      echo "1048576" # 1MB
      ;;
    # Documentation
    md|txt|rst|org)
      echo "1048576" # 1MB
      ;;
    # Images
    jpg|jpeg|png|gif|bmp|webp|svg)
      echo "2097152" # 2MB
      ;;
    # Videos
    mp4|avi|mov|wmv|flv|webm|mkv)
      echo "52428800" # 50MB
      ;;
    # Audio
    mp3|wav|flac|aac|ogg)
      echo "10485760" # 10MB
      ;;
    # Archives
    zip|tar|gz|bz2|xz|7z|rar)
      echo "20971520" # 20MB
      ;;
    # Binary executables
    exe|bin|app|dmg|deb|rpm)
      echo "104857600" # 100MB
      ;;
    # Default for unknown file types
    *)
      echo "5242880" # 5MB
      ;;
  esac
}
# Get optimization suggestions for file type
get_optimization_suggestions() {
  local extension="$1"
  local size_mb="$2"

  case "$extension" in
    js|jsx|ts|tsx)
      echo "Consider code splitting, tree shaking, or minification"
      ;;
    json)
      echo "Consider JSON streaming, compression, or breaking into smaller files"
      ;;
    jpg|jpeg)
      echo "Consider JPEG optimization, WebP format, or progressive JPEG"
      ;;
    png)
      echo "Consider PNG optimization, WebP format, or SVG for simple graphics"
      ;;
    gif)
      echo "Consider converting to WebP or MP4 for better compression"
      ;;
    svg)
      echo "Consider SVG optimization tools to remove unnecessary elements"
      ;;
    mp4|mov)
      echo "Consider video compression, lower resolution, or streaming"
      ;;
    pdf)
      echo "Consider PDF compression or splitting into smaller documents"
      ;;
    zip|tar|gz)
      echo "Archive seems large - verify contents are necessary"
      ;;
    md|txt)
      echo "Consider breaking into smaller documents or using external storage"
      ;;
    *)
      echo "Consider file compression or alternative storage solutions"
      ;;
  esac
}
# Get file size
SIZE_BYTES=$(get_file_size "$FILE_PATH")
SIZE_HUMAN=$(format_size "$SIZE_BYTES")
SIZE_MB=$((SIZE_BYTES / 1048576))
SIZE_KB=$((SIZE_BYTES / 1024))
# Get file info
FILE_EXTENSION=$(get_file_extension "$FILE_PATH")
FILE_NAME=$(basename "$FILE_PATH")
THRESHOLD_BYTES=$(get_size_threshold "$FILE_EXTENSION")
THRESHOLD_HUMAN=$(format_size "$THRESHOLD_BYTES")
echo "📊 File: $FILE_NAME ($SIZE_HUMAN)" >&2
# Check if file exceeds threshold
if [ "$SIZE_BYTES" -gt "$THRESHOLD_BYTES" ]; then
echo "⚠️ SIZE WARNING: File exceeds recommended threshold for .$FILE_EXTENSION files" >&2
echo " Current: $SIZE_HUMAN | Recommended: < $THRESHOLD_HUMAN" >&2
# Provide optimization suggestions
SUGGESTION=$(get_optimization_suggestions "$FILE_EXTENSION" "$SIZE_MB")
echo "💡 Suggestion: $SUGGESTION" >&2
# Specific warnings for very large files
if [ "$SIZE_MB" -gt 50 ]; then
echo "🚨 VERY LARGE FILE: This file may cause performance issues" >&2
echo " Consider using Git LFS for files over 50MB" >&2
elif [ "$SIZE_MB" -gt 10 ]; then
echo "⚠️ LARGE FILE: May impact repository performance" >&2
fi
else
echo "✅ File size within acceptable range ($THRESHOLD_HUMAN threshold)" >&2
fi
# Special checks for specific file types
case "$FILE_EXTENSION" in
js|jsx|ts|tsx)
if [ "$SIZE_KB" -gt 100 ]; then
echo "📦 JavaScript bundle size check: Consider code splitting for better performance" >&2
fi
;;
json)
if [ "$SIZE_KB" -gt 500 ]; then
echo "📄 Large JSON detected: Consider pagination or streaming for API responses" >&2
fi
;;
jpg|jpeg|png|gif|webp)
if [ "$SIZE_KB" -gt 500 ]; then
echo "🖼️ Image optimization: Large images impact web performance" >&2
if command -v identify &> /dev/null; then
DIMENSIONS=$(identify -format '%wx%h' "$FILE_PATH" 2>/dev/null || echo "unknown")
echo " Dimensions: $DIMENSIONS" >&2
fi
fi
;;
css|scss|sass)
if [ "$SIZE_KB" -gt 200 ]; then
echo "🎨 CSS size check: Consider removing unused styles or splitting stylesheets" >&2
fi
;;
esac
# Check if file is in git repository
if command -v git &> /dev/null && git rev-parse --git-dir > /dev/null 2>&1; then
  # Check if file is tracked by git
  if git ls-files --error-unmatch "$FILE_PATH" &> /dev/null; then
    echo "🔄 Git repository impact:" >&2

    # Check if file has grown significantly
    if git log --oneline -n 1 -- "$FILE_PATH" &> /dev/null; then
      # File has history, check previous size
      PREV_SIZE=$(git show HEAD:"$FILE_PATH" 2>/dev/null | wc -c | xargs || echo "0")
      if [ "$PREV_SIZE" -gt 0 ]; then
        PREV_SIZE_HUMAN=$(format_size "$PREV_SIZE")
        SIZE_DIFF=$((SIZE_BYTES - PREV_SIZE))

        if [ "$SIZE_DIFF" -gt 0 ]; then
          DIFF_HUMAN=$(format_size "$SIZE_DIFF")
          PERCENT_INCREASE=$((SIZE_DIFF * 100 / PREV_SIZE))
          echo " Size change: +$DIFF_HUMAN (+$PERCENT_INCREASE%) from previous version" >&2

          if [ "$PERCENT_INCREASE" -gt 100 ]; then
            echo " 📈 Significant size increase detected" >&2
          fi
        fi
      fi
    fi

    # Suggest Git LFS for large files
    if [ "$SIZE_MB" -gt 10 ]; then
      echo " 💡 Consider using Git LFS for this large file" >&2
      if [ -f ".gitattributes" ]; then
        if ! grep -q "*.$FILE_EXTENSION.*lfs" ".gitattributes" 2>/dev/null; then
          echo " Add to .gitattributes: *.$FILE_EXTENSION filter=lfs diff=lfs merge=lfs -text" >&2
        fi
      else
        echo " Create .gitattributes with: *.$FILE_EXTENSION filter=lfs diff=lfs merge=lfs -text" >&2
      fi
    fi
  fi
fi
# Overall performance impact assessment
echo "" >&2
echo "📋 Performance Impact Assessment:" >&2
if [ "$SIZE_MB" -gt 50 ]; then
echo " 🔴 High Impact: File may cause significant performance issues" >&2
elif [ "$SIZE_MB" -gt 10 ]; then
echo " 🟡 Medium Impact: File may cause minor performance issues" >&2
elif [ "$SIZE_KB" -gt 500 ]; then
echo " 🟢 Low Impact: File size is acceptable but monitor growth" >&2
else
echo " ✅ Minimal Impact: File size is optimal" >&2
fi
echo "" >&2
echo "💡 File Size Best Practices:" >&2
echo " • Keep source code files under 500KB" >&2
echo " • Optimize images before committing" >&2
echo " • Use Git LFS for files over 10MB" >&2
echo " • Consider file compression for large data files" >&2
exit 0

Hook triggers warnings for legitimate large binary files
Create size threshold overrides in .claude/hook-config.json: THRESHOLD_OVERRIDES={'*.wasm': 10485760}. Add file extension exclusions for known large asset types.
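A minimal sketch of how the script could honor such overrides, assuming a hypothetical .claude/hook-config.json containing a THRESHOLD_OVERRIDES object keyed by glob pattern (the shipped script has no override mechanism; the helper name and config layout below are illustrative):

# Hypothetical override lookup; expects .claude/hook-config.json shaped like
# { "THRESHOLD_OVERRIDES": { "*.wasm": 10485760 } }
get_threshold_override() {
  local file="$1"
  local config=".claude/hook-config.json"
  [ -f "$config" ] || return 1
  # Walk the override patterns and return the first byte limit whose glob matches the filename
  while IFS=$'\t' read -r pattern bytes; do
    case "$(basename "$file")" in
      $pattern) echo "$bytes"; return 0 ;;
    esac
  done < <(jq -r '.THRESHOLD_OVERRIDES // {} | to_entries[] | "\(.key)\t\(.value)"' "$config")
  return 1
}

# Prefer an override, otherwise fall back to the built-in table
THRESHOLD_BYTES=$(get_threshold_override "$FILE_PATH") || THRESHOLD_BYTES=$(get_size_threshold "$FILE_EXTENSION")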
File size calculation fails on Windows with stat errors
Use a PowerShell fallback on Windows: (Get-Item $FILE_PATH).Length. Add platform detection, e.g. [[ "$OSTYPE" == msys* ]], and fall back to an alternative stat format or wc -c.
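A sketch of a Windows-aware size lookup for Git Bash/MSYS or Cygwin, assuming powershell.exe and cygpath are available on PATH (both are assumptions; adjust to your environment):

# Hypothetical Windows-aware variant of get_file_size
get_file_size_windows() {
  local file="$1"
  if [[ "$OSTYPE" == msys* || "$OSTYPE" == cygwin* ]]; then
    local win_path size
    # MSYS/Cygwin paths usually need converting to a Windows path first
    win_path=$(cygpath -w "$file" 2>/dev/null || echo "$file")
    size=$(powershell.exe -NoProfile -Command "(Get-Item -LiteralPath '$win_path').Length" 2>/dev/null | tr -d '\r')
    if [ -n "$size" ]; then
      echo "$size"
    else
      wc -c < "$file"   # last-resort fallback
    fi
  else
    wc -c < "$file"
  fi
}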
Git size comparison shows incorrect previous version size
Check whether the file is staged or committed: use git show :"$FILE_PATH" for the staged copy and git show HEAD:"$FILE_PATH" for the committed copy. Handle new files with a [ -z "$PREV_SIZE" ] check.
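A sketch of that distinction using the same variable names as the script (note that git show with a path expects the path relative to the repository root, which is an additional assumption here):

# Compare against the staged copy if the file has staged changes, else the last commit
if git diff --cached --quiet -- "$FILE_PATH" 2>/dev/null; then
  PREV_SIZE=$(git show HEAD:"$FILE_PATH" 2>/dev/null | wc -c | xargs)
else
  PREV_SIZE=$(git show :"$FILE_PATH" 2>/dev/null | wc -c | xargs)
fi
# Brand-new files have no previous version; treat an empty result as zero
[ -z "$PREV_SIZE" ] && PREV_SIZE=0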
Hook slows down every file write operation significantly
Add file size pre-check before running full analysis: skip hook if size < 100KB. Use matcher filters: matchers: ['write'] only, exclude 'edit' for incremental changes.
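A sketch of an early-exit guard placed right after the existence check near the top of the script, assuming 100KB as the cutoff below which none of the later warnings can fire:

# Fast pre-check: files under 100KB are below every warning cutoff, so skip the full analysis
QUICK_SIZE=$(wc -c < "$FILE_PATH" 2>/dev/null || echo 0)
if [ "$QUICK_SIZE" -lt 102400 ]; then
  exit 0
fi

Narrowing the hook configuration to matchers: ["write"] (as suggested above) additionally keeps the hook off incremental edits.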
Image dimension detection with identify command fails
Check whether ImageMagick is installed (command -v identify) and skip the dimension check when it is missing. As an alternative, the file command provides basic image info without requiring external dependencies.
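The dimension check from the script, reworked to degrade gracefully (the file fallback is an addition; identify comes from ImageMagick):

# Report image dimensions if ImageMagick is available; otherwise fall back to `file`
if command -v identify &> /dev/null; then
  DIMENSIONS=$(identify -format '%wx%h' "$FILE_PATH" 2>/dev/null || echo "unknown")
  echo " Dimensions: $DIMENSIONS" >&2
elif command -v file &> /dev/null; then
  # `file -b` prints brief metadata, which often includes dimensions for common image formats
  echo " Info: $(file -b "$FILE_PATH")" >&2
fi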