ShowerLoop-cc/docker/showerloop/public/optimize-js.sh

108 lines
3.3 KiB
Bash
Executable File

#!/bin/bash
# Build, minify, and stage JavaScript assets into public/.
# Set SKIP_COMPRESSION=1 to skip the compression step if it's problematic.
set -euo pipefail

SKIP_COMPRESSION=${SKIP_COMPRESSION:-0}

echo "🔍 Starting JavaScript optimization..."

# Staging directory (idempotent; also covers anything build:js emits here)
mkdir -p public/js

# Build optimized bundles
echo "📦 Building optimized JavaScript bundles..."
npm run build:js

# Copy minified bundles and their source maps to the public tree.
# Failures are non-fatal: a missing glob just means nothing to copy.
echo "📦 Moving optimized files to public directory..."
cp static/js/*.min.js public/js/ || echo "Note: No .min.js files found"
cp static/js/*.map public/js/ 2>/dev/null || echo "Note: No source maps found"

# Create utils.js re-export file so plain './utils.js' imports resolve to
# the minified build. Quoted heredoc: content is written literally.
echo "📝 Creating utils.js module alias..."
cat > public/js/utils.js <<'EOF'
// Re-export everything from the minified module
export * from './utils.modern.min.js';
EOF
# Optionally produce pre-compressed (.gz / .br) variants so the web server
# can serve them without compressing on the fly.
if [ "${SKIP_COMPRESSION:-0}" = "1" ]; then
  echo "🚫 Skipping file compression (SKIP_COMPRESSION=1)"
else
  echo "🗜️ Creating compressed versions..."
  echo "Compressing JS files..."

  # Detect which compressors are available; skip gracefully when missing.
  if command -v gzip &>/dev/null; then
    HAS_GZIP=1
  else
    echo "⚠️ Warning: gzip not found, skipping gzip compression"
    HAS_GZIP=0
  fi
  if command -v brotli &>/dev/null; then
    HAS_BROTLI=1
  else
    echo "⚠️ Warning: brotli not found, skipping brotli compression"
    HAS_BROTLI=0
  fi

  # GNU coreutils 'timeout' is not available on stock macOS/BSD; fall back
  # to running the compressor without a time limit in that case, instead of
  # failing on the missing command.
  run_limited() {
    if command -v timeout &>/dev/null; then
      timeout 5s "$@"
    else
      "$@"
    fi
  }

  for file in public/js/*.min.js; do
    if [ -f "$file" ]; then
      echo "Processing $file..."
      # -k keeps the original .min.js alongside the compressed copy.
      if [ "$HAS_GZIP" = "1" ]; then
        echo " - Gzip compressing..."
        run_limited gzip -9 -k "$file" 2>/dev/null || echo " ⚠️ Gzip compression timed out or failed"
      fi
      if [ "$HAS_BROTLI" = "1" ]; then
        echo " - Brotli compressing..."
        run_limited brotli -k "$file" 2>/dev/null || echo " ⚠️ Brotli compression timed out or failed"
      fi
    fi
  done
fi
# Emit a Netlify-style "_headers" file for long-lived immutable caching of
# hashed assets. In this format, header lines must be indented beneath the
# URL pattern they apply to. Quoted delimiter: no expansion intended.
echo "📝 Creating caching headers..."
cat > public/_headers <<'EOL'
# JavaScript files
/js/*.js
  Cache-Control: public, max-age=31536000, immutable
  Content-Type: application/javascript

# Source maps
/js/*.map
  Cache-Control: public, max-age=31536000, immutable
EOL
# Minify large CSS files with csso (note: earlier comment said cssnano, but
# the tool actually invoked below is csso).
echo "🎨 Optimizing CSS files..."
if command -v npx &>/dev/null; then
  # Destination for minified copies
  mkdir -p public/css/optimized

  # Minify vendor and app stylesheets. Unmatched globs fail the -f test
  # and are skipped harmlessly.
  for CSS_FILE in public/css/vendor/*.css public/css/*.css; do
    if [ -f "$CSS_FILE" ]; then
      FILENAME=$(basename "$CSS_FILE")
      echo "Optimizing $FILENAME..."
      npx csso "$CSS_FILE" -o "public/css/optimized/$FILENAME"
    fi
  done

  # Point HTML at the optimized copies. 'sed -i' in-place syntax differs
  # between BSD sed (-i '') and GNU sed (-i); writing a .bak backup and
  # deleting it afterwards works portably on both.
  echo " - Updating CSS references..."
  find public -name "*.html" -type f -exec sed -i.bak \
    -e 's|/css/vendor/|/css/optimized/|g' \
    -e 's|/css/app.min.css|/css/optimized/app.min.css|g' \
    -e 's|/css/custom.css|/css/optimized/custom.css|g' {} \;
  find public -name "*.html.bak" -type f -delete
else
  echo "⚠️ npx not found, skipping CSS optimization"
fi

echo "✅ JavaScript optimization complete!"
echo "📊 Potential savings: ~1.2MB of unused JavaScript removed and CSS optimized"