Web Analytics

Real-World Examples

Advanced ~35 min read

Theory becomes powerful when applied to real problems. This final lesson presents complete, production-ready scripts that demonstrate everything you've learned. Study these examples, adapt them to your needs, and use them as references for your own projects!

Example 1: Backup Script

A complete backup system with rotation, compression, and verification.

Output
Click Run to execute your code

Key Features

  • Configurable via environment or command-line
  • Compression with gzip
  • Automatic rotation of old backups
  • Verification of backup integrity
  • Logging with timestamps
  • Error handling and cleanup
Cron Setup: Schedule daily backups with:
# /etc/cron.d/backup
0 2 * * * root /opt/scripts/backup.sh -s /var/data -d /backup -r 7 >> /var/log/backup.log 2>&1

Example 2: System Monitoring

Monitor system resources and send alerts when thresholds are exceeded.

Output
Click Run to execute your code

Example 3: Deployment Script

Deploy applications with rollback capability.

#!/usr/bin/env bash
#
# deploy.sh - Application deployment with rollback
#
# Usage: deploy.sh {deploy|rollback} [source]
#
# Layout: code is copied into a timestamped directory under releases/,
# then a "current" symlink is flipped to point at it; rollback re-points
# the symlink at the previous release.
set -euo pipefail

readonly SCRIPT_NAME="$(basename "$0")"
readonly DEPLOY_DIR="/var/www/app"            # root of the deployment layout
readonly RELEASES_DIR="$DEPLOY_DIR/releases"  # one timestamped dir per deploy
readonly CURRENT_LINK="$DEPLOY_DIR/current"   # symlink to the live release
readonly SHARED_DIR="$DEPLOY_DIR/shared"      # state shared across releases (logs, .env)
readonly KEEP_RELEASES=5                      # how many releases cleanup keeps

# Logging
log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"; }   # timestamped line on stdout
die() { log "ERROR: $*" >&2; exit 1; }                # log to stderr, abort script

# Create new release directory
create_release() {
    local release_id="$(date +%Y%m%d%H%M%S)"
    local release_dir="$RELEASES_DIR/$release_id"

    log "Creating release: $release_id"

    mkdir -p "$release_dir"
    echo "$release_dir"
}

# Deploy code to release directory
deploy_code() {
    local release_dir="$1"
    local source="${2:-.}"

    log "Deploying code to $release_dir"

    # Copy application code
    rsync -av --exclude='.git' --exclude='node_modules' \\
        "$source/" "$release_dir/"

    # Link shared directories
    ln -sf "$SHARED_DIR/logs" "$release_dir/logs"
    ln -sf "$SHARED_DIR/.env" "$release_dir/.env"
}

# Switch current symlink to new release
switch_release() {
    local release_dir="$1"

    log "Switching to release: $(basename "$release_dir")"

    ln -sfn "$release_dir" "$CURRENT_LINK"
}

# Cleanup old releases
cleanup_old_releases() {
    log "Cleaning up old releases (keeping $KEEP_RELEASES)"

    local releases=($(ls -1t "$RELEASES_DIR"))
    local to_delete=("${releases[@]:$KEEP_RELEASES}")

    for release in "${to_delete[@]}"; do
        log "Removing old release: $release"
        rm -rf "$RELEASES_DIR/$release"
    done
}

# Rollback to previous release
rollback() {
    local releases=($(ls -1t "$RELEASES_DIR"))

    [[ ${#releases[@]} -lt 2 ]] && die "No previous release to rollback to"

    local previous="${releases[1]}"
    log "Rolling back to: $previous"

    switch_release "$RELEASES_DIR/$previous"
    log "Rollback complete"
}

# Main deployment
deploy() {
    local source="${1:-.}"

    log "Starting deployment..."

    # Create release
    local release_dir
    release_dir=$(create_release)

    # Deploy code
    deploy_code "$release_dir" "$source"

    # Run build/install commands
    log "Installing dependencies..."
    (cd "$release_dir" && npm install --production 2>/dev/null || true)

    # Switch to new release
    switch_release "$release_dir"

    # Restart service
    log "Restarting application..."
    systemctl restart myapp 2>/dev/null || true

    # Cleanup
    cleanup_old_releases

    log "Deployment complete!"
}

# Parse arguments
case "${1:-deploy}" in
    deploy)   deploy "${2:-.}" ;;
    rollback) rollback ;;
    *)        echo "Usage: $SCRIPT_NAME {deploy|rollback} [source]" ;;
esac

Example 4: Log Processing

Analyze and report on log files.

#!/usr/bin/env bash
#
# log-analyzer.sh - Analyze access logs and generate reports
#
# Usage: log-analyzer.sh [logfile]
#
# Expects logs in the common/combined access-log format ($1=client IP,
# $4=timestamp, $7=URL, $9=status code) -- e.g. nginx/Apache defaults.
set -euo pipefail

# Log to analyze; first argument or the nginx default.
readonly LOG_FILE="${1:-/var/log/nginx/access.log}"

log() { echo "[$(date '+%H:%M:%S')] $*"; }   # timestamped helper (for extensions)

# Print a summary report (totals, status codes, top IPs/URLs, error rate)
# for LOG_FILE on stdout. Returns non-zero if the log file is missing.
analyze_logs() {
    # 'return' (not 'exit') so the caller's '|| echo "Analysis failed"'
    # fallback actually runs; 'exit' would terminate the whole script.
    [[ -f "$LOG_FILE" ]] || { echo "Log file not found: $LOG_FILE"; return 1; }

    echo "=== Log Analysis Report ==="
    echo "File: $LOG_FILE"
    echo "Generated: $(date)"
    echo ""

    # Total requests. Declaration split from assignment so a wc failure
    # is not masked by 'local'.
    local total
    total=$(wc -l < "$LOG_FILE")
    echo "Total Requests: $total"
    echo ""

    # Requests per status code (field 9 in combined log format)
    echo "--- Status Codes ---"
    awk '{print $9}' "$LOG_FILE" | sort | uniq -c | sort -rn | head -10
    echo ""

    # Top 10 client IPs (field 1)
    echo "--- Top 10 IP Addresses ---"
    awk '{print $1}' "$LOG_FILE" | sort | uniq -c | sort -rn | head -10
    echo ""

    # Top 10 requested URLs (field 7)
    echo "--- Top 10 URLs ---"
    awk '{print $7}' "$LOG_FILE" | sort | uniq -c | sort -rn | head -10
    echo ""

    # Requests per hour: hour is the 2nd ':'-field of [dd/Mon/yyyy:HH:MM:SS
    echo "--- Requests per Hour ---"
    awk '{print $4}' "$LOG_FILE" | cut -d: -f2 | sort | uniq -c
    echo ""

    # Error rate (status >= 400). Computed with awk instead of bc: bc may
    # not be installed, and it emits a divide-by-zero error on empty logs.
    local errors
    errors=$(awk '$9 >= 400' "$LOG_FILE" | wc -l)
    local error_rate="0.00"
    if (( total > 0 )); then
        error_rate=$(awk -v e="$errors" -v t="$total" \
            'BEGIN { printf "%.2f", e * 100 / t }')
    fi
    echo "Error Rate: $error_rate% ($errors errors)"
}

# Run with error handling. Do NOT blanket-redirect stderr to /dev/null as
# the original did: that hides every diagnostic (awk/sort/wc failures)
# and makes "Analysis failed" impossible to debug.
analyze_logs || echo "Analysis failed"

Example 5: API Integration

Interact with REST APIs using curl.

#!/usr/bin/env bash
#
# api-client.sh - REST API client utilities
#
# Usage: api-client.sh {users|create|health}
# Required env: API_TOKEN (bearer token). Optional env: API_BASE.
set -euo pipefail

# Base URL; override via the API_BASE environment variable.
readonly API_BASE="${API_BASE:-https://api.example.com}"
# ':?' aborts immediately with this message if API_TOKEN is unset/empty.
readonly API_TOKEN="${API_TOKEN:?API_TOKEN required}"

# HTTP helper
api_request() {
    local method="$1"
    local endpoint="$2"
    local data="${3:-}"

    local curl_opts=(
        -s
        -X "$method"
        -H "Authorization: Bearer $API_TOKEN"
        -H "Content-Type: application/json"
    )

    [[ -n "$data" ]] && curl_opts+=(-d "$data")

    local response
    response=$(curl "${curl_opts[@]}" "$API_BASE$endpoint")
    local status=$?

    if [[ $status -ne 0 ]]; then
        echo "Request failed" >&2
        return 1
    fi

    echo "$response"
}

# GET request
get() {
    api_request GET "$1"
}

# POST request
post() {
    api_request POST "$1" "$2"
}

# List users
list_users() {
    get "/users" | jq -r '.[] | "\\(.id): \\(.name)"'
}

# Create user
create_user() {
    local name="$1"
    local email="$2"

    local payload=$(jq -n --arg n "$name" --arg e "$email" \\
        '{name: $n, email: $e}')

    post "/users" "$payload"
}

# Health check
health_check() {
    if get "/health" | jq -e '.status == "ok"' >/dev/null; then
        echo "API is healthy"
        return 0
    else
        echo "API is unhealthy"
        return 1
    fi
}

# Run if executed directly
case "${1:-}" in
    users)  list_users ;;
    create) create_user "$2" "$3" ;;
    health) health_check ;;
    *)      echo "Usage: $0 {users|create|health}" ;;
esac

Common Patterns Summary

Pattern Used In Purpose
set -euo pipefail All scripts Error handling
trap cleanup EXIT Backup Resource cleanup
log() { ... } All scripts Consistent logging
${VAR:-default} All scripts Safe defaults
getopts/case Backup, Monitor Argument parsing
readonly All scripts Constants

Congratulations!

You've completed the Bash Scripting Course! You now have the skills to:

  • Write professional, maintainable scripts
  • Handle errors gracefully with proper logging
  • Parse arguments and configuration
  • Test your scripts for reliability
  • Automate real-world tasks like backups and deployments

Keep practicing: The best way to improve is to write more scripts. Start with small automation tasks and gradually tackle larger projects!

What's Next?

Continue your journey:

  • Explore the Advanced Topics module for regex, sed, and awk
  • Build your own script library from the templates
  • Set up CI/CD for your scripts with automated testing
  • Contribute to open-source projects using your new skills