Skip to content

Log Analysis

This chapter covers log analysis scripts for monitoring application logs, system logs, and security logs. These scripts are essential for troubleshooting and security analysis.


Terminal window
# Common log file locations — exact paths vary by distribution and by
# how each service is configured (check logrotate / service config).
# System logs
/var/log/syslog # System messages (Debian/Ubuntu)
/var/log/messages # System messages (RHEL/CentOS)
/var/log/kern.log # Kernel messages
/var/log/dmesg # Boot messages
# Application logs
/var/log/nginx/ # Nginx web server
/var/log/apache2/ # Apache web server
/var/log/mysql/ # MySQL database
/var/log/postgresql/ # PostgreSQL
# Application-specific
/var/log/app/application.log

#!/usr/bin/env bash
# search_logs.sh - Search a log file for a pattern.
#
# Usage: search_logs.sh [log_file] [pattern]
#   log_file - file to search (default: /var/log/syslog)
#   pattern  - pattern to look for (default: ERROR)
LOG_FILE="${1:-/var/log/syslog}"
PATTERN="${2:-ERROR}"

# Show matching lines (case-insensitive).
# '--' stops option parsing so a pattern beginning with '-' is safe.
grep -i -- "$PATTERN" "$LOG_FILE"

# Count occurrences. Use -i here too: the original counted
# case-sensitively, so the count could disagree with the listing above.
grep -ic -- "$PATTERN" "$LOG_FILE"

# Show 3 lines of context before and after each match.
grep -i -B3 -A3 -- "$PATTERN" "$LOG_FILE"
#!/usr/bin/env bash
# monitor_logs.sh - Follow a log file in real time.
#
# Usage: monitor_logs.sh [log_file]
#   log_file - file to follow (default: /var/log/syslog)
#
# NOTE: each command below is an ALTERNATIVE; 'tail -f' never exits,
# so only the first uncommented command actually runs.
LOG_FILE="${1:-/var/log/syslog}"

# Plain follow.
tail -f "$LOG_FILE"

# With filtering. --line-buffered flushes each match immediately;
# without it grep block-buffers when writing to a pipe and output stalls.
tail -f "$LOG_FILE" | grep --line-buffered "ERROR"

# Multiple patterns (same buffering fix applied for consistency).
tail -f "$LOG_FILE" | grep --line-buffered -E "ERROR|WARN|CRITICAL"

#!/usr/bin/env bash
# analyze_access_log.sh - Summarize a web server access log.
#
# Usage: analyze_access_log.sh [log_file]
#   log_file - access log to analyze (default: /var/log/nginx/access.log)
#
# NOTE(review): field positions assume the default combined log format;
# a custom log_format directive will shift them — verify against config.
set -euo pipefail

LOG_FILE="${1:-/var/log/nginx/access.log}"

# Fail early with a clear message instead of an opaque mid-report error.
[[ -r "$LOG_FILE" ]] || { echo "Error: cannot read $LOG_FILE" >&2; exit 1; }

echo "=== Access Log Analysis ==="
echo "Log file: $LOG_FILE"
echo ""

echo "Total requests:"
wc -l < "$LOG_FILE"

echo ""
echo "Requests by status code:"
awk '{print $9}' "$LOG_FILE" | sort | uniq -c | sort -rn

# 'head' closing the pipe early can kill 'sort' with SIGPIPE on large
# logs; under pipefail that would abort the script — '|| true' guards it.
echo ""
echo "Top 10 URLs:"
awk '{print $7}' "$LOG_FILE" | sort | uniq -c | sort -rn | head -10 || true

echo ""
echo "Top 10 IPs:"
awk '{print $1}' "$LOG_FILE" | sort | uniq -c | sort -rn | head -10 || true

echo ""
echo "Top 10 User Agents:"
# The user agent is the 6th double-quote-delimited field in combined format.
awk -F'"' '{print $6}' "$LOG_FILE" | sort | uniq -c | sort -rn | head -10 || true
#!/usr/bin/env bash
# analyze_error_log.sh - Summarize a web server error log.
#
# Usage: analyze_error_log.sh [log_file]
#   log_file - error log to analyze (default: /var/log/nginx/error.log)
set -euo pipefail

LOG_FILE="${1:-/var/log/nginx/error.log}"

[[ -r "$LOG_FILE" ]] || { echo "Error: cannot read $LOG_FILE" >&2; exit 1; }

echo "=== Error Log Analysis ==="

# grep exits 1 when nothing matches, which 'set -e'/pipefail would turn
# into a script abort on a quiet log — '|| true' keeps the report going.
echo "Errors by level:"
grep -oE '\[error\]|\[warn\]|\[notice\]' "$LOG_FILE" | sort | uniq -c || true

echo ""
echo "Most common errors:"
# NOTE(review): this pattern mixes Apache-style ("client denied by server
# configuration") and nginx-style ("no live upstreams") messages — trim to
# the server actually in use.
grep -oE 'client denied by server configuration|file not found|no live upstreams|upstream timed out' \
  "$LOG_FILE" | sort | uniq -c | sort -rn | head -10 || true

echo ""
echo "Recent errors:"
tail -20 "$LOG_FILE"

#!/usr/bin/env bash
# analyze_failed_logins.sh - Summarize failed login attempts from auth logs.
#
# Usage: analyze_failed_logins.sh [log_file]
#   log_file - auth log to analyze (default: /var/log/auth.log)
set -euo pipefail

LOG_FILE="${1:-/var/log/auth.log}"

[[ -r "$LOG_FILE" ]] || { echo "Error: cannot read $LOG_FILE" >&2; exit 1; }

echo "=== Failed Login Analysis ==="
echo ""

# grep -c prints 0 but exits 1 when there are no matches; without the
# '|| true' guard, 'set -e' would kill the script on a clean log.
echo "Total failed logins:"
grep -c "Failed password" "$LOG_FILE" || true

# Fixed field positions ($11/$13) break on "invalid user" lines, which
# insert two extra words — locate the fields relative to the "for" and
# "from" keywords instead so both line shapes are handled.
echo ""
echo "Failed logins by user:"
grep "Failed password" "$LOG_FILE" | \
  awk '{for(i=1;i<=NF;i++) if($i=="for"){print ($(i+1)=="invalid") ? $(i+3) : $(i+1); break}}' | \
  sort | uniq -c | sort -rn | head -10 || true

echo ""
echo "Failed logins by IP:"
grep "Failed password" "$LOG_FILE" | \
  awk '{for(i=1;i<=NF;i++) if($i=="from"){print $(i+1); break}}' | \
  sort | uniq -c | sort -rn | head -10 || true

echo ""
echo "Recent failed logins:"
grep "Failed password" "$LOG_FILE" | tail -10 || true
#!/usr/bin/env bash
# analyze_ssh_attacks.sh - Summarize SSH brute-force activity from auth logs.
#
# Usage: analyze_ssh_attacks.sh [log_file]
#   log_file - auth log to analyze (default: /var/log/auth.log)
set -euo pipefail

LOG_FILE="${1:-/var/log/auth.log}"

[[ -r "$LOG_FILE" ]] || { echo "Error: cannot read $LOG_FILE" >&2; exit 1; }

echo "=== SSH Attack Analysis ==="

# All greps are guarded with '|| true': a zero-match grep exits 1, and
# under 'set -e'/pipefail that would abort the report on a quiet log.

# The attacker IP is the word after "from"; a fixed $13 breaks on lines
# without the "invalid user" insertion, so locate the field by keyword.
echo "Unique attacking IPs:"
grep "Failed password for" "$LOG_FILE" | \
  awk '{for(i=1;i<=NF;i++) if($i=="from"){print $(i+1); break}}' | sort -u | wc -l || true

echo ""
echo "Top 10 attacking IPs:"
grep "Failed password for" "$LOG_FILE" | \
  awk '{for(i=1;i<=NF;i++) if($i=="from"){print $(i+1); break}}' | \
  sort | uniq -c | sort -rn | head -10 || true

# On "invalid user" lines the username is the word after "invalid user".
echo ""
echo "Most targeted usernames:"
grep "Failed password for invalid user" "$LOG_FILE" | \
  awk '{for(i=1;i<=NF;i++) if($i=="user"){print $(i+1); break}}' | \
  sort | uniq -c | sort -rn | head -10 || true

# Heuristic only: -B5 shows failures that happen to precede an accepted
# login in the file — it does not correlate by IP or account.
echo ""
echo "Successful logins after attack:"
grep -B5 "Accepted password" "$LOG_FILE" | \
  grep "Failed password" | head -5 || true

#!/usr/bin/env bash
# analyze_app_log.sh - Parse application logs (JSON and bracketed-level lines).
#
# Usage: analyze_app_log.sh [log_file]
#   log_file - application log to analyze (default: /var/log/app.log)
set -euo pipefail

LOG_FILE="${1:-/var/log/app.log}"

[[ -r "$LOG_FILE" ]] || { echo "Error: cannot read $LOG_FILE" >&2; exit 1; }

echo "=== Application Log Analysis ==="

# All greps are guarded with '|| true': a zero-match grep exits 1, and
# 'set -e'/pipefail would otherwise abort the whole report.

echo "JSON log parsing example:"
if command -v jq &>/dev/null; then
  # NOTE(review): assumes each JSON log line starts with {"level": — lines
  # with a different key order are skipped; confirm the log schema.
  grep '{"level":' "$LOG_FILE" | jq -r '.level' | sort | uniq -c || true
else
  echo "jq not installed, using basic parsing"
fi

echo ""
echo "Log levels:"
grep -oE '\[INFO\]|\[WARN\]|\[ERROR\]|\[DEBUG\]' "$LOG_FILE" | \
  sort | uniq -c || true

echo ""
echo "Most common errors:"
# Strip everything up to and including "ERROR" so identical messages with
# different timestamps aggregate together.
grep "ERROR" "$LOG_FILE" | \
  sed 's/.*ERROR//' | sort | uniq -c | sort -rn | head -10 || true
#!/usr/bin/env bash
# parse_timestamps.sh - Extract timestamps and find traffic peaks.
#
# Usage: parse_timestamps.sh [log_file]
#   log_file - log with "YYYY-MM-DD HH:MM" timestamps (default: /var/log/app.log)
set -euo pipefail

LOG_FILE="${1:-/var/log/app.log}"

[[ -r "$LOG_FILE" ]] || { echo "Error: cannot read $LOG_FILE" >&2; exit 1; }

echo "=== Timestamp Analysis ==="

# Count log lines per hour-of-day ('|| true' keeps 'set -e' from aborting
# when the log contains no matching timestamps).
echo "Requests per hour:"
grep -oE '[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}' "$LOG_FILE" | \
  cut -d' ' -f2 | cut -d':' -f1 | sort | uniq -c || true

# Peak hours: same extraction, then sort hours by count descending.
# (The original pipeline here was garbled — 'cut -d: -', 'sort -f1' and
# 'sortrn' are not valid commands; this is the intended reconstruction.)
echo ""
echo "Peak hours:"
grep -oE '[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}' "$LOG_FILE" | \
  cut -d' ' -f2 | cut -d':' -f1 | sort | uniq -c | sort -rn | head -5 || true

#!/usr/bin/env bash
# alert_errors.sh - Email an alert when recent errors exceed a threshold.
#
# Usage: alert_errors.sh [log_file]
# Env:   ALERT_THRESHOLD - error count that triggers an alert (default: 10)
#        ALERT_EMAIL     - alert recipient (default: admin@example.com)
#        LAST_CHECK      - timestamp prefix to scan (default: one minute ago)
set -euo pipefail

LOG_FILE="${1:-/var/log/app.log}"
ALERT_THRESHOLD="${ALERT_THRESHOLD:-10}"
ALERT_EMAIL="${ALERT_EMAIL:-admin@example.com}"

# NOTE(review): 'date -d' is GNU-specific; BSD/macOS date needs '-v-1M'.
LAST_CHECK="${LAST_CHECK:-$(date -d '1 minute ago' '+%Y-%m-%d %H:%M')}"

# Guard the substitution: when either grep finds nothing it exits 1, and
# without '|| true' 'set -e' would kill the script instead of counting 0.
error_count=$(grep -- "$LAST_CHECK" "$LOG_FILE" | grep -c "ERROR" || true)

if (( error_count >= ALERT_THRESHOLD )); then
  echo "ALERT: $error_count errors detected in last minute"
  # Only attempt delivery when a mailer is actually installed; warn otherwise.
  if command -v mail >/dev/null; then
    echo "$error_count errors detected in $LOG_FILE" | \
      mail -s "Log Alert: Errors Detected" "$ALERT_EMAIL"
  else
    echo "Warning: 'mail' not installed, alert not sent" >&2
  fi
fi

#!/usr/bin/env bash
# archive_logs.sh - Compress logs older than a retention window, then truncate.
#
# Usage: archive_logs.sh [log_dir]
# Env:   ARCHIVE_DIR  - destination for archives (default: /var/log/archive)
#        DAYS_TO_KEEP - age threshold in days (default: 30)
set -euo pipefail

LOG_DIR="${1:-/var/log}"
ARCHIVE_DIR="${ARCHIVE_DIR:-/var/log/archive}"
DAYS_TO_KEEP="${DAYS_TO_KEEP:-30}"

mkdir -p "$ARCHIVE_DIR"

# NUL-delimited find/read is safe for filenames containing spaces/newlines.
while IFS= read -r -d '' logfile; do
  filename=$(basename "$logfile")
  # Strip the .log suffix so archives read "name_YYYYMMDD.gz".
  # (The original built this path with a bogus '$(unknown)' substitution,
  # which would have failed at runtime.)
  archive_file="$ARCHIVE_DIR/${filename%.log}_$(date +%Y%m%d).gz"
  echo "Archiving: $logfile -> $archive_file"
  # Compress first; 'set -e' aborts before truncation if gzip fails.
  # NOTE(review): lines written between gzip and truncation are lost —
  # fine for idle logs; use logrotate's copytruncate for busy ones.
  gzip -c "$logfile" > "$archive_file"
  : > "$logfile"
done < <(find "$LOG_DIR" -name "*.log" -type f -mtime +"$DAYS_TO_KEEP" -print0)

In this chapter, you learned:

  • ✅ Understanding log files and locations
  • ✅ Basic log search and filtering
  • ✅ Real-time log monitoring
  • ✅ Nginx/Apache log analysis
  • ✅ Security log analysis
  • ✅ Application log parsing
  • ✅ Timestamp analysis
  • ✅ Automated log alerts
  • ✅ Log archival

Continue to the next chapter to learn about Network Diagnostics Scripts.


Previous Chapter: Backup Scripts Next Chapter: Network Diagnostics Scripts