Takes a file called cake_layers.png
Chops it into 300x154 tiles and outputs the files cl_0.png -> cl_9.png
#!/bin/bash
# Slice cake_layers.png into ten 300x154 strips, one per layer:
# cl_0.png .. cl_9.png (strip i starts at vertical offset i*154).
# Builds the crop offsets directly in a loop instead of generating
# command strings with awk and piping them into `bash`, which is
# eval-like and breaks if any generated text contains shell metacharacters.
for i in {0..9}; do
  convert cake_layers.png -crop "300x154+0+$((i * 154))" "cl_${i}.png"
done
#!/bin/bash
# NOTE(review): exact duplicate of the snippet above — presumably a paste
# artifact; confirm before removing one copy.
# Generates one `convert cake_layers.png -crop 300x154+0+<i*154> cl_<i>.png`
# command per layer (i = 0..9) and pipes the generated command strings into
# bash for execution.
seq 0 9 | awk "{ print \"convert cake_layers.png -crop 300x154+0+\" \$0*154 \" cl_\" \$0 \".png\" }" | bash
awk '{ split($1, ar, "="); sum += ar[2]; } END { print sum; }' < ab2.txt
col1=`echo $s1 | awk '{print $1}'` col2=`echo $s1 | awk '{print $2}'`
ls | awk '{ split($1, ar, "_"); print ar[1] }' | uniq
Awk for http logs. Find all 4xx errors in today's log:
head -100 2014-02-13.log | awk -v FS='\t' '{ print $6,$0 }' | grep ^40*
Alternate way of finding 500 errors:
cat /log/nginx_access/current | awk '$9 == "500" { print $0 }' | less
start=$(date --date '5 apr 2014 0:00' +%s) stop=$(date --date '5 apr 2014 23:00' +%s) #These are expressed in seconds. You can use a for loop on their values, increasing them by 1 hour (3600 seconds): for t in $(seq ${start} 3600 ${stop}) do d=$(date --date @${t} +'%d/%b/%Y:%H') d_pretty=$(date --date @${t} +'%d-%b-%Y-%H') #echo $d grep $d 2014-04-05.log | cut -f 8 | sort | uniq -c| sort -rn | head -20 > out_$d_pretty.txt done
grep -c RuntimeError 2015-10-*