These scripts, while not fitting into the text of this document, do illustrate some interesting shell programming techniques. They are useful, too. Have fun analyzing and running them.
Example A-1. mailformat: Formatting an e-mail message
#!/bin/bash
# mail-format.sh (ver. 1.1): Format e-mail messages.

# Gets rid of carets, tabs, and also folds excessively long lines.

# =================================================================
#                 Standard Check for Script Argument(s)
ARGS=1
E_BADARGS=65
E_NOFILE=66

if [ $# -ne $ARGS ]    # Correct number of arguments passed to script?
then
  echo "Usage: `basename $0` filename"
  exit $E_BADARGS
fi

if [ -f "$1" ]         # Check if file exists.
then
  file_name=$1
else
  echo "File \"$1\" does not exist."
  exit $E_NOFILE
fi
# =================================================================

MAXWIDTH=70            # Width to fold excessively long lines to.

# ---------------------------------
# A variable can hold a sed script.
sedscript='s/^>//
s/^ *>//
s/^ *//
s/ *//'
# ---------------------------------

#  Delete carets and tabs at beginning of lines,
#+ then fold lines to $MAXWIDTH characters.
sed "$sedscript" $1 | fold -s --width=$MAXWIDTH
                       #  -s option to "fold"
                       #+ breaks lines at whitespace, if possible.


#  This script was inspired by an article in a well-known trade journal
#+ extolling a 164K MS Windows utility with similar functionality.
#
#  A nice set of text processing utilities and an efficient
#+ scripting language provide an alternative to bloated executables.

exit
Example A-2. rn: A simple-minded file renaming utility
This script is a modification of Example 15-22.
#! /bin/bash
#
# Very simpleminded filename "rename" utility (based on "lowercase.sh").
#
#  The "ren" utility, by Vladimir Lanin (lanin@csd2.nyu.edu),
#+ does a much better job of this.


ARGS=2
E_BADARGS=65
ONE=1                   # For getting singular/plural right (see below).

if [ $# -ne "$ARGS" ]
then
  echo "Usage: `basename $0` old-pattern new-pattern"
  # As in "rn gif jpg", which renames all gif files in working directory to jpg.
  exit $E_BADARGS
fi

number=0                # Keeps track of how many files actually renamed.


for filename in *$1*    # Traverse all matching files in directory.
do
   if [ -f "$filename" ]  # If finds match...
   then
     fname=`basename $filename`            # Strip off path.
     n=`echo $fname | sed -e "s/$1/$2/"`   # Substitute new for old in filename.
     mv $fname $n                          # Rename.
     let "number += 1"
   fi
done

if [ "$number" -eq "$ONE" ]                # For correct grammar.
then
 echo "$number file renamed."
else
 echo "$number files renamed."
fi

exit 0


# Exercises:
# ---------
# What type of files will this not work on?
# How can this be fixed?
#
#  Rewrite this script to process all the files in a directory
#+ containing spaces in their names, and to rename them,
#+ substituting an underscore for each space.
Example A-3. blank-rename: Renames filenames containing blanks
This is an even simpler-minded version of the previous script.
#! /bin/bash
# blank-rename.sh
#
# Substitutes underscores for blanks in all the filenames in a directory.

ONE=1                   # For getting singular/plural right (see below).
number=0                # Keeps track of how many files actually renamed.
FOUND=0                 # Successful return value.

for filename in *       # Traverse all files in directory.
do
     echo "$filename" | grep -q " "        #  Check whether filename
     if [ $? -eq $FOUND ]                  #+ contains space(s).
     then
       fname=$filename                     # Yes, this filename needs work.
       n=`echo $fname | sed -e "s/ /_/g"`  # Substitute underscore for blank.
       mv "$fname" "$n"                    # Do the actual renaming.
       let "number += 1"
     fi
done

if [ "$number" -eq "$ONE" ]                # For correct grammar.
then
 echo "$number file renamed."
else
 echo "$number files renamed."
fi

exit 0
Example A-4. encryptedpw: Uploading to an ftp site, using a locally encrypted password
#!/bin/bash

# Example "ex72.sh" modified to use encrypted password.

#  Note that this is still rather insecure,
#+ since the decrypted password is sent in the clear.
#  Use something like "ssh" if this is a concern.

E_BADARGS=65

if [ -z "$1" ]
then
  echo "Usage: `basename $0` filename"
  exit $E_BADARGS
fi

Username=bozo           # Change to suit.
pword=/home/bozo/secret/password_encrypted.file
# File containing encrypted password.

Filename=`basename $1`  # Strips pathname out of file name.

Server="XXX"
Directory="YYY"         # Change above to actual server name & directory.


Password=`cruft <$pword`  # Decrypt password.
#  Uses the author's own "cruft" file encryption package,
#+ based on the classic "onetime pad" algorithm,
#+ and obtainable from:
#+   Primary-site:   ftp://ibiblio.org/pub/Linux/utils/file
#+                   cruft-0.2.tar.gz [16k]


ftp -n $Server <<End-Of-Session
user $Username $Password
binary
bell
cd $Directory
put $Filename
bye
End-Of-Session
# -n option to "ftp" disables auto-logon.
# Note that "bell" rings 'bell' after each file transfer.

exit 0
Example A-5. copy-cd: Copying a data CD
#!/bin/bash
# copy-cd.sh: copying a data CD

CDROM=/dev/cdrom                      # CD ROM device
OF=/home/bozo/projects/cdimage.iso    # output file
#   /xxxx/xxxxxxx/                      Change to suit your system.
BLOCKSIZE=2048
SPEED=2                               # May use higher speed if supported.
DEVICE=cdrom
# DEVICE="0,0" on older versions of cdrecord.

echo; echo "Insert source CD, but do *not* mount it."
echo "Press ENTER when ready. "
read ready                            # Wait for input, $ready not used.

echo; echo "Copying the source CD to $OF."
echo "This may take a while. Please be patient."

dd if=$CDROM of=$OF bs=$BLOCKSIZE     # Raw device copy.


echo; echo "Remove data CD."
echo "Insert blank CDR."
echo "Press ENTER when ready. "
read ready                            # Wait for input, $ready not used.

echo "Copying $OF to CDR."

cdrecord -v -isosize speed=$SPEED dev=$DEVICE $OF
# Uses Joerg Schilling's "cdrecord" package (see its docs).
# http://www.fokus.gmd.de/nthp/employees/schilling/cdrecord.html


echo; echo "Done copying $OF to CDR on device $CDROM."

echo "Do you want to erase the image file (y/n)? "  # Probably a huge file.
read answer

case "$answer" in
[yY]) rm -f $OF
      echo "$OF erased."
      ;;
*)    echo "$OF not erased.";;
esac

echo

# Exercise:
# Change the above "case" statement to also accept "yes" and "Yes" as input.

exit 0
Example A-6. Collatz series
#!/bin/bash
# collatz.sh

# The notorious "hailstone" or Collatz series.
# -------------------------------------------
# 1) Get the integer "seed" from the command line.
# 2) NUMBER <--- seed
# 3) Print NUMBER.
# 4)  If NUMBER is even, divide by 2, or
# 5)+ if odd, multiply by 3 and add 1.
# 6) NUMBER <--- result
# 7) Loop back to step 3 (for specified number of iterations).
#
#  The theory is that every sequence,
#+ no matter how large the initial value,
#+ eventually settles down to repeating "4,2,1..." cycles,
#+ even after fluctuating through a wide range of values.
#
#  This is an instance of an "iterate,"
#+ an operation that feeds its output back into the input.
#  Sometimes the result is a "chaotic" series.


MAX_ITERATIONS=200
# For large seed numbers (>32000), increase MAX_ITERATIONS.

h=${1:-$$}                      #  Seed.
                                #  Use $$ (the script's process ID) as seed,
                                #+ if not specified as command-line arg.

echo
echo "C($h) --- $MAX_ITERATIONS Iterations"
echo

for ((i=1; i<=MAX_ITERATIONS; i++))
do

  echo -n "$h "
  #          ^^^^^
  #           tab

  let "remainder = h % 2"
  if [ "$remainder" -eq 0 ]     # Even?
  then
    let "h /= 2"                # Divide by 2.
  else
    let "h = h*3 + 1"           # Multiply by 3 and add 1.
  fi


  COLUMNS=10                    # Output 10 values per line.
  let "line_break = i % $COLUMNS"
  if [ "$line_break" -eq 0 ]
  then
    echo
  fi

done

echo

#  For more information on this mathematical function,
#+ see _Computers, Pattern, Chaos, and Beauty_, by Pickover, p. 185 ff.,
#+ as listed in the bibliography.

exit 0
Example A-7. days-between: Days between two dates
1 #!/bin/bash 2 # days-between.sh: Number of days between two dates. 3 # Usage: ./days-between.sh [M]M/[D]D/YYYY [M]M/[D]D/YYYY 4 # 5 # Note: Script modified to account for changes in Bash, v. 2.05b +, 6 #+ that closed the loophole permitting large negative 7 #+ integer return values. 8 9 ARGS=2 # Two command line parameters expected. 10 E_PARAM_ERR=65 # Param error. 11 12 REFYR=1600 # Reference year. 13 CENTURY=100 14 DIY=365 15 ADJ_DIY=367 # Adjusted for leap year + fraction. 16 MIY=12 17 DIM=31 18 LEAPCYCLE=4 19 20 MAXRETVAL=255 # Largest permissible 21 #+ positive return value from a function. 22 23 diff= # Declare global variable for date difference. 24 value= # Declare global variable for absolute value. 25 day= # Declare globals for day, month, year. 26 month= 27 year= 28 29 30 Param_Error () # Command line parameters wrong. 31 { 32 echo "Usage: `basename $0` [M]M/[D]D/YYYY [M]M/[D]D/YYYY" 33 echo " (date must be after 1/3/1600)" 34 exit $E_PARAM_ERR 35 } 36 37 38 Parse_Date () # Parse date from command line params. 39 { 40 month=${1%%/**} 41 dm=${1%/**} # Day and month. 42 day=${dm#*/} 43 let "year = `basename $1`" # Not a filename, but works just the same. 44 } 45 46 47 check_date () # Checks for invalid date(s) passed. 48 { 49 [ "$day" -gt "$DIM" ] || [ "$month" -gt "$MIY" ] || 50 [ "$year" -lt "$REFYR" ] && Param_Error 51 # Exit script on bad value(s). 52 # Uses or-list / and-list. 53 # 54 # Exercise: Implement more rigorous date checking. 55 } 56 57 58 strip_leading_zero () # Better to strip possible leading zero(s) 59 { #+ from day and/or month 60 return ${1#0} #+ since otherwise Bash will interpret them 61 } #+ as octal values (POSIX.2, sect 2.9.2.1). 62 63 64 day_index () # Gauss' Formula: 65 { # Days from March 1, 1600 to date passed as param. 66 # ^^^^^^^^^^^^^ 67 day=$1 68 month=$2 69 year=$3 70 71 let "month = $month - 2" 72 if [ "$month" -le 0 ] 73 then 74 let "month += 12" 75 let "year -= 1" 76 fi 77 78 let "year -= $REFYR" 79 let "indexyr = $year / $CENTURY" 80 81 82 let "Days = $DIY*$year + $year/$LEAPCYCLE - $indexyr \ 83 + $indexyr/$LEAPCYCLE + $ADJ_DIY*$month/$MIY + $day - $DIM" 84 # For an in-depth explanation of this algorithm, see 85 #+ http://weblogs.asp.net/pgreborio/archive/2005/01/06/347968.aspx 86 87 88 echo $Days 89 90 } 91 92 93 calculate_difference () # Difference between two day indices. 94 { 95 let "diff = $1 - $2" # Global variable. 96 } 97 98 99 abs () # Absolute value 100 { # Uses global "value" variable. 101 if [ "$1" -lt 0 ] # If negative 102 then #+ then 103 let "value = 0 - $1" #+ change sign, 104 else #+ else 105 let "value = $1" #+ leave it alone. 106 fi 107 } 108 109 110 111 if [ $# -ne "$ARGS" ] # Require two command line params. 112 then 113 Param_Error 114 fi 115 116 Parse_Date $1 117 check_date $day $month $year # See if valid date. 118 119 strip_leading_zero $day # Remove any leading zeroes 120 day=$? #+ on day and/or month. 121 strip_leading_zero $month 122 month=$? 123 124 let "date1 = `day_index $day $month $year`" 125 126 127 Parse_Date $2 128 check_date $day $month $year 129 130 strip_leading_zero $day 131 day=$? 132 strip_leading_zero $month 133 month=$? 134 135 date2=$(day_index $day $month $year) # Command substitution. 136 137 138 calculate_difference $date1 $date2 139 140 abs $diff # Make sure it's positive. 141 diff=$value 142 143 echo $diff 144 145 exit 0 146 147 # Compare this script with 148 #+ the implementation of Gauss' Formula in a C program at: 149 #+ http://buschencrew.hypermart.net/software/datedif |
Example A-8. Making a dictionary
#!/bin/bash
# makedict.sh  [make dictionary]

# Modification of /usr/sbin/mkdict (/usr/sbin/cracklib-forman) script.
# Original script copyright 1993, by Alec Muffett.
#
#  This modified script included in this document in a manner
#+ consistent with the "LICENSE" document of the "Crack" package
#+ that the original script is a part of.

#  This script processes text files to produce a sorted list
#+ of words found in the files.
#  This may be useful for compiling dictionaries
#+ and for other lexicographic purposes.


E_BADARGS=65

if [ ! -r "$1" ]                    #  Need at least one
then                                #+ valid file argument.
  echo "Usage: $0 files-to-process"
  exit $E_BADARGS
fi


# SORT="sort"                       #  No longer necessary to define options
                                    #+ to sort. Changed from original script.

cat $* |                            #  Contents of specified files to stdout.
        tr A-Z a-z |                #  Convert to lowercase.
        tr ' ' '\012' |             #  New: change spaces to newlines.
#       tr -cd '\012[a-z][0-9]' |   #  Get rid of everything non-alphanumeric
                                    #+ (in original script).
        tr -c '\012a-z' '\012' |    #  Rather than deleting non-alpha chars,
                                    #+ change them to newlines.
        sort |                      #  $SORT options unnecessary now.
        uniq |                      #  Remove duplicates.
        grep -v '^#' |              #  Delete lines beginning with a hashmark.
        grep -v '^$'                #  Delete blank lines.

exit 0
Example A-9. Soundex conversion
1 #!/bin/bash 2 # soundex.sh: Calculate "soundex" code for names 3 4 # ======================================================= 5 # Soundex script 6 # by 7 # Mendel Cooper 8 # thegrendel@theriver.com 9 # 23 January, 2002 10 # 11 # Placed in the Public Domain. 12 # 13 # A slightly different version of this script appeared in 14 #+ Ed Schaefer's July, 2002 "Shell Corner" column 15 #+ in "Unix Review" on-line, 16 #+ http://www.unixreview.com/documents/uni1026336632258/ 17 # ======================================================= 18 19 20 ARGCOUNT=1 # Need name as argument. 21 E_WRONGARGS=70 22 23 if [ $# -ne "$ARGCOUNT" ] 24 then 25 echo "Usage: `basename $0` name" 26 exit $E_WRONGARGS 27 fi 28 29 30 assign_value () # Assigns numerical value 31 { #+ to letters of name. 32 33 val1=bfpv # 'b,f,p,v' = 1 34 val2=cgjkqsxz # 'c,g,j,k,q,s,x,z' = 2 35 val3=dt # etc. 36 val4=l 37 val5=mn 38 val6=r 39 40 # Exceptionally clever use of 'tr' follows. 41 # Try to figure out what is going on here. 42 43 value=$( echo "$1" \ 44 | tr -d wh \ 45 | tr $val1 1 | tr $val2 2 | tr $val3 3 \ 46 | tr $val4 4 | tr $val5 5 | tr $val6 6 \ 47 | tr -s 123456 \ 48 | tr -d aeiouy ) 49 50 # Assign letter values. 51 # Remove duplicate numbers, except when separated by vowels. 52 # Ignore vowels, except as separators, so delete them last. 53 # Ignore 'w' and 'h', even as separators, so delete them first. 54 # 55 # The above command substitution lays more pipe than a plumber <g>. 56 57 } 58 59 60 input_name="$1" 61 echo 62 echo "Name = $input_name" 63 64 65 # Change all characters of name input to lowercase. 66 # ------------------------------------------------ 67 name=$( echo $input_name | tr A-Z a-z ) 68 # ------------------------------------------------ 69 # Just in case argument to script is mixed case. 70 71 72 # Prefix of soundex code: first letter of name. 73 # -------------------------------------------- 74 75 76 char_pos=0 # Initialize character position. 77 prefix0=${name:$char_pos:1} 78 prefix=`echo $prefix0 | tr a-z A-Z` 79 # Uppercase 1st letter of soundex. 80 81 let "char_pos += 1" # Bump character position to 2nd letter of name. 82 name1=${name:$char_pos} 83 84 85 # ++++++++++++++++++++++++++ Exception Patch +++++++++++++++++++++++++++++++++ 86 # Now, we run both the input name and the name shifted one char to the right 87 #+ through the value-assigning function. 88 # If we get the same value out, that means that the first two characters 89 #+ of the name have the same value assigned, and that one should cancel. 90 # However, we also need to test whether the first letter of the name 91 #+ is a vowel or 'w' or 'h', because otherwise this would bollix things up. 92 93 char1=`echo $prefix | tr A-Z a-z` # First letter of name, lowercased. 94 95 assign_value $name 96 s1=$value 97 assign_value $name1 98 s2=$value 99 assign_value $char1 100 s3=$value 101 s3=9$s3 # If first letter of name is a vowel 102 #+ or 'w' or 'h', 103 #+ then its "value" will be null (unset). 104 #+ Therefore, set it to 9, an otherwise 105 #+ unused value, which can be tested for. 106 107 108 if [[ "$s1" -ne "$s2" || "$s3" -eq 9 ]] 109 then 110 suffix=$s2 111 else 112 suffix=${s2:$char_pos} 113 fi 114 # ++++++++++++++++++++++ end Exception Patch +++++++++++++++++++++++++++++++++ 115 116 117 padding=000 # Use at most 3 zeroes to pad. 118 119 120 soun=$prefix$suffix$padding # Pad with zeroes. 121 122 MAXLEN=4 # Truncate to maximum of 4 chars. 
123 soundex=${soun:0:$MAXLEN} 124 125 echo "Soundex = $soundex" 126 127 echo 128 129 # The soundex code is a method of indexing and classifying names 130 #+ by grouping together the ones that sound alike. 131 # The soundex code for a given name is the first letter of the name, 132 #+ followed by a calculated three-number code. 133 # Similar sounding names should have almost the same soundex codes. 134 135 # Examples: 136 # Smith and Smythe both have a "S-530" soundex. 137 # Harrison = H-625 138 # Hargison = H-622 139 # Harriman = H-655 140 141 # This works out fairly well in practice, but there are numerous anomalies. 142 # 143 # 144 # The U.S. Census and certain other governmental agencies use soundex, 145 # as do genealogical researchers. 146 # 147 # For more information, 148 #+ see the "National Archives and Records Administration home page", 149 #+ http://www.nara.gov/genealogy/soundex/soundex.html 150 151 152 153 # Exercise: 154 # -------- 155 # Simplify the "Exception Patch" section of this script. 156 157 exit 0 |
Example A-10. Game of Life
1 #!/bin/bash 2 # life.sh: "Life in the Slow Lane" 3 # Version 0.2: Patched by Daniel Albers 4 #+ to allow non-square grids as input. 5 6 # ##################################################################### # 7 # This is the Bash script version of John Conway's "Game of Life". # 8 # "Life" is a simple implementation of cellular automata. # 9 # --------------------------------------------------------------------- # 10 # On a rectangular grid, let each "cell" be either "living" or "dead." # 11 # Designate a living cell with a dot, and a dead one with a blank space.# 12 # Begin with an arbitrarily drawn dot-and-blank grid, # 13 #+ and let this be the starting generation, "generation 0." # 14 # Determine each successive generation by the following rules: # 15 # 1) Each cell has 8 neighbors, the adjoining cells # 16 #+ left, right, top, bottom, and the 4 diagonals. # 17 # # 18 # 123 # 19 # 4*5 The * is the cell under consideration. # 20 # 678 # 21 # # 22 # 2) A living cell with either 2 or 3 living neighbors remains alive. # 23 SURVIVE=2 # 24 # 3) A dead cell with 3 living neighbors comes alive (a "birth"). # 25 BIRTH=3 # 26 # 4) All other cases result in a dead cell for the next generation. # 27 # ##################################################################### # 28 29 30 startfile=gen0 # Read the starting generation from the file "gen0" ... 31 # Default, if no other file specified when invoking script. 32 # 33 if [ -n "$1" ] # Specify another "generation 0" file. 34 then 35 startfile="$1" 36 fi 37 38 ############################################ 39 # Abort script if "startfile" not specified 40 #+ and 41 #+ default file "gen0" not present. 42 43 E_NOSTARTFILE=68 44 45 if [ ! -e "$startfile" ] 46 then 47 echo "Startfile \""$startfile"\" missing!" 48 exit $E_NOSTARTFILE 49 fi 50 ############################################ 51 52 53 ALIVE1=. 54 DEAD1=_ 55 # Represent living and dead cells in the start-up file. 56 57 # ---------------------------------------------------------- # 58 # This script uses a 10 x 10 grid (may be increased, 59 #+ but a large grid will slow execution). 60 ROWS=10 61 COLS=10 62 # Change above two variables to match grid size, as desired. 63 # ---------------------------------------------------------- # 64 65 GENERATIONS=10 # How many generations to cycle through. 66 # Adjust this upwards, 67 #+ if you have time on your hands. 68 69 NONE_ALIVE=85 # Exit status on premature bailout, 70 #+ if no cells left alive. 71 TRUE=0 72 FALSE=1 73 ALIVE=0 74 DEAD=1 75 76 avar= # Global; holds current generation. 77 generation=0 # Initialize generation count. 78 79 # ================================================================= 80 81 let "cells = $ROWS * $COLS" # How many cells. 82 83 # Arrays containing "cells." 84 declare -a initial 85 declare -a current 86 87 display () 88 { 89 90 alive=0 # How many cells alive at any given time. 91 # Initially zero. 92 93 declare -a arr 94 arr=( `echo "$1"` ) # Convert passed arg to array. 95 96 element_count=${#arr[*]} 97 98 local i 99 local rowcheck 100 101 for ((i=0; i<$element_count; i++)) 102 do 103 104 # Insert newline at end of each row. 105 let "rowcheck = $i % COLS" 106 if [ "$rowcheck" -eq 0 ] 107 then 108 echo # Newline. 109 echo -n " " # Indent. 110 fi 111 112 cell=${arr[i]} 113 114 if [ "$cell" = . ] 115 then 116 let "alive += 1" 117 fi 118 119 echo -n "$cell" | sed -e 's/_/ /g' 120 # Print out array and change underscores to spaces. 121 done 122 123 return 124 125 } 126 127 IsValid () # Test whether cell coordinate valid. 
128 { 129 130 if [ -z "$1" -o -z "$2" ] # Mandatory arguments missing? 131 then 132 return $FALSE 133 fi 134 135 local row 136 local lower_limit=0 # Disallow negative coordinate. 137 local upper_limit 138 local left 139 local right 140 141 let "upper_limit = $ROWS * $COLS - 1" # Total number of cells. 142 143 144 if [ "$1" -lt "$lower_limit" -o "$1" -gt "$upper_limit" ] 145 then 146 return $FALSE # Out of array bounds. 147 fi 148 149 row=$2 150 let "left = $row * $COLS" # Left limit. 151 let "right = $left + $COLS - 1" # Right limit. 152 153 if [ "$1" -lt "$left" -o "$1" -gt "$right" ] 154 then 155 return $FALSE # Beyond row boundary. 156 fi 157 158 return $TRUE # Valid coordinate. 159 160 } 161 162 163 IsAlive () # Test whether cell is alive. 164 # Takes array, cell number, 165 { #+ state of cell as arguments. 166 GetCount "$1" $2 # Get alive cell count in neighborhood. 167 local nhbd=$? 168 169 if [ "$nhbd" -eq "$BIRTH" ] # Alive in any case. 170 then 171 return $ALIVE 172 fi 173 174 if [ "$3" = "." -a "$nhbd" -eq "$SURVIVE" ] 175 then # Alive only if previously alive. 176 return $ALIVE 177 fi 178 179 return $DEAD # Dead by default. 180 181 } 182 183 184 GetCount () # Count live cells in passed cell's neighborhood. 185 # Two arguments needed: 186 # $1) variable holding array 187 # $2) cell number 188 { 189 local cell_number=$2 190 local array 191 local top 192 local center 193 local bottom 194 local r 195 local row 196 local i 197 local t_top 198 local t_cen 199 local t_bot 200 local count=0 201 local ROW_NHBD=3 202 203 array=( `echo "$1"` ) 204 205 let "top = $cell_number - $COLS - 1" # Set up cell neighborhood. 206 let "center = $cell_number - 1" 207 let "bottom = $cell_number + $COLS - 1" 208 let "r = $cell_number / $COLS" 209 210 for ((i=0; i<$ROW_NHBD; i++)) # Traverse from left to right. 211 do 212 let "t_top = $top + $i" 213 let "t_cen = $center + $i" 214 let "t_bot = $bottom + $i" 215 216 217 let "row = $r" # Count center row. 218 IsValid $t_cen $row # Valid cell position? 219 if [ $? -eq "$TRUE" ] 220 then 221 if [ ${array[$t_cen]} = "$ALIVE1" ] # Is it alive? 222 then # Yes? 223 let "count += 1" # Increment count. 224 fi 225 fi 226 227 let "row = $r - 1" # Count top row. 228 IsValid $t_top $row 229 if [ $? -eq "$TRUE" ] 230 then 231 if [ ${array[$t_top]} = "$ALIVE1" ] # Redundancy here. 232 then # Can be optimized? 233 let "count += 1" 234 fi 235 fi 236 237 let "row = $r + 1" # Count bottom row. 238 IsValid $t_bot $row 239 if [ $? -eq "$TRUE" ] 240 then 241 if [ ${array[$t_bot]} = "$ALIVE1" ] 242 then 243 let "count += 1" 244 fi 245 fi 246 247 done 248 249 250 if [ ${array[$cell_number]} = "$ALIVE1" ] 251 then 252 let "count -= 1" # Make sure value of tested cell itself 253 fi #+ is not counted. 254 255 256 return $count 257 258 } 259 260 next_gen () # Update generation array. 261 { 262 263 local array 264 local i=0 265 266 array=( `echo "$1"` ) # Convert passed arg to array. 267 268 while [ "$i" -lt "$cells" ] 269 do 270 IsAlive "$1" $i ${array[$i]} # Is cell alive? 271 if [ $? -eq "$ALIVE" ] 272 then # If alive, then 273 array[$i]=. #+ represent the cell as a period. 274 else 275 array[$i]="_" # Otherwise underscore 276 fi #+ (will later be converted to space). 277 let "i += 1" 278 done 279 280 281 # let "generation += 1" # Increment generation count. 282 # Why was the above line commented out? 283 284 285 # Set variable to pass as parameter to "display" function. 286 avar=`echo ${array[@]}` # Convert array back to string variable. 287 display "$avar" # Display it. 
288 echo; echo 289 echo "Generation $generation - $alive alive" 290 291 if [ "$alive" -eq 0 ] 292 then 293 echo 294 echo "Unexpected exit: no more cells alive!" 295 exit $NONE_ALIVE # No point in continuing 296 fi #+ if no live cells. 297 298 } 299 300 301 # ========================================================= 302 303 # main () 304 305 # Load initial array with contents of startup file. 306 initial=( `cat "$startfile" | sed -e '/#/d' | tr -d '\n' |\ 307 sed -e 's/\./\. /g' -e 's/_/_ /g'` ) 308 # Delete lines containing '#' comment character. 309 # Remove linefeeds and insert space between elements. 310 311 clear # Clear screen. 312 313 echo # Title 314 echo "=======================" 315 echo " $GENERATIONS generations" 316 echo " of" 317 echo "\"Life in the Slow Lane\"" 318 echo "=======================" 319 320 321 # -------- Display first generation. -------- 322 Gen0=`echo ${initial[@]}` 323 display "$Gen0" # Display only. 324 echo; echo 325 echo "Generation $generation - $alive alive" 326 # ------------------------------------------- 327 328 329 let "generation += 1" # Bump generation count. 330 echo 331 332 # ------- Display second generation. ------- 333 Cur=`echo ${initial[@]}` 334 next_gen "$Cur" # Update & display. 335 # ------------------------------------------ 336 337 let "generation += 1" # Increment generation count. 338 339 # ------ Main loop for displaying subsequent generations ------ 340 while [ "$generation" -le "$GENERATIONS" ] 341 do 342 Cur="$avar" 343 next_gen "$Cur" 344 let "generation += 1" 345 done 346 # ============================================================== 347 348 echo 349 350 exit 0 # CEOF:EOF 351 352 353 354 # The grid in this script has a "boundary problem." 355 # The the top, bottom, and sides border on a void of dead cells. 356 # Exercise: Change the script to have the grid wrap around, 357 # + so that the left and right sides will "touch," 358 # + as will the top and bottom. 359 # 360 # Exercise: Create a new "gen0" file to seed this script. 361 # Use a 12 x 16 grid, instead of the original 10 x 10 one. 362 # Make the necessary changes to the script, 363 #+ so it will run with the altered file. 364 # 365 # Exercise: Modify this script so that it can determine the grid size 366 #+ from the "gen0" file, and set any variables necessary 367 #+ for the script to run. 368 # This would make unnecessary any changes to variables 369 #+ in the script for an altered grid size. 370 # 371 # Exercise: Optimize this script. 372 # It has some redundant code. |
Example A-11. Data file for Game of Life
# gen0
#
# This is an example "generation 0" start-up file for "life.sh".
# --------------------------------------------------------------
#  The "gen0" file is a 10 x 10 grid using a period (.) for live cells,
#+ and an underscore (_) for dead ones. We cannot simply use spaces
#+ for dead cells in this file because of a peculiarity in Bash arrays.
#  [Exercise for the reader: explain this.]
#
# Lines beginning with a '#' are comments, and the script ignores them.
__.__..___
___._.____
____.___..
_._______.
____._____
..__...___
____._____
___...____
__.._..___
_..___..__
The following two scripts are by Mark Moraes of the University of Toronto. See the file Moraes-COPYRIGHT for permissions and restrictions. This file is included in the combined HTML/source tarball of the ABS Guide.
Example A-12. behead: Removing mail and news message headers
#! /bin/sh
# Strips off the header from a mail/News message i.e. till the first
# empty line.
# Author: Mark Moraes, University of Toronto
# See the included file "Moraes-COPYRIGHT" for copyright info.

# ==> These comments added by author of this document.

if [ $# -eq 0 ]; then
# ==> If no command line args present, then works on file redirected to stdin.
  sed -e '1,/^$/d' -e '/^[ ]*$/d'
  # --> Delete empty lines and all lines until
  # --> first one beginning with white space.
else
# ==> If command line args present, then work on files named.
  for i do
    sed -e '1,/^$/d' -e '/^[ ]*$/d' $i
    # --> Ditto, as above.
  done
fi

# ==> Exercise: Add error checking and other options.
# ==>
# ==> Note that the small sed script repeats, except for the arg passed.
# ==> Does it make sense to embed it in a function? Why or why not?
Example A-13. ftpget: Downloading files via ftp
1 #! /bin/sh 2 # $Id: ftpget,v 1.2 91/05/07 21:15:43 moraes Exp $ 3 # Script to perform batch anonymous ftp. Essentially converts a list of 4 # of command line arguments into input to ftp. 5 # ==> This script is nothing but a shell wrapper around "ftp" . . . 6 # Simple, and quick - written as a companion to ftplist 7 # -h specifies the remote host (default prep.ai.mit.edu) 8 # -d specifies the remote directory to cd to - you can provide a sequence 9 # of -d options - they will be cd'ed to in turn. If the paths are relative, 10 # make sure you get the sequence right. Be careful with relative paths - 11 # there are far too many symlinks nowadays. 12 # (default is the ftp login directory) 13 # -v turns on the verbose option of ftp, and shows all responses from the 14 # ftp server. 15 # -f remotefile[:localfile] gets the remote file into localfile 16 # -m pattern does an mget with the specified pattern. Remember to quote 17 # shell characters. 18 # -c does a local cd to the specified directory 19 # For example, 20 # ftpget -h expo.lcs.mit.edu -d contrib -f xplaces.shar:xplaces.sh \ 21 # -d ../pub/R3/fixes -c ~/fixes -m 'fix*' 22 # will get xplaces.shar from ~ftp/contrib on expo.lcs.mit.edu, and put it 23 # in xplaces.sh in the current working directory, and get all fixes from 24 # ~ftp/pub/R3/fixes and put them in the ~/fixes directory. 25 # Obviously, the sequence of the options is important, since the equivalent 26 # commands are executed by ftp in corresponding order 27 # 28 # Mark Moraes <moraes@csri.toronto.edu>, Feb 1, 1989 29 # 30 31 32 # ==> These comments added by author of this document. 33 34 # PATH=/local/bin:/usr/ucb:/usr/bin:/bin 35 # export PATH 36 # ==> Above 2 lines from original script probably superfluous. 37 38 E_BADARGS=65 39 40 TMPFILE=/tmp/ftp.$$ 41 # ==> Creates temp file, using process id of script ($$) 42 # ==> to construct filename. 43 44 SITE=`domainname`.toronto.edu 45 # ==> 'domainname' similar to 'hostname' 46 # ==> May rewrite this to parameterize this for general use. 47 48 usage="Usage: $0 [-h remotehost] [-d remotedirectory]... \ 49 [-f remfile:localfile]... [-c localdirectory] [-m filepattern] [-v]" 50 ftpflags="-i -n" 51 verbflag= 52 set -f # So we can use globbing in -m 53 set x `getopt vh:d:c:m:f: $*` 54 if [ $? != 0 ]; then 55 echo $usage 56 exit $E_BADARGS 57 fi 58 shift 59 trap 'rm -f ${TMPFILE} ; exit' 0 1 2 3 15 60 # ==> Signals: HUP INT (Ctl-C) QUIT TERM 61 # ==> Delete tempfile in case of abnormal exit from script. 62 echo "user anonymous ${USER-gnu}@${SITE} > ${TMPFILE}" 63 # ==> Added quotes (recommended in complex echoes). 64 echo binary >> ${TMPFILE} 65 for i in $* # ==> Parse command line args. 66 do 67 case $i in 68 -v) verbflag=-v; echo hash >> ${TMPFILE}; shift;; 69 -h) remhost=$2; shift 2;; 70 -d) echo cd $2 >> ${TMPFILE}; 71 if [ x${verbflag} != x ]; then 72 echo pwd >> ${TMPFILE}; 73 fi; 74 shift 2;; 75 -c) echo lcd $2 >> ${TMPFILE}; shift 2;; 76 -m) echo mget "$2" >> ${TMPFILE}; shift 2;; 77 -f) f1=`expr "$2" : "\([^:]*\).*"`; f2=`expr "$2" : "[^:]*:\(.*\)"`; 78 echo get ${f1} ${f2} >> ${TMPFILE}; shift 2;; 79 --) shift; break;; 80 esac 81 # ==> 'lcd' and 'mget' are ftp commands. See "man ftp" . . . 82 done 83 if [ $# -ne 0 ]; then 84 echo $usage 85 exit $E_BADARGS 86 # ==> Changed from "exit 2" to conform with style standard. 87 fi 88 if [ x${verbflag} != x ]; then 89 ftpflags="${ftpflags} -v" 90 fi 91 if [ x${remhost} = x ]; then 92 remhost=prep.ai.mit.edu 93 # ==> Change to match appropriate ftp site. 
94 fi 95 echo quit >> ${TMPFILE} 96 # ==> All commands saved in tempfile. 97 98 ftp ${ftpflags} ${remhost} < ${TMPFILE} 99 # ==> Now, tempfile batch processed by ftp. 100 101 rm -f ${TMPFILE} 102 # ==> Finally, tempfile deleted (you may wish to copy it to a logfile). 103 104 105 # ==> Exercises: 106 # ==> --------- 107 # ==> 1) Add error checking. 108 # ==> 2) Add bells & whistles. 109 110 # See the included file "Moraes-COPYRIGHT" for copyright info. 111 #+ on this script. |
Antek Sawicki contributed the following script, which makes very clever use of the parameter substitution operators discussed in Section 9.3.
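As a quick warm-up before reading it, here is a minimal sketch (not part of Sawicki's script; the string and variable names are made up for illustration) of the two parameter substitution operators the script leans on:

#!/bin/bash
# param-subst-demo.sh: hypothetical illustration, not part of Example A-14.

str="ABCDEFGH"
echo ${#str}                          # 8  -- length of the string.
echo ${str:3:2}                       # DE -- substring: position 3, length 2.
echo ${str:$((RANDOM % ${#str})):1}   # A single character picked at random.

unset n
echo ${n:=1}    # 1 -- ":=" assigns a default if 'n' is unset or null.
echo $n         # 1 -- and the assignment persists.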
Example A-14. password: Generating random 8-character passwords
#!/bin/bash
# May need to be invoked with #!/bin/bash2 on older machines.
#
# Random password generator for Bash 2.x +
#+ by Antek Sawicki <tenox@tenox.tc>,
#+ who generously gave usage permission to the ABS Guide author.
#
# ==> Comments added by document author ==>


MATRIX="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
# ==> Password will consist of alphanumeric characters.
LENGTH="8"
# ==> May change 'LENGTH' for longer password.


while [ "${n:=1}" -le "$LENGTH" ]
# ==> Recall that := is "default substitution" operator.
# ==> So, if 'n' has not been initialized, set it to 1.
do
  PASS="$PASS${MATRIX:$(($RANDOM%${#MATRIX})):1}"
  # ==> Very clever, but tricky.

  # ==> Starting from the innermost nesting...
  # ==> ${#MATRIX} returns the length of the string MATRIX.

  # ==> $RANDOM%${#MATRIX} returns a random number between 0
  # ==> and [length of MATRIX] - 1.

  # ==> ${MATRIX:$(($RANDOM%${#MATRIX})):1}
  # ==> returns expansion of MATRIX at random position, by length 1.
  # ==> See {var:pos:len} parameter substitution in Chapter 9
  # ==> and the associated examples.

  # ==> PASS=... simply pastes this result onto previous PASS (concatenation).

  # ==> To visualize this more clearly, uncomment the following line
  #                echo "$PASS"
  # ==> to see PASS being built up,
  # ==> one character at a time, each iteration of the loop.

  let n+=1
  # ==> Increment 'n' for next pass.
done

echo "$PASS"      # ==> Or, redirect to a file, as desired.

exit 0
James R. Van Zandt contributed this script which uses named pipes and, in his words, "really exercises quoting and escaping."
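For readers who have not met named pipes before, this is a minimal, self-contained sketch (independent of Van Zandt's script; the pathname is made up) of two processes talking through a FIFO:

#!/bin/bash
# fifo-demo.sh: hypothetical named-pipe illustration, not part of Example A-15.

PIPE=/tmp/demo_fifo.$$       # Made-up pathname for this sketch.
mkfifo "$PIPE"               # Create the named pipe.

tr 'a-z' 'A-Z' < "$PIPE" &   # Reader runs in background, blocks until data arrives.

echo "hello through a fifo" > "$PIPE"   # Writer feeds the pipe.

wait                         # Let the background reader finish.
rm -f "$PIPE"                #  Unlike an anonymous pipe (|), a named pipe
                             #+ persists in the filesystem until removed.
exit 0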
Example A-15. fifo: Making daily backups, using named pipes
#!/bin/bash
# ==> Script by James R. Van Zandt, and used here with his permission.

# ==> Comments added by author of this document.


HERE=`uname -n`    # ==> hostname
THERE=bilbo
echo "starting remote backup to $THERE at `date +%r`"
# ==> `date +%r` returns time in 12-hour format, i.e. "08:08:34 PM".

# make sure /pipe really is a pipe and not a plain file
rm -rf /pipe
mkfifo /pipe       # ==> Create a "named pipe", named "/pipe" ...

# ==> 'su xyz' runs commands as user "xyz".
# ==> 'ssh' invokes secure shell (remote login client).
su xyz -c "ssh $THERE \"cat > /home/xyz/backup/${HERE}-daily.tar.gz\" < /pipe"&
cd /
tar -czf - bin boot dev etc home info lib man root sbin share usr var > /pipe
# ==> Uses named pipe, /pipe, to communicate between processes:
# ==> 'tar/gzip' writes to /pipe and 'ssh' reads from /pipe.

# ==> The end result is this backs up the main directories, from / on down.

# ==>  What are the advantages of a "named pipe" in this situation,
# ==>+ as opposed to an "anonymous pipe", with |?
# ==>  Will an anonymous pipe even work here?

# ==> Is it necessary to delete the pipe before exiting the script?
# ==> How could that be done?


exit 0
Stéphane Chazelas used the following script to demonstrate generating prime numbers without arrays.
Example A-16. Generating prime numbers using the modulo operator
#!/bin/bash
# primes.sh: Generate prime numbers, without using arrays.
# Script contributed by Stephane Chazelas.

#  This does *not* use the classic "Sieve of Eratosthenes" algorithm,
#+ but instead the more intuitive method of testing each candidate number
#+ for factors (divisors), using the "%" modulo operator.


LIMIT=1000                    # Primes, 2 ... 1000.

Primes()
{
 (( n = $1 + 1 ))             # Bump to next integer.
 shift                        # Next parameter in list.
#  echo "_n=$n i=$i_"

 if (( n == LIMIT ))
 then echo $*
 return
 fi

 for i; do                    # "i" set to "@", previous values of $n.
#   echo "-n=$n i=$i-"
   (( i * i > n )) && break   # Optimization.
   (( n % i )) && continue    # Sift out non-primes using modulo operator.
   Primes $n $@               # Recursion inside loop.
   return
 done

 Primes $n $@ $n              # Recursion outside loop.
                              # Successively accumulate positional parameters.
                              # "$@" is the accumulating list of primes.
}

Primes 1

exit $?   # Pipe output of the script to 'fmt' for prettier printing.

#  Uncomment the two commented-out "echo" lines in the Primes() function
#+ to help figure out what is going on.

#  Compare the speed of this algorithm for generating primes
#+ with the Sieve of Eratosthenes (ex68.sh).


# Exercise: Rewrite this script without recursion, for faster execution.
Rick Boivie's revision of Jordi Sanfeliu's tree script.
Example A-17. tree: Displaying a directory tree
#!/bin/bash
# tree.sh

# Written by Rick Boivie.
# Used with permission.
# This is a revised and simplified version of a script
#+ by Jordi Sanfeliu (and patched by Ian Kjos).
# This script replaces the earlier version used in
#+ previous releases of the Advanced Bash Scripting Guide.

# ==> Comments added by the author of this document.


search () {
for dir in `echo *`
#  ==> `echo *` lists all the files in current working directory,
#+ ==> without line breaks.
#  ==> Similar effect to      for dir in *
#  ==> but "dir in `echo *`" will not handle filenames with blanks.
do
  if [ -d "$dir" ] ; then    # ==> If it is a directory (-d)...
    zz=0                     # ==> Temp variable, keeping track of directory level.
    while [ $zz != $1 ]      # Keep track of inner nested loop.
    do
      echo -n "| "           # ==> Display vertical connector symbol,
                             # ==> with 2 spaces & no line feed in order to indent.
      zz=`expr $zz + 1`      # ==> Increment zz.
    done

    if [ -L "$dir" ] ; then  # ==> If directory is a symbolic link...
      echo "+---$dir" `ls -l $dir | sed 's/^.*'$dir' //'`
      # ==> Display horiz. connector and list directory name, but...
      # ==> delete date/time part of long listing.
    else
      echo "+---$dir"        # ==> Display horizontal connector symbol...
                             # ==> and print directory name.
      numdirs=`expr $numdirs + 1`  # ==> Increment directory count.
      if cd "$dir" ; then          # ==> If can move to subdirectory...
        search `expr $1 + 1`       # with recursion ;-)
                                   # ==> Function calls itself.
        cd ..
      fi
    fi
  fi
done
}

if [ $# != 0 ] ; then
  cd $1   # move to indicated directory.
  #else   # stay in current directory
fi

echo "Initial directory = `pwd`"
numdirs=0

search 0
echo "Total directories = $numdirs"

exit 0
Patsie's version of a directory tree script.
Example A-18. tree2: Alternate directory tree script
1 #!/bin/bash 2 # tree2.sh 3 4 # Lightly modified/reformatted by ABS Guide author. 5 # Included in ABS Guide with permission of script author (thanks!). 6 7 ## Recursive file/dirsize checking script, by Patsie 8 ## 9 ## This script builds a list of files/directories and their size (du -akx) 10 ## and processes this list to a human readable tree shape 11 ## The 'du -akx' is only as good as the permissions the owner has. 12 ## So preferably run as root* to get the best results, or use only on 13 ## directories for which you have read permissions. Anything you can't 14 ## read is not in the list. 15 16 #* ABS Guide author advises caution when running scripts as root! 17 18 19 ########## THIS IS CONFIGURABLE ########## 20 21 TOP=5 # Top 5 biggest (sub)directories. 22 MAXRECURS=5 # Max 5 subdirectories/recursions deep. 23 E_BL=80 # Blank line already returned. 24 E_DIR=81 # Directory not specified. 25 26 27 ########## DON'T CHANGE ANYTHING BELOW THIS LINE ########## 28 29 PID=$$ # Our own process ID. 30 SELF=`basename $0` # Our own program name. 31 TMP="/tmp/${SELF}.${PID}.tmp" # Temporary 'du' result. 32 33 # Convert number to dotted thousand. 34 function dot { echo " $*" | 35 sed -e :a -e 's/\(.*[0-9]\)\([0-9]\{3\}\)/\1,\2/;ta' | 36 tail -c 12; } 37 38 # Usage: tree <recursion> <indent prefix> <min size> <directory> 39 function tree { 40 recurs="$1" # How deep nested are we? 41 prefix="$2" # What do we display before file/dirname? 42 minsize="$3" # What is the minumum file/dirsize? 43 dirname="$4" # Which directory are we checking? 44 45 # Get ($TOP) biggest subdirs/subfiles from TMP file. 46 LIST=`egrep "[[:space:]]${dirname}/[^/]*$" "$TMP" | 47 awk '{if($1>'$minsize') print;}' | sort -nr | head -$TOP` 48 [ -z "$LIST" ] && return # Empty list, then go back. 49 50 cnt=0 51 num=`echo "$LIST" | wc -l` # How many entries in the list. 52 53 ## Main loop 54 echo "$LIST" | while read size name; do 55 ((cnt+=1)) # Count entry number. 56 bname=`basename "$name"` # We only need a basename of the entry. 57 [ -d "$name" ] && bname="$bname/" 58 # If it's a directory, append a slash. 59 echo "`dot $size`$prefix +-$bname" 60 # Display the result. 61 # Call ourself recursively if it's a directory 62 #+ and we're not nested too deep ($MAXRECURS). 63 # The recursion goes up: $((recurs+1)) 64 # The prefix gets a space if it's the last entry, 65 #+ or a pipe if there are more entries. 66 # The minimum file/dirsize becomes 67 #+ a tenth of his parent: $((size/10)). 68 # Last argument is the full directory name to check. 69 if [ -d "$name" -a $recurs -lt $MAXRECURS ]; then 70 [ $cnt -lt $num ] \ 71 || (tree $((recurs+1)) "$prefix " $((size/10)) "$name") \ 72 && (tree $((recurs+1)) "$prefix |" $((size/10)) "$name") 73 fi 74 done 75 76 [ $? -eq 0 ] && echo " $prefix" 77 # Every time we jump back add a 'blank' line. 78 return $E_BL 79 # We return 80 to tell we added a blank line already. 80 } 81 82 ### ### 83 ### main program ### 84 ### ### 85 86 rootdir="$@" 87 [ -d "$rootdir" ] || 88 { echo "$SELF: Usage: $SELF <directory>" >&2; exit $E_DIR; } 89 # We should be called with a directory name. 90 91 echo "Building inventory list, please wait ..." 92 # Show "please wait" message. 93 du -akx "$rootdir" 1>"$TMP" 2>/dev/null 94 # Build a temporary list of all files/dirs and their size. 95 size=`tail -1 "$TMP" | awk '{print $1}'` 96 # What is our rootdirectory's size? 97 echo "`dot $size` $rootdir" 98 # Display rootdirectory's entry. 99 tree 0 "" 0 "$rootdir" 100 # Display the tree below our rootdirectory. 
101 102 rm "$TMP" 2>/dev/null 103 # Clean up TMP file. 104 105 exit $? |
Noah Friedman permitted use of his string function script. It essentially reproduces some of the C-library string manipulation functions.
Example A-19. string functions: C-style string functions
1 #!/bin/bash 2 3 # string.bash --- bash emulation of string(3) library routines 4 # Author: Noah Friedman <friedman@prep.ai.mit.edu> 5 # ==> Used with his kind permission in this document. 6 # Created: 1992-07-01 7 # Last modified: 1993-09-29 8 # Public domain 9 10 # Conversion to bash v2 syntax done by Chet Ramey 11 12 # Commentary: 13 # Code: 14 15 #:docstring strcat: 16 # Usage: strcat s1 s2 17 # 18 # Strcat appends the value of variable s2 to variable s1. 19 # 20 # Example: 21 # a="foo" 22 # b="bar" 23 # strcat a b 24 # echo $a 25 # => foobar 26 # 27 #:end docstring: 28 29 ###;;;autoload ==> Autoloading of function commented out. 30 function strcat () 31 { 32 local s1_val s2_val 33 34 s1_val=${!1} # indirect variable expansion 35 s2_val=${!2} 36 eval "$1"=\'"${s1_val}${s2_val}"\' 37 # ==> eval $1='${s1_val}${s2_val}' avoids problems, 38 # ==> if one of the variables contains a single quote. 39 } 40 41 #:docstring strncat: 42 # Usage: strncat s1 s2 $n 43 # 44 # Line strcat, but strncat appends a maximum of n characters from the value 45 # of variable s2. It copies fewer if the value of variabl s2 is shorter 46 # than n characters. Echoes result on stdout. 47 # 48 # Example: 49 # a=foo 50 # b=barbaz 51 # strncat a b 3 52 # echo $a 53 # => foobar 54 # 55 #:end docstring: 56 57 ###;;;autoload 58 function strncat () 59 { 60 local s1="$1" 61 local s2="$2" 62 local -i n="$3" 63 local s1_val s2_val 64 65 s1_val=${!s1} # ==> indirect variable expansion 66 s2_val=${!s2} 67 68 if [ ${#s2_val} -gt ${n} ]; then 69 s2_val=${s2_val:0:$n} # ==> substring extraction 70 fi 71 72 eval "$s1"=\'"${s1_val}${s2_val}"\' 73 # ==> eval $1='${s1_val}${s2_val}' avoids problems, 74 # ==> if one of the variables contains a single quote. 75 } 76 77 #:docstring strcmp: 78 # Usage: strcmp $s1 $s2 79 # 80 # Strcmp compares its arguments and returns an integer less than, equal to, 81 # or greater than zero, depending on whether string s1 is lexicographically 82 # less than, equal to, or greater than string s2. 83 #:end docstring: 84 85 ###;;;autoload 86 function strcmp () 87 { 88 [ "$1" = "$2" ] && return 0 89 90 [ "${1}" '<' "${2}" ] > /dev/null && return -1 91 92 return 1 93 } 94 95 #:docstring strncmp: 96 # Usage: strncmp $s1 $s2 $n 97 # 98 # Like strcmp, but makes the comparison by examining a maximum of n 99 # characters (n less than or equal to zero yields equality). 100 #:end docstring: 101 102 ###;;;autoload 103 function strncmp () 104 { 105 if [ -z "${3}" -o "${3}" -le "0" ]; then 106 return 0 107 fi 108 109 if [ ${3} -ge ${#1} -a ${3} -ge ${#2} ]; then 110 strcmp "$1" "$2" 111 return $? 112 else 113 s1=${1:0:$3} 114 s2=${2:0:$3} 115 strcmp $s1 $s2 116 return $? 117 fi 118 } 119 120 #:docstring strlen: 121 # Usage: strlen s 122 # 123 # Strlen returns the number of characters in string literal s. 124 #:end docstring: 125 126 ###;;;autoload 127 function strlen () 128 { 129 eval echo "\${#${1}}" 130 # ==> Returns the length of the value of the variable 131 # ==> whose name is passed as an argument. 132 } 133 134 #:docstring strspn: 135 # Usage: strspn $s1 $s2 136 # 137 # Strspn returns the length of the maximum initial segment of string s1, 138 # which consists entirely of characters from string s2. 139 #:end docstring: 140 141 ###;;;autoload 142 function strspn () 143 { 144 # Unsetting IFS allows whitespace to be handled as normal chars. 
145 local IFS= 146 local result="${1%%[!${2}]*}" 147 148 echo ${#result} 149 } 150 151 #:docstring strcspn: 152 # Usage: strcspn $s1 $s2 153 # 154 # Strcspn returns the length of the maximum initial segment of string s1, 155 # which consists entirely of characters not from string s2. 156 #:end docstring: 157 158 ###;;;autoload 159 function strcspn () 160 { 161 # Unsetting IFS allows whitspace to be handled as normal chars. 162 local IFS= 163 local result="${1%%[${2}]*}" 164 165 echo ${#result} 166 } 167 168 #:docstring strstr: 169 # Usage: strstr s1 s2 170 # 171 # Strstr echoes a substring starting at the first occurrence of string s2 in 172 # string s1, or nothing if s2 does not occur in the string. If s2 points to 173 # a string of zero length, strstr echoes s1. 174 #:end docstring: 175 176 ###;;;autoload 177 function strstr () 178 { 179 # if s2 points to a string of zero length, strstr echoes s1 180 [ ${#2} -eq 0 ] && { echo "$1" ; return 0; } 181 182 # strstr echoes nothing if s2 does not occur in s1 183 case "$1" in 184 *$2*) ;; 185 *) return 1;; 186 esac 187 188 # use the pattern matching code to strip off the match and everything 189 # following it 190 first=${1/$2*/} 191 192 # then strip off the first unmatched portion of the string 193 echo "${1##$first}" 194 } 195 196 #:docstring strtok: 197 # Usage: strtok s1 s2 198 # 199 # Strtok considers the string s1 to consist of a sequence of zero or more 200 # text tokens separated by spans of one or more characters from the 201 # separator string s2. The first call (with a non-empty string s1 202 # specified) echoes a string consisting of the first token on stdout. The 203 # function keeps track of its position in the string s1 between separate 204 # calls, so that subsequent calls made with the first argument an empty 205 # string will work through the string immediately following that token. In 206 # this way subsequent calls will work through the string s1 until no tokens 207 # remain. The separator string s2 may be different from call to call. 208 # When no token remains in s1, an empty value is echoed on stdout. 209 #:end docstring: 210 211 ###;;;autoload 212 function strtok () 213 { 214 : 215 } 216 217 #:docstring strtrunc: 218 # Usage: strtrunc $n $s1 {$s2} {$...} 219 # 220 # Used by many functions like strncmp to truncate arguments for comparison. 221 # Echoes the first n characters of each string s1 s2 ... on stdout. 222 #:end docstring: 223 224 ###;;;autoload 225 function strtrunc () 226 { 227 n=$1 ; shift 228 for z; do 229 echo "${z:0:$n}" 230 done 231 } 232 233 # provide string 234 235 # string.bash ends here 236 237 238 # ========================================================================== # 239 # ==> Everything below here added by the document author. 240 241 # ==> Suggested use of this script is to delete everything below here, 242 # ==> and "source" this file into your own scripts. 243 244 # strcat 245 string0=one 246 string1=two 247 echo 248 echo "Testing \"strcat\" function:" 249 echo "Original \"string0\" = $string0" 250 echo "\"string1\" = $string1" 251 strcat string0 string1 252 echo "New \"string0\" = $string0" 253 echo 254 255 # strlen 256 echo 257 echo "Testing \"strlen\" function:" 258 str=123456789 259 echo "\"str\" = $str" 260 echo -n "Length of \"str\" = " 261 strlen str 262 echo 263 264 265 266 # Exercise: 267 # -------- 268 # Add code to test all the other string functions above. 269 270 271 exit 0 |
Michael Zick's complex array example uses the md5sum checksum command to encode directory information.
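As background for the script that follows, here is a minimal sketch (not Zick's code; the test file is arbitrary) of what md5sum emits and how a script can capture its fields:

#!/bin/bash
# md5sum-demo.sh: hypothetical illustration, not part of Example A-20.
# 'md5sum' prints:  <32-hex-digit checksum>  <' ' or '*'>  <filename>

file=/etc/hosts                 # Any readable file will do.
set -- $(md5sum "$file")        # Word-split the output into positional params.
checksum=$1                     # 128-bit digest as 32 hex characters.
name=$2                         # Filename (a leading '*' would mean binary mode).

echo "Checksum: $checksum"
echo "File:     $name"
echo "Length:   ${#checksum}"   # Should print 32.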
Example A-20. Directory information
1 #! /bin/bash 2 # directory-info.sh 3 # Parses and lists directory information. 4 5 # NOTE: Change lines 273 and 353 per "README" file. 6 7 # Michael Zick is the author of this script. 8 # Used here with his permission. 9 10 # Controls 11 # If overridden by command arguments, they must be in the order: 12 # Arg1: "Descriptor Directory" 13 # Arg2: "Exclude Paths" 14 # Arg3: "Exclude Directories" 15 # 16 # Environment Settings override Defaults. 17 # Command arguments override Environment Settings. 18 19 # Default location for content addressed file descriptors. 20 MD5UCFS=${1:-${MD5UCFS:-'/tmpfs/ucfs'}} 21 22 # Directory paths never to list or enter 23 declare -a \ 24 EXCLUDE_PATHS=${2:-${EXCLUDE_PATHS:-'(/proc /dev /devfs /tmpfs)'}} 25 26 # Directories never to list or enter 27 declare -a \ 28 EXCLUDE_DIRS=${3:-${EXCLUDE_DIRS:-'(ucfs lost+found tmp wtmp)'}} 29 30 # Files never to list or enter 31 declare -a \ 32 EXCLUDE_FILES=${3:-${EXCLUDE_FILES:-'(core "Name with Spaces")'}} 33 34 35 # Here document used as a comment block. 36 : <<LSfieldsDoc 37 # # # # # List Filesystem Directory Information # # # # # 38 # 39 # ListDirectory "FileGlob" "Field-Array-Name" 40 # or 41 # ListDirectory -of "FileGlob" "Field-Array-Filename" 42 # '-of' meaning 'output to filename' 43 # # # # # 44 45 String format description based on: ls (GNU fileutils) version 4.0.36 46 47 Produces a line (or more) formatted: 48 inode permissions hard-links owner group ... 49 32736 -rw------- 1 mszick mszick 50 51 size day month date hh:mm:ss year path 52 2756608 Sun Apr 20 08:53:06 2003 /home/mszick/core 53 54 Unless it is formatted: 55 inode permissions hard-links owner group ... 56 266705 crw-rw---- 1 root uucp 57 58 major minor day month date hh:mm:ss year path 59 4, 68 Sun Apr 20 09:27:33 2003 /dev/ttyS4 60 NOTE: that pesky comma after the major number 61 62 NOTE: the 'path' may be multiple fields: 63 /home/mszick/core 64 /proc/982/fd/0 -> /dev/null 65 /proc/982/fd/1 -> /home/mszick/.xsession-errors 66 /proc/982/fd/13 -> /tmp/tmpfZVVOCs (deleted) 67 /proc/982/fd/7 -> /tmp/kde-mszick/ksycoca 68 /proc/982/fd/8 -> socket:[11586] 69 /proc/982/fd/9 -> pipe:[11588] 70 71 If that isn't enough to keep your parser guessing, 72 either or both of the path components may be relative: 73 ../Built-Shared -> Built-Static 74 ../linux-2.4.20.tar.bz2 -> ../../../SRCS/linux-2.4.20.tar.bz2 75 76 The first character of the 11 (10?) character permissions field: 77 's' Socket 78 'd' Directory 79 'b' Block device 80 'c' Character device 81 'l' Symbolic link 82 NOTE: Hard links not marked - test for identical inode numbers 83 on identical filesystems. 84 All information about hard linked files are shared, except 85 for the names and the name's location in the directory system. 86 NOTE: A "Hard link" is known as a "File Alias" on some systems. 87 '-' An undistingushed file 88 89 Followed by three groups of letters for: User, Group, Others 90 Character 1: '-' Not readable; 'r' Readable 91 Character 2: '-' Not writable; 'w' Writable 92 Character 3, User and Group: Combined execute and special 93 '-' Not Executable, Not Special 94 'x' Executable, Not Special 95 's' Executable, Special 96 'S' Not Executable, Special 97 Character 3, Others: Combined execute and sticky (tacky?) 
98 '-' Not Executable, Not Tacky 99 'x' Executable, Not Tacky 100 't' Executable, Tacky 101 'T' Not Executable, Tacky 102 103 Followed by an access indicator 104 Haven't tested this one, it may be the eleventh character 105 or it may generate another field 106 ' ' No alternate access 107 '+' Alternate access 108 LSfieldsDoc 109 110 111 ListDirectory() 112 { 113 local -a T 114 local -i of=0 # Default return in variable 115 # OLD_IFS=$IFS # Using BASH default ' \t\n' 116 117 case "$#" in 118 3) case "$1" in 119 -of) of=1 ; shift ;; 120 * ) return 1 ;; 121 esac ;; 122 2) : ;; # Poor man's "continue" 123 *) return 1 ;; 124 esac 125 126 # NOTE: the (ls) command is NOT quoted (") 127 T=( $(ls --inode --ignore-backups --almost-all --directory \ 128 --full-time --color=none --time=status --sort=none \ 129 --format=long $1) ) 130 131 case $of in 132 # Assign T back to the array whose name was passed as $2 133 0) eval $2=\( \"\$\{T\[@\]\}\" \) ;; 134 # Write T into filename passed as $2 135 1) echo "${T[@]}" > "$2" ;; 136 esac 137 return 0 138 } 139 140 # # # # # Is that string a legal number? # # # # # 141 # 142 # IsNumber "Var" 143 # # # # # There has to be a better way, sigh... 144 145 IsNumber() 146 { 147 local -i int 148 if [ $# -eq 0 ] 149 then 150 return 1 151 else 152 (let int=$1) 2>/dev/null 153 return $? # Exit status of the let thread 154 fi 155 } 156 157 # # # # # Index Filesystem Directory Information # # # # # 158 # 159 # IndexList "Field-Array-Name" "Index-Array-Name" 160 # or 161 # IndexList -if Field-Array-Filename Index-Array-Name 162 # IndexList -of Field-Array-Name Index-Array-Filename 163 # IndexList -if -of Field-Array-Filename Index-Array-Filename 164 # # # # # 165 166 : <<IndexListDoc 167 Walk an array of directory fields produced by ListDirectory 168 169 Having suppressed the line breaks in an otherwise line oriented 170 report, build an index to the array element which starts each line. 171 172 Each line gets two index entries, the first element of each line 173 (inode) and the element that holds the pathname of the file. 174 175 The first index entry pair (Line-Number==0) are informational: 176 Index-Array-Name[0] : Number of "Lines" indexed 177 Index-Array-Name[1] : "Current Line" pointer into Index-Array-Name 178 179 The following index pairs (if any) hold element indexes into 180 the Field-Array-Name per: 181 Index-Array-Name[Line-Number * 2] : The "inode" field element. 182 NOTE: This distance may be either +11 or +12 elements. 183 Index-Array-Name[(Line-Number * 2) + 1] : The "pathname" element. 184 NOTE: This distance may be a variable number of elements. 185 Next line index pair for Line-Number+1. 186 IndexListDoc 187 188 189 190 IndexList() 191 { 192 local -a LIST # Local of listname passed 193 local -a -i INDEX=( 0 0 ) # Local of index to return 194 local -i Lidx Lcnt 195 local -i if=0 of=0 # Default to variable names 196 197 case "$#" in # Simplistic option testing 198 0) return 1 ;; 199 1) return 1 ;; 200 2) : ;; # Poor man's continue 201 3) case "$1" in 202 -if) if=1 ;; 203 -of) of=1 ;; 204 * ) return 1 ;; 205 esac ; shift ;; 206 4) if=1 ; of=1 ; shift ; shift ;; 207 *) return 1 208 esac 209 210 # Make local copy of list 211 case "$if" in 212 0) eval LIST=\( \"\$\{$1\[@\]\}\" \) ;; 213 1) LIST=( $(cat $1) ) ;; 214 esac 215 216 # Grok (grope?) 
the array 217 Lcnt=${#LIST[@]} 218 Lidx=0 219 until (( Lidx >= Lcnt )) 220 do 221 if IsNumber ${LIST[$Lidx]} 222 then 223 local -i inode name 224 local ft 225 inode=Lidx 226 local m=${LIST[$Lidx+2]} # Hard Links field 227 ft=${LIST[$Lidx+1]:0:1} # Fast-Stat 228 case $ft in 229 b) ((Lidx+=12)) ;; # Block device 230 c) ((Lidx+=12)) ;; # Character device 231 *) ((Lidx+=11)) ;; # Anything else 232 esac 233 name=Lidx 234 case $ft in 235 -) ((Lidx+=1)) ;; # The easy one 236 b) ((Lidx+=1)) ;; # Block device 237 c) ((Lidx+=1)) ;; # Character device 238 d) ((Lidx+=1)) ;; # The other easy one 239 l) ((Lidx+=3)) ;; # At LEAST two more fields 240 # A little more elegance here would handle pipes, 241 #+ sockets, deleted files - later. 242 *) until IsNumber ${LIST[$Lidx]} || ((Lidx >= Lcnt)) 243 do 244 ((Lidx+=1)) 245 done 246 ;; # Not required 247 esac 248 INDEX[${#INDEX[*]}]=$inode 249 INDEX[${#INDEX[*]}]=$name 250 INDEX[0]=${INDEX[0]}+1 # One more "line" found 251 # echo "Line: ${INDEX[0]} Type: $ft Links: $m Inode: \ 252 # ${LIST[$inode]} Name: ${LIST[$name]}" 253 254 else 255 ((Lidx+=1)) 256 fi 257 done 258 case "$of" in 259 0) eval $2=\( \"\$\{INDEX\[@\]\}\" \) ;; 260 1) echo "${INDEX[@]}" > "$2" ;; 261 esac 262 return 0 # What could go wrong? 263 } 264 265 # # # # # Content Identify File # # # # # 266 # 267 # DigestFile Input-Array-Name Digest-Array-Name 268 # or 269 # DigestFile -if Input-FileName Digest-Array-Name 270 # # # # # 271 272 # Here document used as a comment block. 273 : <<DigestFilesDoc 274 275 The key (no pun intended) to a Unified Content File System (UCFS) 276 is to distinguish the files in the system based on their content. 277 Distinguishing files by their name is just, so, 20th Century. 278 279 The content is distinguished by computing a checksum of that content. 280 This version uses the md5sum program to generate a 128 bit checksum 281 representative of the file's contents. 282 There is a chance that two files having different content might 283 generate the same checksum using md5sum (or any checksum). Should 284 that become a problem, then the use of md5sum can be replace by a 285 cyrptographic signature. But until then... 286 287 The md5sum program is documented as outputting three fields (and it 288 does), but when read it appears as two fields (array elements). This 289 is caused by the lack of whitespace between the second and third field. 290 So this function gropes the md5sum output and returns: 291 [0] 32 character checksum in hexidecimal (UCFS filename) 292 [1] Single character: ' ' text file, '*' binary file 293 [2] Filesystem (20th Century Style) name 294 Note: That name may be the character '-' indicating STDIN read. 
295 296 DigestFilesDoc 297 298 299 300 DigestFile() 301 { 302 local if=0 # Default, variable name 303 local -a T1 T2 304 305 case "$#" in 306 3) case "$1" in 307 -if) if=1 ; shift ;; 308 * ) return 1 ;; 309 esac ;; 310 2) : ;; # Poor man's "continue" 311 *) return 1 ;; 312 esac 313 314 case $if in 315 0) eval T1=\( \"\$\{$1\[@\]\}\" \) 316 T2=( $(echo ${T1[@]} | md5sum -) ) 317 ;; 318 1) T2=( $(md5sum $1) ) 319 ;; 320 esac 321 322 case ${#T2[@]} in 323 0) return 1 ;; 324 1) return 1 ;; 325 2) case ${T2[1]:0:1} in # SanScrit-2.0.5 326 \*) T2[${#T2[@]}]=${T2[1]:1} 327 T2[1]=\* 328 ;; 329 *) T2[${#T2[@]}]=${T2[1]} 330 T2[1]=" " 331 ;; 332 esac 333 ;; 334 3) : ;; # Assume it worked 335 *) return 1 ;; 336 esac 337 338 local -i len=${#T2[0]} 339 if [ $len -ne 32 ] ; then return 1 ; fi 340 eval $2=\( \"\$\{T2\[@\]\}\" \) 341 } 342 343 # # # # # Locate File # # # # # 344 # 345 # LocateFile [-l] FileName Location-Array-Name 346 # or 347 # LocateFile [-l] -of FileName Location-Array-FileName 348 # # # # # 349 350 # A file location is Filesystem-id and inode-number 351 352 # Here document used as a comment block. 353 : <<StatFieldsDoc 354 Based on stat, version 2.2 355 stat -t and stat -lt fields 356 [0] name 357 [1] Total size 358 File - number of bytes 359 Symbolic link - string length of pathname 360 [2] Number of (512 byte) blocks allocated 361 [3] File type and Access rights (hex) 362 [4] User ID of owner 363 [5] Group ID of owner 364 [6] Device number 365 [7] Inode number 366 [8] Number of hard links 367 [9] Device type (if inode device) Major 368 [10] Device type (if inode device) Minor 369 [11] Time of last access 370 May be disabled in 'mount' with noatime 371 atime of files changed by exec, read, pipe, utime, mknod (mmap?) 372 atime of directories changed by addition/deletion of files 373 [12] Time of last modification 374 mtime of files changed by write, truncate, utime, mknod 375 mtime of directories changed by addtition/deletion of files 376 [13] Time of last change 377 ctime reflects time of changed inode information (owner, group 378 permissions, link count 379 -*-*- Per: 380 Return code: 0 381 Size of array: 14 382 Contents of array 383 Element 0: /home/mszick 384 Element 1: 4096 385 Element 2: 8 386 Element 3: 41e8 387 Element 4: 500 388 Element 5: 500 389 Element 6: 303 390 Element 7: 32385 391 Element 8: 22 392 Element 9: 0 393 Element 10: 0 394 Element 11: 1051221030 395 Element 12: 1051214068 396 Element 13: 1051214068 397 398 For a link in the form of linkname -> realname 399 stat -t linkname returns the linkname (link) information 400 stat -lt linkname returns the realname information 401 402 stat -tf and stat -ltf fields 403 [0] name 404 [1] ID-0? # Maybe someday, but Linux stat structure 405 [2] ID-0? 
# does not have either LABEL nor UUID 406 # fields, currently information must come 407 # from file-system specific utilities 408 These will be munged into: 409 [1] UUID if possible 410 [2] Volume Label if possible 411 Note: 'mount -l' does return the label and could return the UUID 412 413 [3] Maximum length of filenames 414 [4] Filesystem type 415 [5] Total blocks in the filesystem 416 [6] Free blocks 417 [7] Free blocks for non-root user(s) 418 [8] Block size of the filesystem 419 [9] Total inodes 420 [10] Free inodes 421 422 -*-*- Per: 423 Return code: 0 424 Size of array: 11 425 Contents of array 426 Element 0: /home/mszick 427 Element 1: 0 428 Element 2: 0 429 Element 3: 255 430 Element 4: ef53 431 Element 5: 2581445 432 Element 6: 2277180 433 Element 7: 2146050 434 Element 8: 4096 435 Element 9: 1311552 436 Element 10: 1276425 437 438 StatFieldsDoc 439 440 441 # LocateFile [-l] FileName Location-Array-Name 442 # LocateFile [-l] -of FileName Location-Array-FileName 443 444 LocateFile() 445 { 446 local -a LOC LOC1 LOC2 447 local lk="" of=0 448 449 case "$#" in 450 0) return 1 ;; 451 1) return 1 ;; 452 2) : ;; 453 *) while (( "$#" > 2 )) 454 do 455 case "$1" in 456 -l) lk=-1 ;; 457 -of) of=1 ;; 458 *) return 1 ;; 459 esac 460 shift 461 done ;; 462 esac 463 464 # More Sanscrit-2.0.5 465 # LOC1=( $(stat -t $lk $1) ) 466 # LOC2=( $(stat -tf $lk $1) ) 467 # Uncomment above two lines if system has "stat" command installed. 468 LOC=( ${LOC1[@]:0:1} ${LOC1[@]:3:11} 469 ${LOC2[@]:1:2} ${LOC2[@]:4:1} ) 470 471 case "$of" in 472 0) eval $2=\( \"\$\{LOC\[@\]\}\" \) ;; 473 1) echo "${LOC[@]}" > "$2" ;; 474 esac 475 return 0 476 # Which yields (if you are lucky, and have "stat" installed) 477 # -*-*- Location Discriptor -*-*- 478 # Return code: 0 479 # Size of array: 15 480 # Contents of array 481 # Element 0: /home/mszick 20th Century name 482 # Element 1: 41e8 Type and Permissions 483 # Element 2: 500 User 484 # Element 3: 500 Group 485 # Element 4: 303 Device 486 # Element 5: 32385 inode 487 # Element 6: 22 Link count 488 # Element 7: 0 Device Major 489 # Element 8: 0 Device Minor 490 # Element 9: 1051224608 Last Access 491 # Element 10: 1051214068 Last Modify 492 # Element 11: 1051214068 Last Status 493 # Element 12: 0 UUID (to be) 494 # Element 13: 0 Volume Label (to be) 495 # Element 14: ef53 Filesystem type 496 } 497 498 499 500 # And then there was some test code 501 502 ListArray() # ListArray Name 503 { 504 local -a Ta 505 506 eval Ta=\( \"\$\{$1\[@\]\}\" \) 507 echo 508 echo "-*-*- List of Array -*-*-" 509 echo "Size of array $1: ${#Ta[*]}" 510 echo "Contents of array $1:" 511 for (( i=0 ; i<${#Ta[*]} ; i++ )) 512 do 513 echo -e "\tElement $i: ${Ta[$i]}" 514 done 515 return 0 516 } 517 518 declare -a CUR_DIR 519 # For small arrays 520 ListDirectory "${PWD}" CUR_DIR 521 ListArray CUR_DIR 522 523 declare -a DIR_DIG 524 DigestFile CUR_DIR DIR_DIG 525 echo "The new \"name\" (checksum) for ${CUR_DIR[9]} is ${DIR_DIG[0]}" 526 527 declare -a DIR_ENT 528 # BIG_DIR # For really big arrays - use a temporary file in ramdisk 529 # BIG-DIR # ListDirectory -of "${CUR_DIR[11]}/*" "/tmpfs/junk2" 530 ListDirectory "${CUR_DIR[11]}/*" DIR_ENT 531 532 declare -a DIR_IDX 533 # BIG-DIR # IndexList -if "/tmpfs/junk2" DIR_IDX 534 IndexList DIR_ENT DIR_IDX 535 536 declare -a IDX_DIG 537 # BIG-DIR # DIR_ENT=( $(cat /tmpfs/junk2) ) 538 # BIG-DIR # DigestFile -if /tmpfs/junk2 IDX_DIG 539 DigestFile DIR_ENT IDX_DIG 540 # Small (should) be able to parallize IndexList & DigestFile 541 # Large (should) be able to 
parallize IndexList & DigestFile & the assignment 542 echo "The \"name\" (checksum) for the contents of ${PWD} is ${IDX_DIG[0]}" 543 544 declare -a FILE_LOC 545 LocateFile ${PWD} FILE_LOC 546 ListArray FILE_LOC 547 548 exit 0 |
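A technique worth isolating from the listing above is how ListDirectory, IndexList, DigestFile, and LocateFile hand results back: the caller passes the name of a destination array, and the function assigns into it with an eval'd compound assignment. The following minimal sketch is not part of Michael Zick's script (the function and variable names are invented for illustration); it demonstrates only that one idiom.

#!/bin/bash
# return-array-by-name.sh: sketch of the eval idiom used above.

fill_array()                  # fill_array destination-array-name
{
  local -a T=( alpha "beta gamma" delta )   # Build the result locally.
  eval $1=\( \"\$\{T\[@\]\}\" \)            # Assign to the named array,
                                            #+ preserving embedded spaces.
}

declare -a RESULT
fill_array RESULT
echo "${#RESULT[@]} elements; element 1 is: ${RESULT[1]}"
# 3 elements; element 1 is: beta gamma

exit 0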
Stéphane Chazelas demonstrates object-oriented programming in a Bash script.
Example A-21. Object-oriented database
1 #!/bin/bash 2 # obj-oriented.sh: Object-oriented programming in a shell script. 3 # Script by Stephane Chazelas. 4 5 # Important Note: 6 # --------- ---- 7 # If running this script under version 3 or later of Bash, 8 #+ replace all periods in function names with a "legal" character, 9 #+ for example, an underscore. 10 11 12 person.new() # Looks almost like a class declaration in C++. 13 { 14 local obj_name=$1 name=$2 firstname=$3 birthdate=$4 15 16 eval "$obj_name.set_name() { 17 eval \"$obj_name.get_name() { 18 echo \$1 19 }\" 20 }" 21 22 eval "$obj_name.set_firstname() { 23 eval \"$obj_name.get_firstname() { 24 echo \$1 25 }\" 26 }" 27 28 eval "$obj_name.set_birthdate() { 29 eval \"$obj_name.get_birthdate() { 30 echo \$1 31 }\" 32 eval \"$obj_name.show_birthdate() { 33 echo \$(date -d \"1/1/1970 0:0:\$1 GMT\") 34 }\" 35 eval \"$obj_name.get_age() { 36 echo \$(( (\$(date +%s) - \$1) / 3600 / 24 / 365 )) 37 }\" 38 }" 39 40 $obj_name.set_name $name 41 $obj_name.set_firstname $firstname 42 $obj_name.set_birthdate $birthdate 43 } 44 45 echo 46 47 person.new self Bozeman Bozo 101272413 48 # Create an instance of "person.new" (actually passing args to the function). 49 50 self.get_firstname # Bozo 51 self.get_name # Bozeman 52 self.get_age # 28 53 self.get_birthdate # 101272413 54 self.show_birthdate # Sat Mar 17 20:13:33 MST 1973 55 56 echo 57 58 # typeset -f 59 #+ to see the created functions (careful, it scrolls off the page). 60 61 exit 0 |
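The note at the top of the script is worth acting on: Bash version 3 and later will not accept periods in function names. Below is a minimal, hypothetical sketch of the same eval-generated "method" technique with underscores instead, reduced to a single getter; it illustrates the idea and is not a replacement for the script above.

#!/bin/bash
# oo-underscore.sh: Bash-3-safe sketch of the technique above.

person_new()                    # person_new objname name
{
  local obj_name=$1 name=$2
  # Generate a "getter method" on the fly, named after the object.
  eval "${obj_name}_get_name() {
    echo \"$name\"
  }"
}

person_new self Bozeman
self_get_name                   # Bozeman

exit 0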
Mariusz Gniazdowski contributed a hash library for use in scripts.
Example A-22. Library of hash functions
1 # Hash: 2 # Hash function library 3 # Author: Mariusz Gniazdowski <mgniazd-at-gmail.com> 4 # Date: 2005-04-07 5 6 # Functions making emulating hashes in Bash a little less painful. 7 8 9 # Limitations: 10 # * Only global variables are supported. 11 # * Each hash instance generates one global variable per value. 12 # * Variable names collisions are possible 13 #+ if you define variable like __hash__hashname_key 14 # * Keys must use chars that can be part of a Bash variable name 15 #+ (no dashes, periods, etc.). 16 # * The hash is created as a variable: 17 # ... hashname_keyname 18 # So if somone will create hashes like: 19 # myhash_ + mykey = myhash__mykey 20 # myhash + _mykey = myhash__mykey 21 # Then there will be a collision. 22 # (This should not pose a major problem.) 23 24 25 Hash_config_varname_prefix=__hash__ 26 27 28 # Emulates: hash[key]=value 29 # 30 # Params: 31 # 1 - hash 32 # 2 - key 33 # 3 - value 34 function hash_set { 35 eval "${Hash_config_varname_prefix}${1}_${2}=\"${3}\"" 36 } 37 38 39 # Emulates: value=hash[key] 40 # 41 # Params: 42 # 1 - hash 43 # 2 - key 44 # 3 - value (name of global variable to set) 45 function hash_get_into { 46 eval "$3=\"\$${Hash_config_varname_prefix}${1}_${2}\"" 47 } 48 49 50 # Emulates: echo hash[key] 51 # 52 # Params: 53 # 1 - hash 54 # 2 - key 55 # 3 - echo params (like -n, for example) 56 function hash_echo { 57 eval "echo $3 \"\$${Hash_config_varname_prefix}${1}_${2}\"" 58 } 59 60 61 # Emulates: hash1[key1]=hash2[key2] 62 # 63 # Params: 64 # 1 - hash1 65 # 2 - key1 66 # 3 - hash2 67 # 4 - key2 68 function hash_copy { 69 eval "${Hash_config_varname_prefix}${1}_${2}\ 70 =\"\$${Hash_config_varname_prefix}${3}_${4}\"" 71 } 72 73 74 # Emulates: hash[keyN-1]=hash[key2]=...hash[key1] 75 # 76 # Copies first key to rest of keys. 77 # 78 # Params: 79 # 1 - hash1 80 # 2 - key1 81 # 3 - key2 82 # . . . 83 # N - keyN 84 function hash_dup { 85 local hashName="$1" keyName="$2" 86 shift 2 87 until [ ${#} -le 0 ]; do 88 eval "${Hash_config_varname_prefix}${hashName}_${1}\ 89 =\"\$${Hash_config_varname_prefix}${hashName}_${keyName}\"" 90 shift; 91 done; 92 } 93 94 95 # Emulates: unset hash[key] 96 # 97 # Params: 98 # 1 - hash 99 # 2 - key 100 function hash_unset { 101 eval "unset ${Hash_config_varname_prefix}${1}_${2}" 102 } 103 104 105 # Emulates something similar to: ref=&hash[key] 106 # 107 # The reference is name of the variable in which value is held. 108 # 109 # Params: 110 # 1 - hash 111 # 2 - key 112 # 3 - ref - Name of global variable to set. 113 function hash_get_ref_into { 114 eval "$3=\"${Hash_config_varname_prefix}${1}_${2}\"" 115 } 116 117 118 # Emulates something similar to: echo &hash[key] 119 # 120 # That reference is name of variable in which value is held. 121 # 122 # Params: 123 # 1 - hash 124 # 2 - key 125 # 3 - echo params (like -n for example) 126 function hash_echo_ref { 127 eval "echo $3 \"${Hash_config_varname_prefix}${1}_${2}\"" 128 } 129 130 131 132 # Emulates something similar to: $$hash[key](param1, param2, ...) 133 # 134 # Params: 135 # 1 - hash 136 # 2 - key 137 # 3,4, ... 
- Function parameters 138 function hash_call { 139 local hash key 140 hash=$1 141 key=$2 142 shift 2 143 eval "eval \"\$${Hash_config_varname_prefix}${hash}_${key} \\\"\\\$@\\\"\"" 144 } 145 146 147 # Emulates something similar to: isset(hash[key]) or hash[key]==NULL 148 # 149 # Params: 150 # 1 - hash 151 # 2 - key 152 # Returns: 153 # 0 - there is such key 154 # 1 - there is no such key 155 function hash_is_set { 156 eval "if [[ \"\${${Hash_config_varname_prefix}${1}_${2}-a}\" = \"a\" && 157 \"\${${Hash_config_varname_prefix}${1}_${2}-b}\" = \"b\" ]] 158 then return 1; else return 0; fi" 159 } 160 161 162 # Emulates something similar to: 163 # foreach($hash as $key => $value) { fun($key,$value); } 164 # 165 # It is possible to write different variations of this function. 166 # Here we use a function call to make it as "generic" as possible. 167 # 168 # Params: 169 # 1 - hash 170 # 2 - function name 171 function hash_foreach { 172 local keyname oldIFS="$IFS" 173 IFS=' ' 174 for i in $(eval "echo \${!${Hash_config_varname_prefix}${1}_*}"); do 175 keyname=$(eval "echo \${i##${Hash_config_varname_prefix}${1}_}") 176 eval "$2 $keyname \"\$$i\"" 177 done 178 IFS="$oldIFS" 179 } 180 181 # NOTE: In lines 103 and 116, ampersand changed. 182 # But, it doesn't matter, because these are comment lines anyhow. |
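Before the usage example that follows, it may help to see what the library actually does with a key/value pair: each hash_set call simply creates an ordinary global variable whose name is glued together from the prefix, the hash name, and the key. A small demonstration, assuming the listing above has been saved as Hash.lib (the filename the next example sources):

#!/bin/bash
# hash-mechanics.sh: what hash_set / hash_get_into boil down to (sketch).

. Hash.lib                        # The library above, saved under this name.

hash_set fruit apple "red"
# Internally this is nothing more than:  __hash__fruit_apple="red"

echo "$__hash__fruit_apple"       # red
hash_get_into fruit apple color   # Copy the value into $color.
echo "$color"                     # red

exit 0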
Here is an example script using the foregoing hash library.
Example A-23. Colorizing text using hash functions
1 #!/bin/bash 2 # hash-example.sh: Colorizing text. 3 # Author: Mariusz Gniazdowski <mgniazd-at-gmail.com> 4 5 . Hash.lib # Load the library of functions. 6 7 hash_set colors red "\033[0;31m" 8 hash_set colors blue "\033[0;34m" 9 hash_set colors light_blue "\033[1;34m" 10 hash_set colors light_red "\033[1;31m" 11 hash_set colors cyan "\033[0;36m" 12 hash_set colors light_green "\033[1;32m" 13 hash_set colors light_gray "\033[0;37m" 14 hash_set colors green "\033[0;32m" 15 hash_set colors yellow "\033[1;33m" 16 hash_set colors light_purple "\033[1;35m" 17 hash_set colors purple "\033[0;35m" 18 hash_set colors reset_color "\033[0;00m" 19 20 21 # $1 - keyname 22 # $2 - value 23 try_colors() { 24 echo -en "$2" 25 echo "This line is $1." 26 } 27 hash_foreach colors try_colors 28 hash_echo colors reset_color -en 29 30 echo -e '\nLet us overwrite some colors with yellow.\n' 31 # It's hard to read yellow text on some terminals. 32 hash_dup colors yellow red light_green blue green light_gray cyan 33 hash_foreach colors try_colors 34 hash_echo colors reset_color -en 35 36 echo -e '\nLet us delete them and try colors once more . . .\n' 37 38 for i in red light_green blue green light_gray cyan; do 39 hash_unset colors $i 40 done 41 hash_foreach colors try_colors 42 hash_echo colors reset_color -en 43 44 hash_set other txt "Other examples . . ." 45 hash_echo other txt 46 hash_get_into other txt text 47 echo $text 48 49 hash_set other my_fun try_colors 50 hash_call other my_fun purple "`hash_echo colors purple`" 51 hash_echo colors reset_color -en 52 53 echo; echo "Back to normal?"; echo 54 55 exit $? 56 57 # On some terminals, the "light" colors print in bold, 58 # and end up looking darker than the normal ones. 59 # Why is this? 60 |
An example illustrating the mechanics of hashing, but from a different point of view.
Example A-24. More on hash functions
1 #!/bin/bash 2 # $Id: ha.sh,v 1.2 2005/04/21 23:24:26 oliver Exp $ 3 # Copyright 2005 Oliver Beckstein 4 # Released under the GNU Public License 5 # Author of script granted permission for inclusion in ABS Guide. 6 # (Thank you!) 7 8 #---------------------------------------------------------------- 9 # pseudo hash based on indirect parameter expansion 10 # API: access through functions: 11 # 12 # create the hash: 13 # 14 # newhash Lovers 15 # 16 # add entries (note single quotes for spaces) 17 # 18 # addhash Lovers Tristan Isolde 19 # addhash Lovers 'Romeo Montague' 'Juliet Capulet' 20 # 21 # access value by key 22 # 23 # gethash Lovers Tristan ----> Isolde 24 # 25 # show all keys 26 # 27 # keyshash Lovers ----> 'Tristan' 'Romeo Montague' 28 # 29 # 30 # Convention: instead of perls' foo{bar} = boing' syntax, 31 # use 32 # '_foo_bar=boing' (two underscores, no spaces) 33 # 34 # 1) store key in _NAME_keys[] 35 # 2) store value in _NAME_values[] using the same integer index 36 # The integer index for the last entry is _NAME_ptr 37 # 38 # NOTE: No error or sanity checks, just bare bones. 39 40 41 function _inihash () { 42 # private function 43 # call at the beginning of each procedure 44 # defines: _keys _values _ptr 45 # 46 # Usage: _inihash NAME 47 local name=$1 48 _keys=_${name}_keys 49 _values=_${name}_values 50 _ptr=_${name}_ptr 51 } 52 53 function newhash () { 54 # Usage: newhash NAME 55 # NAME should not contain spaces or dots. 56 # Actually: it must be a legal name for a Bash variable. 57 # We rely on Bash automatically recognising arrays. 58 local name=$1 59 local _keys _values _ptr 60 _inihash ${name} 61 eval ${_ptr}=0 62 } 63 64 65 function addhash () { 66 # Usage: addhash NAME KEY 'VALUE with spaces' 67 # arguments with spaces need to be quoted with single quotes '' 68 local name=$1 k="$2" v="$3" 69 local _keys _values _ptr 70 _inihash ${name} 71 72 #echo "DEBUG(addhash): ${_ptr}=${!_ptr}" 73 74 eval let ${_ptr}=${_ptr}+1 75 eval "$_keys[${!_ptr}]=\"${k}\"" 76 eval "$_values[${!_ptr}]=\"${v}\"" 77 } 78 79 function gethash () { 80 # Usage: gethash NAME KEY 81 # Returns boing 82 # ERR=0 if entry found, 1 otherwise 83 # That's not a proper hash -- 84 #+ we simply linearly search through the keys. 85 local name=$1 key="$2" 86 local _keys _values _ptr 87 local k v i found h 88 _inihash ${name} 89 90 # _ptr holds the highest index in the hash 91 found=0 92 93 for i in $(seq 1 ${!_ptr}); do 94 h="\${${_keys}[${i}]}" # Safer to do it in two steps, 95 eval k=${h} #+ especially when quoting for spaces. 96 if [ "${k}" = "${key}" ]; then found=1; break; fi 97 done; 98 99 [ ${found} = 0 ] && return 1; 100 # else: i is the index that matches the key 101 h="\${${_values}[${i}]}" 102 eval echo "${h}" 103 return 0; 104 } 105 106 function keyshash () { 107 # Usage: keyshash NAME 108 # Returns list of all keys defined for hash name. 109 local name=$1 key="$2" 110 local _keys _values _ptr 111 local k i h 112 _inihash ${name} 113 114 # _ptr holds the highest index in the hash 115 for i in $(seq 1 ${!_ptr}); do 116 h="\${${_keys}[${i}]}" # Safer to do it in two steps, 117 eval k=${h} #+ especially when quoting for spaces. 118 echo -n "'${k}' " 119 done; 120 } 121 122 123 # ----------------------------------------------------------------------- 124 125 # Now, let's test it. 126 # (Per comments at the beginning of the script.) 127 newhash Lovers 128 addhash Lovers Tristan Isolde 129 addhash Lovers 'Romeo Montague' 'Juliet Capulet' 130 131 # Output results. 
132 echo 133 gethash Lovers Tristan # Isolde 134 echo 135 keyshash Lovers # 'Tristan' 'Romeo Montague' 136 echo; echo 137 138 139 exit 0 140 141 # Exercise: 142 # -------- 143 144 # Add error checks to the functions. |
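The engine underneath this pseudo-hash is Bash's indirect parameter expansion, ${!name}: build a variable name as a string, then read through it. Here is that mechanism in isolation, a sketch with the names invented for illustration:

#!/bin/bash
# indirect-expansion.sh: the ${!name} trick that ha.sh is built on.

_Lovers_ptr=2                 # Pretend addhash has already run twice.

name=Lovers
_ptr=_${name}_ptr             # Construct the NAME "_Lovers_ptr" . . .
echo "Entries: ${!_ptr}"      #+ and dereference it: prints 2.

eval ${_ptr}=3                # Writing through the name still needs eval.
echo "Entries: ${!_ptr}"      # 3

exit 0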
Now for a script that installs and mounts those cute USB keychain solid-state "hard drives."
Example A-25. Mounting USB keychain storage devices
1 #!/bin/bash 2 # ==> usb.sh 3 # ==> Script for mounting and installing pen/keychain USB storage devices. 4 # ==> Runs as root at system startup (see below). 5 # ==> 6 # ==> Newer Linux distros (2004 or later) autodetect 7 # ==> and install USB pen drives, and therefore don't need this script. 8 # ==> But, it's still instructive. 9 10 # This code is free software covered by GNU GPL license version 2 or above. 11 # Please refer to http://www.gnu.org/ for the full license text. 12 # 13 # Some code lifted from usb-mount by Michael Hamilton's usb-mount (LGPL) 14 #+ see http://users.actrix.co.nz/michael/usbmount.html 15 # 16 # INSTALL 17 # ------- 18 # Put this in /etc/hotplug/usb/diskonkey. 19 # Then look in /etc/hotplug/usb.distmap, and copy all usb-storage entries 20 #+ into /etc/hotplug/usb.usermap, substituting "usb-storage" for "diskonkey". 21 # Otherwise this code is only run during the kernel module invocation/removal 22 #+ (at least in my tests), which defeats the purpose. 23 # 24 # TODO 25 # ---- 26 # Handle more than one diskonkey device at one time (e.g. /dev/diskonkey1 27 #+ and /mnt/diskonkey1), etc. The biggest problem here is the handling in 28 #+ devlabel, which I haven't yet tried. 29 # 30 # AUTHOR and SUPPORT 31 # ------------------ 32 # Konstantin Riabitsev, <icon linux duke edu>. 33 # Send any problem reports to my email address at the moment. 34 # 35 # ==> Comments added by ABS Guide author. 36 37 38 39 SYMLINKDEV=/dev/diskonkey 40 MOUNTPOINT=/mnt/diskonkey 41 DEVLABEL=/sbin/devlabel 42 DEVLABELCONFIG=/etc/sysconfig/devlabel 43 IAM=$0 44 45 ## 46 # Functions lifted near-verbatim from usb-mount code. 47 # 48 function allAttachedScsiUsb { 49 find /proc/scsi/ -path '/proc/scsi/usb-storage*' -type f | 50 xargs grep -l 'Attached: Yes' 51 } 52 function scsiDevFromScsiUsb { 53 echo $1 | awk -F"[-/]" '{ n=$(NF-1); 54 print "/dev/sd" substr("abcdefghijklmnopqrstuvwxyz", n+1, 1) }' 55 } 56 57 if [ "${ACTION}" = "add" ] && [ -f "${DEVICE}" ]; then 58 ## 59 # lifted from usbcam code. 60 # 61 if [ -f /var/run/console.lock ]; then 62 CONSOLEOWNER=`cat /var/run/console.lock` 63 elif [ -f /var/lock/console.lock ]; then 64 CONSOLEOWNER=`cat /var/lock/console.lock` 65 else 66 CONSOLEOWNER= 67 fi 68 for procEntry in $(allAttachedScsiUsb); do 69 scsiDev=$(scsiDevFromScsiUsb $procEntry) 70 # Some bug with usb-storage? 71 # Partitions are not in /proc/partitions until they are accessed 72 #+ somehow. 73 /sbin/fdisk -l $scsiDev >/dev/null 74 ## 75 # Most devices have partitioning info, so the data would be on 76 #+ /dev/sd?1. However, some stupider ones don't have any partitioning 77 #+ and use the entire device for data storage. This tries to 78 #+ guess semi-intelligently if we have a /dev/sd?1 and if not, then 79 #+ it uses the entire device and hopes for the better. 80 # 81 if grep -q `basename $scsiDev`1 /proc/partitions; then 82 part="$scsiDev""1" 83 else 84 part=$scsiDev 85 fi 86 ## 87 # Change ownership of the partition to the console user so they can 88 #+ mount it. 89 # 90 if [ ! -z "$CONSOLEOWNER" ]; then 91 chown $CONSOLEOWNER:disk $part 92 fi 93 ## 94 # This checks if we already have this UUID defined with devlabel. 95 # If not, it then adds the device to the list. 96 # 97 prodid=`$DEVLABEL printid -d $part` 98 if ! grep -q $prodid $DEVLABELCONFIG; then 99 # cross our fingers and hope it works 100 $DEVLABEL add -d $part -s $SYMLINKDEV 2>/dev/null 101 fi 102 ## 103 # Check if the mount point exists and create if it doesn't. 104 # 105 if [ ! 
-e $MOUNTPOINT ]; then 106 mkdir -p $MOUNTPOINT 107 fi 108 ## 109 # Take care of /etc/fstab so mounting is easy. 110 # 111 if ! grep -q "^$SYMLINKDEV" /etc/fstab; then 112 # Add an fstab entry 113 echo -e \ 114 "$SYMLINKDEV\t\t$MOUNTPOINT\t\tauto\tnoauto,owner,kudzu 0 0" \ 115 >> /etc/fstab 116 fi 117 done 118 if [ ! -z "$REMOVER" ]; then 119 ## 120 # Make sure this script is triggered on device removal. 121 # 122 mkdir -p `dirname $REMOVER` 123 ln -s $IAM $REMOVER 124 fi 125 elif [ "${ACTION}" = "remove" ]; then 126 ## 127 # If the device is mounted, unmount it cleanly. 128 # 129 if grep -q "$MOUNTPOINT" /etc/mtab; then 130 # unmount cleanly 131 umount -l $MOUNTPOINT 132 fi 133 ## 134 # Remove it from /etc/fstab if it's there. 135 # 136 if grep -q "^$SYMLINKDEV" /etc/fstab; then 137 grep -v "^$SYMLINKDEV" /etc/fstab > /etc/.fstab.new 138 mv -f /etc/.fstab.new /etc/fstab 139 fi 140 fi 141 142 exit 0 |
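One piece of the script is useful outside the hotplug context: deciding whether a newly attached device carries a partition table or stores its data on the raw device. Here is that guess as a stand-alone sketch; the device name is a placeholder, so substitute a real one before trying it.

#!/bin/bash
# partition-or-whole-device.sh: sketch only; $scsiDev is hypothetical.

scsiDev=/dev/sda

if grep -qw "$(basename $scsiDev)1" /proc/partitions
then
    part="${scsiDev}1"        # First partition exists -- data lives there.
else
    part=$scsiDev             # No partition table -- use the whole device.
fi

echo "Would mount: $part"

exit 0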
Converting a text file to HTML format.
Example A-26. Converting to HTML
1 #!/bin/bash 2 # tohtml.sh [v. 0.2, reldate: 06/26/80, still buggy] 3 4 # Convert a text file to HTML format. 5 # Author: Mendel Cooper 6 # License: GPL3 7 # Usage: sh tohtml.sh < textfile > htmlfile 8 # Script can easily be modified to accept source and target filenames. 9 10 # Assumptions: 11 # 1) Paragraphs in (target) text file are separated by a blank line. 12 # 2) Jpeg images (*.jpg) are located in "images" subdirectory. 13 # In the target file, the image names are enclosed in square brackets, 14 # for example, [image01.jpg]. 15 # 3) Emphasized (italic) phrases begin with a space+underscore 16 #+ or the first character on the line is an underscore, 17 #+ and end with an underscore+space or underscore+end-of-line. 18 19 20 # Settings 21 FNTSIZE=2 # Small-medium font size 22 IMGDIR="images" # Image directory 23 # Headers 24 HDR01='<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">' 25 HDR02='<!-- Converted to HTML by ***tohtml.sh*** script -->' 26 HDR03='<!-- script author: M. Leo Cooper <thegrendel@theriver.com> -->' 27 HDR10='<html>' 28 HDR11='<head>' 29 HDR11a='</head>' 30 HDR12a='<title>' 31 HDR12b='</title>' 32 HDR121='<META NAME="GENERATOR" CONTENT="tohtml.sh script">' 33 HDR13='<body bgcolor="#dddddd">' # Change background color to suit. 34 HDR14a='<font size=' 35 HDR14b='>' 36 # Footers 37 FTR10='</body>' 38 FTR11='</html>' 39 # Tags 40 BOLD="<b>" 41 CENTER="<center>" 42 END_CENTER="</center>" 43 LF="<br>" 44 45 46 write_headers () 47 { 48 echo "$HDR01" 49 echo 50 echo "$HDR02" 51 echo "$HDR03" 52 echo 53 echo 54 echo "$HDR10" 55 echo "$HDR11" 56 echo "$HDR121" 57 echo "$HDR11a" 58 echo "$HDR13" 59 echo 60 echo -n "$HDR14a" 61 echo -n "$FNTSIZE" 62 echo "$HDR14b" 63 echo 64 echo "$BOLD" # Everything in bold (more easily readable). 65 } 66 67 68 process_text () 69 { 70 while read line # Read one line at a time. 71 do 72 { 73 if [ ! "$line" ] # Blank line? 74 then # Then new paragraph must follow. 75 echo 76 echo "$LF" # Insert two <br> tags. 77 echo "$LF" 78 echo 79 continue # Skip the underscore test. 80 else # Otherwise . . . 81 82 if [[ "$line" =~ "\[*jpg\]" ]] # Is a graphic? 83 then # Strip away brackets. 84 temp=$( echo "$line" | sed -e 's/\[//' -e 's/\]//' ) 85 line=""$CENTER" <img src="\"$IMGDIR"/$temp\"> "$END_CENTER" " 86 # Add image tag. 87 # And, center it. 88 fi 89 90 fi 91 92 93 echo "$line" | grep -q _ 94 if [ "$?" -eq 0 ] # If line contains underscore ... 95 then 96 # =================================================== 97 # Convert underscored phrase to italics. 98 temp=$( echo "$line" | 99 sed -e 's/ _/ <i>/' -e 's/_/<\/i> /' | 100 sed -e 's/^_/<i>/' -e 's/_/<\/i>/' ) 101 # Process only underscores prefixed by space, 102 #+ or at beginning or end of line. 103 # Do not convert underscores embedded within a word! 104 line="$temp" 105 # Slows script execution. Can be optimized? 106 # =================================================== 107 fi 108 109 110 111 echo 112 echo "$line" 113 echo 114 } # End while 115 done 116 } # End process_text () 117 118 119 write_footers () # Termination tags. 120 { 121 echo "$FTR10" 122 echo "$FTR11" 123 } 124 125 126 # main () { 127 # ========= 128 write_headers 129 process_text 130 write_footers 131 # ========= 132 # } 133 134 exit $? 135 136 # Exercises: 137 # --------- 138 # 1) Fixup: Check for closing underscore before a comma or period. 139 # 2) Add a test for the presence of a closing underscore 140 #+ in phrases to be italicized. |
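The underscore-to-italics substitution inside process_text() is easy to misread, so here it is pulled out on its own and applied to a single invented test line:

#!/bin/bash
# italics-test.sh: the underscore substitution from tohtml.sh, in isolation.

line='Plain text with _an emphasized phrase_ in the middle.'

echo "$line" |
sed -e 's/ _/ <i>/' -e 's/_/<\/i> /' |
sed -e 's/^_/<i>/'  -e 's/_/<\/i>/'

# Plain text with <i>an emphasized phrase</i>  in the middle.
# (The doubled space is harmless in HTML, where runs of whitespace collapse.)

exit 0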
Here is something to warm the hearts of webmasters and mistresses: a script that saves weblogs.
Example A-27. Preserving weblogs
1 #!/bin/bash 2 # archiveweblogs.sh v1.0 3 4 # Troy Engel <tengel@fluid.com> 5 # Slightly modified by document author. 6 # Used with permission. 7 # 8 # This script will preserve the normally rotated and 9 #+ thrown away weblogs from a default RedHat/Apache installation. 10 # It will save the files with a date/time stamp in the filename, 11 #+ bzipped, to a given directory. 12 # 13 # Run this from crontab nightly at an off hour, 14 #+ as bzip2 can suck up some serious CPU on huge logs: 15 # 0 2 * * * /opt/sbin/archiveweblogs.sh 16 17 18 PROBLEM=66 19 20 # Set this to your backup dir. 21 BKP_DIR=/opt/backups/weblogs 22 23 # Default Apache/RedHat stuff 24 LOG_DAYS="4 3 2 1" 25 LOG_DIR=/var/log/httpd 26 LOG_FILES="access_log error_log" 27 28 # Default RedHat program locations 29 LS=/bin/ls 30 MV=/bin/mv 31 ID=/usr/bin/id 32 CUT=/bin/cut 33 COL=/usr/bin/column 34 BZ2=/usr/bin/bzip2 35 36 # Are we root? 37 USER=`$ID -u` 38 if [ "X$USER" != "X0" ]; then 39 echo "PANIC: Only root can run this script!" 40 exit $PROBLEM 41 fi 42 43 # Backup dir exists/writable? 44 if [ ! -x $BKP_DIR ]; then 45 echo "PANIC: $BKP_DIR doesn't exist or isn't writable!" 46 exit $PROBLEM 47 fi 48 49 # Move, rename and bzip2 the logs 50 for logday in $LOG_DAYS; do 51 for logfile in $LOG_FILES; do 52 MYFILE="$LOG_DIR/$logfile.$logday" 53 if [ -w $MYFILE ]; then 54 DTS=`$LS -lgo --time-style=+%Y%m%d $MYFILE | $COL -t | $CUT -d ' ' -f7` 55 $MV $MYFILE $BKP_DIR/$logfile.$DTS 56 $BZ2 $BKP_DIR/$logfile.$DTS 57 else 58 # Only spew an error if the file exits (ergo non-writable). 59 if [ -f $MYFILE ]; then 60 echo "ERROR: $MYFILE not writable. Skipping." 61 fi 62 fi 63 done 64 done 65 66 exit 0 |
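A hedged aside on the listing above: the datestamp is extracted by piping ls -lgo output through column and cut. On a system with GNU coreutils, the same stamp can also be read straight from the file's modification time, which is less sensitive to the ls output format:

#!/bin/bash
# Sketch only -- $MYFILE is a hypothetical rotated log.

MYFILE=/var/log/httpd/access_log.1

DTS=$(date -r "$MYFILE" +%Y%m%d)    # mtime of the file as YYYYMMDD
echo "Would save as: access_log.$DTS.bz2"

exit 0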
How to keep the shell from expanding and reinterpreting text strings.
Example A-28. Protecting literal strings
1 #! /bin/bash 2 # protect_literal.sh 3 4 # set -vx 5 6 :<<-'_Protect_Literal_String_Doc' 7 8 Copyright (c) Michael S. Zick, 2003; All Rights Reserved 9 License: Unrestricted reuse in any form, for any purpose. 10 Warranty: None 11 Revision: $ID$ 12 13 Documentation redirected to the Bash no-operation. 14 Bash will '/dev/null' this block when the script is first read. 15 (Uncomment the above set command to see this action.) 16 17 Remove the first (Sha-Bang) line when sourcing this as a library 18 procedure. Also comment out the example use code in the two 19 places where shown. 20 21 22 Usage: 23 _protect_literal_str 'Whatever string meets your ${fancy}' 24 Just echos the argument to standard out, hard quotes 25 restored. 26 27 $(_protect_literal_str 'Whatever string meets your ${fancy}') 28 as the right-hand-side of an assignment statement. 29 30 Does: 31 As the right-hand-side of an assignment, preserves the 32 hard quotes protecting the contents of the literal during 33 assignment. 34 35 Notes: 36 The strange names (_*) are used to avoid trampling on 37 the user's chosen names when this is sourced as a 38 library. 39 40 _Protect_Literal_String_Doc 41 42 # The 'for illustration' function form 43 44 _protect_literal_str() { 45 46 # Pick an un-used, non-printing character as local IFS. 47 # Not required, but shows that we are ignoring it. 48 local IFS=$'\x1B' # \ESC character 49 50 # Enclose the All-Elements-Of in hard quotes during assignment. 51 local tmp=$'\x27'$@$'\x27' 52 # local tmp=$'\''$@$'\'' # Even uglier. 53 54 local len=${#tmp} # Info only. 55 echo $tmp is $len long. # Output AND information. 56 } 57 58 # This is the short-named version. 59 _pls() { 60 local IFS=$'x1B' # \ESC character (not required) 61 echo $'\x27'$@$'\x27' # Hard quoted parameter glob 62 } 63 64 # :<<-'_Protect_Literal_String_Test' 65 # # # Remove the above "# " to disable this code. # # # 66 67 # See how that looks when printed. 68 echo 69 echo "- - Test One - -" 70 _protect_literal_str 'Hello $user' 71 _protect_literal_str 'Hello "${username}"' 72 echo 73 74 # Which yields: 75 # - - Test One - - 76 # 'Hello $user' is 13 long. 77 # 'Hello "${username}"' is 21 long. 78 79 # Looks as expected, but why all of the trouble? 80 # The difference is hidden inside the Bash internal order 81 #+ of operations. 82 # Which shows when you use it on the RHS of an assignment. 83 84 # Declare an array for test values. 85 declare -a arrayZ 86 87 # Assign elements with various types of quotes and escapes. 88 arrayZ=( zero "$(_pls 'Hello ${Me}')" 'Hello ${You}' "\'Pass: ${pw}\'" ) 89 90 # Now list that array and see what is there. 91 echo "- - Test Two - -" 92 for (( i=0 ; i<${#arrayZ[*]} ; i++ )) 93 do 94 echo Element $i: ${arrayZ[$i]} is: ${#arrayZ[$i]} long. 95 done 96 echo 97 98 # Which yields: 99 # - - Test Two - - 100 # Element 0: zero is: 4 long. # Our marker element 101 # Element 1: 'Hello ${Me}' is: 13 long. # Our "$(_pls '...' )" 102 # Element 2: Hello ${You} is: 12 long. # Quotes are missing 103 # Element 3: \'Pass: \' is: 10 long. # ${pw} expanded to nothing 104 105 # Now make an assignment with that result. 106 declare -a array2=( ${arrayZ[@]} ) 107 108 # And print what happened. 109 echo "- - Test Three - -" 110 for (( i=0 ; i<${#array2[*]} ; i++ )) 111 do 112 echo Element $i: ${array2[$i]} is: ${#array2[$i]} long. 113 done 114 echo 115 116 # Which yields: 117 # - - Test Three - - 118 # Element 0: zero is: 4 long. # Our marker element. 119 # Element 1: Hello ${Me} is: 11 long. # Intended result. 
120 # Element 2: Hello is: 5 long. # ${You} expanded to nothing. 121 # Element 3: 'Pass: is: 6 long. # Split on the whitespace. 122 # Element 4: ' is: 1 long. # The end quote is here now. 123 124 # Our Element 1 has had its leading and trailing hard quotes stripped. 125 # Although not shown, leading and trailing whitespace is also stripped. 126 # Now that the string contents are set, Bash will always, internally, 127 #+ hard quote the contents as required during its operations. 128 129 # Why? 130 # Considering our "$(_pls 'Hello ${Me}')" construction: 131 # " ... " -> Expansion required, strip the quotes. 132 # $( ... ) -> Replace with the result of..., strip this. 133 # _pls ' ... ' -> called with literal arguments, strip the quotes. 134 # The result returned includes hard quotes; BUT the above processing 135 #+ has already been done, so they become part of the value assigned. 136 # 137 # Similarly, during further usage of the string variable, the ${Me} 138 #+ is part of the contents (result) and survives any operations 139 # (Until explicitly told to evaluate the string). 140 141 # Hint: See what happens when the hard quotes ($'\x27') are replaced 142 #+ with soft quotes ($'\x22') in the above procedures. 143 # Interesting also is to remove the addition of any quoting. 144 145 # _Protect_Literal_String_Test 146 # # # Remove the above "# " to disable this code. # # # 147 148 exit 0 |
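A related aside, not part of Michael Zick's script: the printf builtin's %q format also produces a form that is safe to feed back to the shell. It escapes with backslashes rather than wrapping in hard quotes, but it addresses the same problem of surviving one more round of parsing without expansion.

#!/bin/bash
# printf-q.sh: sketch of the built-in alternative mentioned above.

printf '%q\n' 'Hello ${Me}'     # Hello\ \$\{Me\}
                                # (exact escaping may vary by Bash version)

# The escaped form survives being re-read by the shell, unexpanded:
eval echo $(printf '%q' 'Hello ${Me}')    # Hello ${Me}

exit 0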
But, what if you want the shell to expand and reinterpret strings?
Example A-29. Unprotecting literal strings
1 #! /bin/bash 2 # unprotect_literal.sh 3 4 # set -vx 5 6 :<<-'_UnProtect_Literal_String_Doc' 7 8 Copyright (c) Michael S. Zick, 2003; All Rights Reserved 9 License: Unrestricted reuse in any form, for any purpose. 10 Warranty: None 11 Revision: $ID$ 12 13 Documentation redirected to the Bash no-operation. Bash will 14 '/dev/null' this block when the script is first read. 15 (Uncomment the above set command to see this action.) 16 17 Remove the first (Sha-Bang) line when sourcing this as a library 18 procedure. Also comment out the example use code in the two 19 places where shown. 20 21 22 Usage: 23 Complement of the "$(_pls 'Literal String')" function. 24 (See the protect_literal.sh example.) 25 26 StringVar=$(_upls ProtectedSringVariable) 27 28 Does: 29 When used on the right-hand-side of an assignment statement; 30 makes the substitions embedded in the protected string. 31 32 Notes: 33 The strange names (_*) are used to avoid trampling on 34 the user's chosen names when this is sourced as a 35 library. 36 37 38 _UnProtect_Literal_String_Doc 39 40 _upls() { 41 local IFS=$'x1B' # \ESC character (not required) 42 eval echo $@ # Substitution on the glob. 43 } 44 45 # :<<-'_UnProtect_Literal_String_Test' 46 # # # Remove the above "# " to disable this code. # # # 47 48 49 _pls() { 50 local IFS=$'x1B' # \ESC character (not required) 51 echo $'\x27'$@$'\x27' # Hard quoted parameter glob 52 } 53 54 # Declare an array for test values. 55 declare -a arrayZ 56 57 # Assign elements with various types of quotes and escapes. 58 arrayZ=( zero "$(_pls 'Hello ${Me}')" 'Hello ${You}' "\'Pass: ${pw}\'" ) 59 60 # Now make an assignment with that result. 61 declare -a array2=( ${arrayZ[@]} ) 62 63 # Which yielded: 64 # - - Test Three - - 65 # Element 0: zero is: 4 long # Our marker element. 66 # Element 1: Hello ${Me} is: 11 long # Intended result. 67 # Element 2: Hello is: 5 long # ${You} expanded to nothing. 68 # Element 3: 'Pass: is: 6 long # Split on the whitespace. 69 # Element 4: ' is: 1 long # The end quote is here now. 70 71 # set -vx 72 73 # Initialize 'Me' to something for the embedded ${Me} substitution. 74 # This needs to be done ONLY just prior to evaluating the 75 #+ protected string. 76 # (This is why it was protected to begin with.) 77 78 Me="to the array guy." 79 80 # Set a string variable destination to the result. 81 newVar=$(_upls ${array2[1]}) 82 83 # Show what the contents are. 84 echo $newVar 85 86 # Do we really need a function to do this? 87 newerVar=$(eval echo ${array2[1]}) 88 echo $newerVar 89 90 # I guess not, but the _upls function gives us a place to hang 91 #+ the documentation on. 92 # This helps when we forget what a # construction like: 93 #+ $(eval echo ... ) means. 94 95 # What if Me isn't set when the protected string is evaluated? 96 unset Me 97 newestVar=$(_upls ${array2[1]}) 98 echo $newestVar 99 100 # Just gone, no hints, no runs, no errors. 101 102 # Why in the world? 103 # Setting the contents of a string variable containing character 104 #+ sequences that have a meaning in Bash is a general problem in 105 #+ script programming. 106 # 107 # This problem is now solved in eight lines of code 108 #+ (and four pages of description). 109 110 # Where is all this going? 111 # Dynamic content Web pages as an array of Bash strings. 112 # Content set per request by a Bash 'eval' command 113 #+ on the stored page template. 114 # Not intended to replace PHP, just an interesting thing to do. 115 ### 116 # Don't have a webserver application? 
117 # No problem, check the example directory of the Bash source; 118 #+ there is a Bash script for that also. 119 120 # _UnProtect_Literal_String_Test 121 # # # Remove the above "# " to disable this code. # # # 122 123 exit 0 |
This interesting script helps hunt down spammers.
Example A-30. Spammer Identification
1 #!/bin/bash 2 3 # $Id: is_spammer.bash,v 1.12.2.11 2004/10/01 21:42:33 mszick Exp $ 4 # Above line is RCS info. 5 6 # The latest version of this script is available from http://www.morethan.org. 7 # 8 # Spammer-identification 9 # by Michael S. Zick 10 # Used in the ABS Guide with permission. 11 12 13 14 ####################################################### 15 # Documentation 16 # See also "Quickstart" at end of script. 17 ####################################################### 18 19 :<<-'__is_spammer_Doc_' 20 21 Copyright (c) Michael S. Zick, 2004 22 License: Unrestricted reuse in any form, for any purpose. 23 Warranty: None -{Its a script; the user is on their own.}- 24 25 Impatient? 26 Application code: goto "# # # Hunt the Spammer' program code # # #" 27 Example output: ":<<-'_is_spammer_outputs_'" 28 How to use: Enter script name without arguments. 29 Or goto "Quickstart" at end of script. 30 31 Provides 32 Given a domain name or IP(v4) address as input: 33 34 Does an exhaustive set of queries to find the associated 35 network resources (short of recursing into TLDs). 36 37 Checks the IP(v4) addresses found against Blacklist 38 nameservers. 39 40 If found to be a blacklisted IP(v4) address, 41 reports the blacklist text records. 42 (Usually hyper-links to the specific report.) 43 44 Requires 45 A working Internet connection. 46 (Exercise: Add check and/or abort if not on-line when running script.) 47 Bash with arrays (2.05b+). 48 49 The external program 'dig' -- 50 a utility program provided with the 'bind' set of programs. 51 Specifically, the version which is part of Bind series 9.x 52 See: http://www.isc.org 53 54 All usages of 'dig' are limited to wrapper functions, 55 which may be rewritten as required. 56 See: dig_wrappers.bash for details. 57 ("Additional documentation" -- below) 58 59 Usage 60 Script requires a single argument, which may be: 61 1) A domain name; 62 2) An IP(v4) address; 63 3) A filename, with one name or address per line. 64 65 Script accepts an optional second argument, which may be: 66 1) A Blacklist server name; 67 2) A filename, with one Blacklist server name per line. 68 69 If the second argument is not provided, the script uses 70 a built-in set of (free) Blacklist servers. 71 72 See also, the Quickstart at the end of this script (after 'exit'). 73 74 Return Codes 75 0 - All OK 76 1 - Script failure 77 2 - Something is Blacklisted 78 79 Optional environment variables 80 SPAMMER_TRACE 81 If set to a writable file, 82 script will log an execution flow trace. 83 84 SPAMMER_DATA 85 If set to a writable file, script will dump its 86 discovered data in the form of GraphViz file. 87 See: http://www.research.att.com/sw/tools/graphviz 88 89 SPAMMER_LIMIT 90 Limits the depth of resource tracing. 91 92 Default is 2 levels. 93 94 A setting of 0 (zero) means 'unlimited' . . . 95 Caution: script might recurse the whole Internet! 96 97 A limit of 1 or 2 is most useful when processing 98 a file of domain names and addresses. 99 A higher limit can be useful when hunting spam gangs. 100 101 102 Additional documentation 103 Download the archived set of scripts 104 explaining and illustrating the function contained within this script. 105 http://personal.riverusers.com/mszick_clf.tar.bz2 106 107 108 Study notes 109 This script uses a large number of functions. 110 Nearly all general functions have their own example script. 111 Each of the example scripts have tutorial level comments. 112 113 Scripting project 114 Add support for IP(v6) addresses. 
115 IP(v6) addresses are recognized but not processed. 116 117 Advanced project 118 Add the reverse lookup detail to the discovered information. 119 120 Report the delegation chain and abuse contacts. 121 122 Modify the GraphViz file output to include the 123 newly discovered information. 124 125 __is_spammer_Doc_ 126 127 ####################################################### 128 129 130 131 132 #### Special IFS settings used for string parsing. #### 133 134 # Whitespace == :Space:Tab:Line Feed:Carriage Return: 135 WSP_IFS=$'\x20'$'\x09'$'\x0A'$'\x0D' 136 137 # No Whitespace == Line Feed:Carriage Return 138 NO_WSP=$'\x0A'$'\x0D' 139 140 # Field separator for dotted decimal IP addresses 141 ADR_IFS=${NO_WSP}'.' 142 143 # Array to dotted string conversions 144 DOT_IFS='.'${WSP_IFS} 145 146 # # # Pending operations stack machine # # # 147 # This set of functions described in func_stack.bash. 148 # (See "Additional documentation" above.) 149 # # # 150 151 # Global stack of pending operations. 152 declare -f -a _pending_ 153 # Global sentinel for stack runners 154 declare -i _p_ctrl_ 155 # Global holder for currently executing function 156 declare -f _pend_current_ 157 158 # # # Debug version only - remove for regular use # # # 159 # 160 # The function stored in _pend_hook_ is called 161 # immediately before each pending function is 162 # evaluated. Stack clean, _pend_current_ set. 163 # 164 # This thingy demonstrated in pend_hook.bash. 165 declare -f _pend_hook_ 166 # # # 167 168 # The do nothing function 169 pend_dummy() { : ; } 170 171 # Clear and initialize the function stack. 172 pend_init() { 173 unset _pending_[@] 174 pend_func pend_stop_mark 175 _pend_hook_='pend_dummy' # Debug only. 176 } 177 178 # Discard the top function on the stack. 179 pend_pop() { 180 if [ ${#_pending_[@]} -gt 0 ] 181 then 182 local -i _top_ 183 _top_=${#_pending_[@]}-1 184 unset _pending_[$_top_] 185 fi 186 } 187 188 # pend_func function_name [$(printf '%q\n' arguments)] 189 pend_func() { 190 local IFS=${NO_WSP} 191 set -f 192 _pending_[${#_pending_[@]}]=$@ 193 set +f 194 } 195 196 # The function which stops the release: 197 pend_stop_mark() { 198 _p_ctrl_=0 199 } 200 201 pend_mark() { 202 pend_func pend_stop_mark 203 } 204 205 # Execute functions until 'pend_stop_mark' . . . 206 pend_release() { 207 local -i _top_ # Declare _top_ as integer. 208 _p_ctrl_=${#_pending_[@]} 209 while [ ${_p_ctrl_} -gt 0 ] 210 do 211 _top_=${#_pending_[@]}-1 212 _pend_current_=${_pending_[$_top_]} 213 unset _pending_[$_top_] 214 $_pend_hook_ # Debug only. 215 eval $_pend_current_ 216 done 217 } 218 219 # Drop functions until 'pend_stop_mark' . . . 220 pend_drop() { 221 local -i _top_ 222 local _pd_ctrl_=${#_pending_[@]} 223 while [ ${_pd_ctrl_} -gt 0 ] 224 do 225 _top_=$_pd_ctrl_-1 226 if [ "${_pending_[$_top_]}" == 'pend_stop_mark' ] 227 then 228 unset _pending_[$_top_] 229 break 230 else 231 unset _pending_[$_top_] 232 _pd_ctrl_=$_top_ 233 fi 234 done 235 if [ ${#_pending_[@]} -eq 0 ] 236 then 237 pend_func pend_stop_mark 238 fi 239 } 240 241 #### Array editors #### 242 243 # This function described in edit_exact.bash. 244 # (See "Additional documentation," above.) 
245 # edit_exact <excludes_array_name> <target_array_name> 246 edit_exact() { 247 [ $# -eq 2 ] || 248 [ $# -eq 3 ] || return 1 249 local -a _ee_Excludes 250 local -a _ee_Target 251 local _ee_x 252 local _ee_t 253 local IFS=${NO_WSP} 254 set -f 255 eval _ee_Excludes=\( \$\{$1\[@\]\} \) 256 eval _ee_Target=\( \$\{$2\[@\]\} \) 257 local _ee_len=${#_ee_Target[@]} # Original length. 258 local _ee_cnt=${#_ee_Excludes[@]} # Exclude list length. 259 [ ${_ee_len} -ne 0 ] || return 0 # Can't edit zero length. 260 [ ${_ee_cnt} -ne 0 ] || return 0 # Can't edit zero length. 261 for (( x = 0; x < ${_ee_cnt} ; x++ )) 262 do 263 _ee_x=${_ee_Excludes[$x]} 264 for (( n = 0 ; n < ${_ee_len} ; n++ )) 265 do 266 _ee_t=${_ee_Target[$n]} 267 if [ x"${_ee_t}" == x"${_ee_x}" ] 268 then 269 unset _ee_Target[$n] # Discard match. 270 [ $# -eq 2 ] && break # If 2 arguments, then done. 271 fi 272 done 273 done 274 eval $2=\( \$\{_ee_Target\[@\]\} \) 275 set +f 276 return 0 277 } 278 279 # This function described in edit_by_glob.bash. 280 # edit_by_glob <excludes_array_name> <target_array_name> 281 edit_by_glob() { 282 [ $# -eq 2 ] || 283 [ $# -eq 3 ] || return 1 284 local -a _ebg_Excludes 285 local -a _ebg_Target 286 local _ebg_x 287 local _ebg_t 288 local IFS=${NO_WSP} 289 set -f 290 eval _ebg_Excludes=\( \$\{$1\[@\]\} \) 291 eval _ebg_Target=\( \$\{$2\[@\]\} \) 292 local _ebg_len=${#_ebg_Target[@]} 293 local _ebg_cnt=${#_ebg_Excludes[@]} 294 [ ${_ebg_len} -ne 0 ] || return 0 295 [ ${_ebg_cnt} -ne 0 ] || return 0 296 for (( x = 0; x < ${_ebg_cnt} ; x++ )) 297 do 298 _ebg_x=${_ebg_Excludes[$x]} 299 for (( n = 0 ; n < ${_ebg_len} ; n++ )) 300 do 301 [ $# -eq 3 ] && _ebg_x=${_ebg_x}'*' # Do prefix edit 302 if [ ${_ebg_Target[$n]:=} ] #+ if defined & set. 303 then 304 _ebg_t=${_ebg_Target[$n]/#${_ebg_x}/} 305 [ ${#_ebg_t} -eq 0 ] && unset _ebg_Target[$n] 306 fi 307 done 308 done 309 eval $2=\( \$\{_ebg_Target\[@\]\} \) 310 set +f 311 return 0 312 } 313 314 # This function described in unique_lines.bash. 315 # unique_lines <in_name> <out_name> 316 unique_lines() { 317 [ $# -eq 2 ] || return 1 318 local -a _ul_in 319 local -a _ul_out 320 local -i _ul_cnt 321 local -i _ul_pos 322 local _ul_tmp 323 local IFS=${NO_WSP} 324 set -f 325 eval _ul_in=\( \$\{$1\[@\]\} \) 326 _ul_cnt=${#_ul_in[@]} 327 for (( _ul_pos = 0 ; _ul_pos < ${_ul_cnt} ; _ul_pos++ )) 328 do 329 if [ ${_ul_in[${_ul_pos}]:=} ] # If defined & not empty 330 then 331 _ul_tmp=${_ul_in[${_ul_pos}]} 332 _ul_out[${#_ul_out[@]}]=${_ul_tmp} 333 for (( zap = _ul_pos ; zap < ${_ul_cnt} ; zap++ )) 334 do 335 [ ${_ul_in[${zap}]:=} ] && 336 [ 'x'${_ul_in[${zap}]} == 'x'${_ul_tmp} ] && 337 unset _ul_in[${zap}] 338 done 339 fi 340 done 341 eval $2=\( \$\{_ul_out\[@\]\} \) 342 set +f 343 return 0 344 } 345 346 # This function described in char_convert.bash. 
347 # to_lower <string> 348 to_lower() { 349 [ $# -eq 1 ] || return 1 350 local _tl_out 351 _tl_out=${1//A/a} 352 _tl_out=${_tl_out//B/b} 353 _tl_out=${_tl_out//C/c} 354 _tl_out=${_tl_out//D/d} 355 _tl_out=${_tl_out//E/e} 356 _tl_out=${_tl_out//F/f} 357 _tl_out=${_tl_out//G/g} 358 _tl_out=${_tl_out//H/h} 359 _tl_out=${_tl_out//I/i} 360 _tl_out=${_tl_out//J/j} 361 _tl_out=${_tl_out//K/k} 362 _tl_out=${_tl_out//L/l} 363 _tl_out=${_tl_out//M/m} 364 _tl_out=${_tl_out//N/n} 365 _tl_out=${_tl_out//O/o} 366 _tl_out=${_tl_out//P/p} 367 _tl_out=${_tl_out//Q/q} 368 _tl_out=${_tl_out//R/r} 369 _tl_out=${_tl_out//S/s} 370 _tl_out=${_tl_out//T/t} 371 _tl_out=${_tl_out//U/u} 372 _tl_out=${_tl_out//V/v} 373 _tl_out=${_tl_out//W/w} 374 _tl_out=${_tl_out//X/x} 375 _tl_out=${_tl_out//Y/y} 376 _tl_out=${_tl_out//Z/z} 377 echo ${_tl_out} 378 return 0 379 } 380 381 #### Application helper functions #### 382 383 # Not everybody uses dots as separators (APNIC, for example). 384 # This function described in to_dot.bash 385 # to_dot <string> 386 to_dot() { 387 [ $# -eq 1 ] || return 1 388 echo ${1//[#|@|%]/.} 389 return 0 390 } 391 392 # This function described in is_number.bash. 393 # is_number <input> 394 is_number() { 395 [ "$#" -eq 1 ] || return 1 # is blank? 396 [ x"$1" == 'x0' ] && return 0 # is zero? 397 local -i tst 398 let tst=$1 2>/dev/null # else is numeric! 399 return $? 400 } 401 402 # This function described in is_address.bash. 403 # is_address <input> 404 is_address() { 405 [ $# -eq 1 ] || return 1 # Blank ==> false 406 local -a _ia_input 407 local IFS=${ADR_IFS} 408 _ia_input=( $1 ) 409 if [ ${#_ia_input[@]} -eq 4 ] && 410 is_number ${_ia_input[0]} && 411 is_number ${_ia_input[1]} && 412 is_number ${_ia_input[2]} && 413 is_number ${_ia_input[3]} && 414 [ ${_ia_input[0]} -lt 256 ] && 415 [ ${_ia_input[1]} -lt 256 ] && 416 [ ${_ia_input[2]} -lt 256 ] && 417 [ ${_ia_input[3]} -lt 256 ] 418 then 419 return 0 420 else 421 return 1 422 fi 423 } 424 425 # This function described in split_ip.bash. 426 # split_ip <IP_address> 427 #+ <array_name_norm> [<array_name_rev>] 428 split_ip() { 429 [ $# -eq 3 ] || # Either three 430 [ $# -eq 2 ] || return 1 #+ or two arguments 431 local -a _si_input 432 local IFS=${ADR_IFS} 433 _si_input=( $1 ) 434 IFS=${WSP_IFS} 435 eval $2=\(\ \$\{_si_input\[@\]\}\ \) 436 if [ $# -eq 3 ] 437 then 438 # Build query order array. 439 local -a _dns_ip 440 _dns_ip[0]=${_si_input[3]} 441 _dns_ip[1]=${_si_input[2]} 442 _dns_ip[2]=${_si_input[1]} 443 _dns_ip[3]=${_si_input[0]} 444 eval $3=\(\ \$\{_dns_ip\[@\]\}\ \) 445 fi 446 return 0 447 } 448 449 # This function described in dot_array.bash. 450 # dot_array <array_name> 451 dot_array() { 452 [ $# -eq 1 ] || return 1 # Single argument required. 453 local -a _da_input 454 eval _da_input=\(\ \$\{$1\[@\]\}\ \) 455 local IFS=${DOT_IFS} 456 local _da_output=${_da_input[@]} 457 IFS=${WSP_IFS} 458 echo ${_da_output} 459 return 0 460 } 461 462 # This function described in file_to_array.bash 463 # file_to_array <file_name> <line_array_name> 464 file_to_array() { 465 [ $# -eq 2 ] || return 1 # Two arguments required. 466 local IFS=${NO_WSP} 467 local -a _fta_tmp_ 468 _fta_tmp_=( $(cat $1) ) 469 eval $2=\( \$\{_fta_tmp_\[@\]\} \) 470 return 0 471 } 472 473 # Columnized print of an array of multi-field strings. 
474 # col_print <array_name> <min_space> < 475 #+ tab_stop [tab_stops]> 476 col_print() { 477 [ $# -gt 2 ] || return 0 478 local -a _cp_inp 479 local -a _cp_spc 480 local -a _cp_line 481 local _cp_min 482 local _cp_mcnt 483 local _cp_pos 484 local _cp_cnt 485 local _cp_tab 486 local -i _cp 487 local -i _cpf 488 local _cp_fld 489 # WARNING: FOLLOWING LINE NOT BLANK -- IT IS QUOTED SPACES. 490 local _cp_max=' ' 491 set -f 492 local IFS=${NO_WSP} 493 eval _cp_inp=\(\ \$\{$1\[@\]\}\ \) 494 [ ${#_cp_inp[@]} -gt 0 ] || return 0 # Empty is easy. 495 _cp_mcnt=$2 496 _cp_min=${_cp_max:1:${_cp_mcnt}} 497 shift 498 shift 499 _cp_cnt=$# 500 for (( _cp = 0 ; _cp < _cp_cnt ; _cp++ )) 501 do 502 _cp_spc[${#_cp_spc[@]}]="${_cp_max:2:$1}" #" 503 shift 504 done 505 _cp_cnt=${#_cp_inp[@]} 506 for (( _cp = 0 ; _cp < _cp_cnt ; _cp++ )) 507 do 508 _cp_pos=1 509 IFS=${NO_WSP}$'\x20' 510 _cp_line=( ${_cp_inp[${_cp}]} ) 511 IFS=${NO_WSP} 512 for (( _cpf = 0 ; _cpf < ${#_cp_line[@]} ; _cpf++ )) 513 do 514 _cp_tab=${_cp_spc[${_cpf}]:${_cp_pos}} 515 if [ ${#_cp_tab} -lt ${_cp_mcnt} ] 516 then 517 _cp_tab="${_cp_min}" 518 fi 519 echo -n "${_cp_tab}" 520 (( _cp_pos = ${_cp_pos} + ${#_cp_tab} )) 521 _cp_fld="${_cp_line[${_cpf}]}" 522 echo -n ${_cp_fld} 523 (( _cp_pos = ${_cp_pos} + ${#_cp_fld} )) 524 done 525 echo 526 done 527 set +f 528 return 0 529 } 530 531 # # # # 'Hunt the Spammer' data flow # # # # 532 533 # Application return code 534 declare -i _hs_RC 535 536 # Original input, from which IP addresses are removed 537 # After which, domain names to check 538 declare -a uc_name 539 540 # Original input IP addresses are moved here 541 # After which, IP addresses to check 542 declare -a uc_address 543 544 # Names against which address expansion run 545 # Ready for name detail lookup 546 declare -a chk_name 547 548 # Addresses against which name expansion run 549 # Ready for address detail lookup 550 declare -a chk_address 551 552 # Recursion is depth-first-by-name. 553 # The expand_input_address maintains this list 554 #+ to prohibit looking up addresses twice during 555 #+ domain name recursion. 556 declare -a been_there_addr 557 been_there_addr=( '127.0.0.1' ) # Whitelist localhost 558 559 # Names which we have checked (or given up on) 560 declare -a known_name 561 562 # Addresses which we have checked (or given up on) 563 declare -a known_address 564 565 # List of zero or more Blacklist servers to check. 566 # Each 'known_address' will be checked against each server, 567 #+ with negative replies and failures suppressed. 568 declare -a list_server 569 570 # Indirection limit - set to zero == no limit 571 indirect=${SPAMMER_LIMIT:=2} 572 573 # # # # 'Hunt the Spammer' information output data # # # # 574 575 # Any domain name may have multiple IP addresses. 576 # Any IP address may have multiple domain names. 577 # Therefore, track unique address-name pairs. 578 declare -a known_pair 579 declare -a reverse_pair 580 581 # In addition to the data flow variables; known_address 582 #+ known_name and list_server, the following are output to the 583 #+ external graphics interface file. 584 585 # Authority chain, parent -> SOA fields. 
586 declare -a auth_chain 587 588 # Reference chain, parent name -> child name 589 declare -a ref_chain 590 591 # DNS chain - domain name -> address 592 declare -a name_address 593 594 # Name and service pairs - domain name -> service 595 declare -a name_srvc 596 597 # Name and resource pairs - domain name -> Resource Record 598 declare -a name_resource 599 600 # Parent and Child pairs - parent name -> child name 601 # This MAY NOT be the same as the ref_chain followed! 602 declare -a parent_child 603 604 # Address and Blacklist hit pairs - address->server 605 declare -a address_hits 606 607 # Dump interface file data 608 declare -f _dot_dump 609 _dot_dump=pend_dummy # Initially a no-op 610 611 # Data dump is enabled by setting the environment variable SPAMMER_DATA 612 #+ to the name of a writable file. 613 declare _dot_file 614 615 # Helper function for the dump-to-dot-file function 616 # dump_to_dot <array_name> <prefix> 617 dump_to_dot() { 618 local -a _dda_tmp 619 local -i _dda_cnt 620 local _dda_form=' '${2}'%04u %s\n' 621 local IFS=${NO_WSP} 622 eval _dda_tmp=\(\ \$\{$1\[@\]\}\ \) 623 _dda_cnt=${#_dda_tmp[@]} 624 if [ ${_dda_cnt} -gt 0 ] 625 then 626 for (( _dda = 0 ; _dda < _dda_cnt ; _dda++ )) 627 do 628 printf "${_dda_form}" \ 629 "${_dda}" "${_dda_tmp[${_dda}]}" >>${_dot_file} 630 done 631 fi 632 } 633 634 # Which will also set _dot_dump to this function . . . 635 dump_dot() { 636 local -i _dd_cnt 637 echo '# Data vintage: '$(date -R) >${_dot_file} 638 echo '# ABS Guide: is_spammer.bash; v2, 2004-msz' >>${_dot_file} 639 echo >>${_dot_file} 640 echo 'digraph G {' >>${_dot_file} 641 642 if [ ${#known_name[@]} -gt 0 ] 643 then 644 echo >>${_dot_file} 645 echo '# Known domain name nodes' >>${_dot_file} 646 _dd_cnt=${#known_name[@]} 647 for (( _dd = 0 ; _dd < _dd_cnt ; _dd++ )) 648 do 649 printf ' N%04u [label="%s"] ;\n' \ 650 "${_dd}" "${known_name[${_dd}]}" >>${_dot_file} 651 done 652 fi 653 654 if [ ${#known_address[@]} -gt 0 ] 655 then 656 echo >>${_dot_file} 657 echo '# Known address nodes' >>${_dot_file} 658 _dd_cnt=${#known_address[@]} 659 for (( _dd = 0 ; _dd < _dd_cnt ; _dd++ )) 660 do 661 printf ' A%04u [label="%s"] ;\n' \ 662 "${_dd}" "${known_address[${_dd}]}" >>${_dot_file} 663 done 664 fi 665 666 echo >>${_dot_file} 667 echo '/*' >>${_dot_file} 668 echo ' * Known relationships :: User conversion to' >>${_dot_file} 669 echo ' * graphic form by hand or program required.' >>${_dot_file} 670 echo ' *' >>${_dot_file} 671 672 if [ ${#auth_chain[@]} -gt 0 ] 673 then 674 echo >>${_dot_file} 675 echo '# Authority ref. edges followed & field source.' >>${_dot_file} 676 dump_to_dot auth_chain AC 677 fi 678 679 if [ ${#ref_chain[@]} -gt 0 ] 680 then 681 echo >>${_dot_file} 682 echo '# Name ref. edges followed and field source.' 
>>${_dot_file} 683 dump_to_dot ref_chain RC 684 fi 685 686 if [ ${#name_address[@]} -gt 0 ] 687 then 688 echo >>${_dot_file} 689 echo '# Known name->address edges' >>${_dot_file} 690 dump_to_dot name_address NA 691 fi 692 693 if [ ${#name_srvc[@]} -gt 0 ] 694 then 695 echo >>${_dot_file} 696 echo '# Known name->service edges' >>${_dot_file} 697 dump_to_dot name_srvc NS 698 fi 699 700 if [ ${#name_resource[@]} -gt 0 ] 701 then 702 echo >>${_dot_file} 703 echo '# Known name->resource edges' >>${_dot_file} 704 dump_to_dot name_resource NR 705 fi 706 707 if [ ${#parent_child[@]} -gt 0 ] 708 then 709 echo >>${_dot_file} 710 echo '# Known parent->child edges' >>${_dot_file} 711 dump_to_dot parent_child PC 712 fi 713 714 if [ ${#list_server[@]} -gt 0 ] 715 then 716 echo >>${_dot_file} 717 echo '# Known Blacklist nodes' >>${_dot_file} 718 _dd_cnt=${#list_server[@]} 719 for (( _dd = 0 ; _dd < _dd_cnt ; _dd++ )) 720 do 721 printf ' LS%04u [label="%s"] ;\n' \ 722 "${_dd}" "${list_server[${_dd}]}" >>${_dot_file} 723 done 724 fi 725 726 unique_lines address_hits address_hits 727 if [ ${#address_hits[@]} -gt 0 ] 728 then 729 echo >>${_dot_file} 730 echo '# Known address->Blacklist_hit edges' >>${_dot_file} 731 echo '# CAUTION: dig warnings can trigger false hits.' >>${_dot_file} 732 dump_to_dot address_hits AH 733 fi 734 echo >>${_dot_file} 735 echo ' *' >>${_dot_file} 736 echo ' * That is a lot of relationships. Happy graphing.' >>${_dot_file} 737 echo ' */' >>${_dot_file} 738 echo '}' >>${_dot_file} 739 return 0 740 } 741 742 # # # # 'Hunt the Spammer' execution flow # # # # 743 744 # Execution trace is enabled by setting the 745 #+ environment variable SPAMMER_TRACE to the name of a writable file. 746 declare -a _trace_log 747 declare _log_file 748 749 # Function to fill the trace log 750 trace_logger() { 751 _trace_log[${#_trace_log[@]}]=${_pend_current_} 752 } 753 754 # Dump trace log to file function variable. 755 declare -f _log_dump 756 _log_dump=pend_dummy # Initially a no-op. 757 758 # Dump the trace log to a file. 759 dump_log() { 760 local -i _dl_cnt 761 _dl_cnt=${#_trace_log[@]} 762 for (( _dl = 0 ; _dl < _dl_cnt ; _dl++ )) 763 do 764 echo ${_trace_log[${_dl}]} >> ${_log_file} 765 done 766 _dl_cnt=${#_pending_[@]} 767 if [ ${_dl_cnt} -gt 0 ] 768 then 769 _dl_cnt=${_dl_cnt}-1 770 echo '# # # Operations stack not empty # # #' >> ${_log_file} 771 for (( _dl = ${_dl_cnt} ; _dl >= 0 ; _dl-- )) 772 do 773 echo ${_pending_[${_dl}]} >> ${_log_file} 774 done 775 fi 776 } 777 778 # # # Utility program 'dig' wrappers # # # 779 # 780 # These wrappers are derived from the 781 #+ examples shown in dig_wrappers.bash. 782 # 783 # The major difference is these return 784 #+ their results as a list in an array. 785 # 786 # See dig_wrappers.bash for details and 787 #+ use that script to develop any changes. 788 # 789 # # # 790 791 # Short form answer: 'dig' parses answer. 792 793 # Forward lookup :: Name -> Address 794 # short_fwd <domain_name> <array_name> 795 short_fwd() { 796 local -a _sf_reply 797 local -i _sf_rc 798 local -i _sf_cnt 799 IFS=${NO_WSP} 800 echo -n '.' 801 # echo 'sfwd: '${1} 802 _sf_reply=( $(dig +short ${1} -c in -t a 2>/dev/null) ) 803 _sf_rc=$? 804 if [ ${_sf_rc} -ne 0 ] 805 then 806 _trace_log[${#_trace_log[@]}]='## Lookup error '${_sf_rc}' on '${1}' ##' 807 # [ ${_sf_rc} -ne 9 ] && pend_drop 808 return ${_sf_rc} 809 else 810 # Some versions of 'dig' return warnings on stdout. 
811 _sf_cnt=${#_sf_reply[@]} 812 for (( _sf = 0 ; _sf < ${_sf_cnt} ; _sf++ )) 813 do 814 [ 'x'${_sf_reply[${_sf}]:0:2} == 'x;;' ] && 815 unset _sf_reply[${_sf}] 816 done 817 eval $2=\( \$\{_sf_reply\[@\]\} \) 818 fi 819 return 0 820 } 821 822 # Reverse lookup :: Address -> Name 823 # short_rev <ip_address> <array_name> 824 short_rev() { 825 local -a _sr_reply 826 local -i _sr_rc 827 local -i _sr_cnt 828 IFS=${NO_WSP} 829 echo -n '.' 830 # echo 'srev: '${1} 831 _sr_reply=( $(dig +short -x ${1} 2>/dev/null) ) 832 _sr_rc=$? 833 if [ ${_sr_rc} -ne 0 ] 834 then 835 _trace_log[${#_trace_log[@]}]='## Lookup error '${_sr_rc}' on '${1}' ##' 836 # [ ${_sr_rc} -ne 9 ] && pend_drop 837 return ${_sr_rc} 838 else 839 # Some versions of 'dig' return warnings on stdout. 840 _sr_cnt=${#_sr_reply[@]} 841 for (( _sr = 0 ; _sr < ${_sr_cnt} ; _sr++ )) 842 do 843 [ 'x'${_sr_reply[${_sr}]:0:2} == 'x;;' ] && 844 unset _sr_reply[${_sr}] 845 done 846 eval $2=\( \$\{_sr_reply\[@\]\} \) 847 fi 848 return 0 849 } 850 851 # Special format lookup used to query blacklist servers. 852 # short_text <ip_address> <array_name> 853 short_text() { 854 local -a _st_reply 855 local -i _st_rc 856 local -i _st_cnt 857 IFS=${NO_WSP} 858 # echo 'stxt: '${1} 859 _st_reply=( $(dig +short ${1} -c in -t txt 2>/dev/null) ) 860 _st_rc=$? 861 if [ ${_st_rc} -ne 0 ] 862 then 863 _trace_log[${#_trace_log[@]}]='##Text lookup error '${_st_rc}' on '${1}'##' 864 # [ ${_st_rc} -ne 9 ] && pend_drop 865 return ${_st_rc} 866 else 867 # Some versions of 'dig' return warnings on stdout. 868 _st_cnt=${#_st_reply[@]} 869 for (( _st = 0 ; _st < ${#_st_cnt} ; _st++ )) 870 do 871 [ 'x'${_st_reply[${_st}]:0:2} == 'x;;' ] && 872 unset _st_reply[${_st}] 873 done 874 eval $2=\( \$\{_st_reply\[@\]\} \) 875 fi 876 return 0 877 } 878 879 # The long forms, a.k.a., the parse it yourself versions 880 881 # RFC 2782 Service lookups 882 # dig +noall +nofail +answer _ldap._tcp.openldap.org -t srv 883 # _<service>._<protocol>.<domain_name> 884 # _ldap._tcp.openldap.org. 3600 IN SRV 0 0 389 ldap.openldap.org. 885 # domain TTL Class SRV Priority Weight Port Target 886 887 # Forward lookup :: Name -> poor man's zone transfer 888 # long_fwd <domain_name> <array_name> 889 long_fwd() { 890 local -a _lf_reply 891 local -i _lf_rc 892 local -i _lf_cnt 893 IFS=${NO_WSP} 894 echo -n ':' 895 # echo 'lfwd: '${1} 896 _lf_reply=( $( 897 dig +noall +nofail +answer +authority +additional \ 898 ${1} -t soa ${1} -t mx ${1} -t any 2>/dev/null) ) 899 _lf_rc=$? 900 if [ ${_lf_rc} -ne 0 ] 901 then 902 _trace_log[${#_trace_log[@]}]='# Zone lookup err '${_lf_rc}' on '${1}' #' 903 # [ ${_lf_rc} -ne 9 ] && pend_drop 904 return ${_lf_rc} 905 else 906 # Some versions of 'dig' return warnings on stdout. 907 _lf_cnt=${#_lf_reply[@]} 908 for (( _lf = 0 ; _lf < ${_lf_cnt} ; _lf++ )) 909 do 910 [ 'x'${_lf_reply[${_lf}]:0:2} == 'x;;' ] && 911 unset _lf_reply[${_lf}] 912 done 913 eval $2=\( \$\{_lf_reply\[@\]\} \) 914 fi 915 return 0 916 } 917 # The reverse lookup domain name corresponding to the IPv6 address: 918 # 4321:0:1:2:3:4:567:89ab 919 # would be (nibble, I.E: Hexdigit) reversed: 920 # b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1.0.0.0.0.0.0.0.1.2.3.4.IP6.ARPA. 921 922 # Reverse lookup :: Address -> poor man's delegation chain 923 # long_rev <rev_ip_address> <array_name> 924 long_rev() { 925 local -a _lr_reply 926 local -i _lr_rc 927 local -i _lr_cnt 928 local _lr_dns 929 _lr_dns=${1}'.in-addr.arpa.' 
930 IFS=${NO_WSP} 931 echo -n ':' 932 # echo 'lrev: '${1} 933 _lr_reply=( $( 934 dig +noall +nofail +answer +authority +additional \ 935 ${_lr_dns} -t soa ${_lr_dns} -t any 2>/dev/null) ) 936 _lr_rc=$? 937 if [ ${_lr_rc} -ne 0 ] 938 then 939 _trace_log[${#_trace_log[@]}]='# Deleg lkp error '${_lr_rc}' on '${1}' #' 940 # [ ${_lr_rc} -ne 9 ] && pend_drop 941 return ${_lr_rc} 942 else 943 # Some versions of 'dig' return warnings on stdout. 944 _lr_cnt=${#_lr_reply[@]} 945 for (( _lr = 0 ; _lr < ${_lr_cnt} ; _lr++ )) 946 do 947 [ 'x'${_lr_reply[${_lr}]:0:2} == 'x;;' ] && 948 unset _lr_reply[${_lr}] 949 done 950 eval $2=\( \$\{_lr_reply\[@\]\} \) 951 fi 952 return 0 953 } 954 955 # # # Application specific functions # # # 956 957 # Mung a possible name; suppresses root and TLDs. 958 # name_fixup <string> 959 name_fixup(){ 960 local -a _nf_tmp 961 local -i _nf_end 962 local _nf_str 963 local IFS 964 _nf_str=$(to_lower ${1}) 965 _nf_str=$(to_dot ${_nf_str}) 966 _nf_end=${#_nf_str}-1 967 [ ${_nf_str:${_nf_end}} != '.' ] && 968 _nf_str=${_nf_str}'.' 969 IFS=${ADR_IFS} 970 _nf_tmp=( ${_nf_str} ) 971 IFS=${WSP_IFS} 972 _nf_end=${#_nf_tmp[@]} 973 case ${_nf_end} in 974 0) # No dots, only dots. 975 echo 976 return 1 977 ;; 978 1) # Only a TLD. 979 echo 980 return 1 981 ;; 982 2) # Maybe okay. 983 echo ${_nf_str} 984 return 0 985 # Needs a lookup table? 986 if [ ${#_nf_tmp[1]} -eq 2 ] 987 then # Country coded TLD. 988 echo 989 return 1 990 else 991 echo ${_nf_str} 992 return 0 993 fi 994 ;; 995 esac 996 echo ${_nf_str} 997 return 0 998 } 999 1000 # Grope and mung original input(s). 1001 split_input() { 1002 [ ${#uc_name[@]} -gt 0 ] || return 0 1003 local -i _si_cnt 1004 local -i _si_len 1005 local _si_str 1006 unique_lines uc_name uc_name 1007 _si_cnt=${#uc_name[@]} 1008 for (( _si = 0 ; _si < _si_cnt ; _si++ )) 1009 do 1010 _si_str=${uc_name[$_si]} 1011 if is_address ${_si_str} 1012 then 1013 uc_address[${#uc_address[@]}]=${_si_str} 1014 unset uc_name[$_si] 1015 else 1016 if ! uc_name[$_si]=$(name_fixup ${_si_str}) 1017 then 1018 unset ucname[$_si] 1019 fi 1020 fi 1021 done 1022 uc_name=( ${uc_name[@]} ) 1023 _si_cnt=${#uc_name[@]} 1024 _trace_log[${#_trace_log[@]}]='#Input '${_si_cnt}' unchkd name input(s).#' 1025 _si_cnt=${#uc_address[@]} 1026 _trace_log[${#_trace_log[@]}]='#Input '${_si_cnt}' unchkd addr input(s).#' 1027 return 0 1028 } 1029 1030 # # # Discovery functions -- recursively interlocked by external data # # # 1031 # # # The leading 'if list is empty; return 0' in each is required. # # # 1032 1033 # Recursion limiter 1034 # limit_chk() <next_level> 1035 limit_chk() { 1036 local -i _lc_lmt 1037 # Check indirection limit. 1038 if [ ${indirect} -eq 0 ] || [ $# -eq 0 ] 1039 then 1040 # The 'do-forever' choice 1041 echo 1 # Any value will do. 1042 return 0 # OK to continue. 1043 else 1044 # Limiting is in effect. 1045 if [ ${indirect} -lt ${1} ] 1046 then 1047 echo ${1} # Whatever. 1048 return 1 # Stop here. 1049 else 1050 _lc_lmt=${1}+1 # Bump the given limit. 1051 echo ${_lc_lmt} # Echo it. 1052 return 0 # OK to continue. 1053 fi 1054 fi 1055 } 1056 1057 # For each name in uc_name: 1058 # Move name to chk_name. 1059 # Add addresses to uc_address. 1060 # Pend expand_input_address. 1061 # Repeat until nothing new found. 1062 # expand_input_name <indirection_limit> 1063 expand_input_name() { 1064 [ ${#uc_name[@]} -gt 0 ] || return 0 1065 local -a _ein_addr 1066 local -a _ein_new 1067 local -i _ucn_cnt 1068 local -i _ein_cnt 1069 local _ein_tst 1070 _ucn_cnt=${#uc_name[@]} 1071 1072 if ! 
_ein_cnt=$(limit_chk ${1}) 1073 then 1074 return 0 1075 fi 1076 1077 for (( _ein = 0 ; _ein < _ucn_cnt ; _ein++ )) 1078 do 1079 if short_fwd ${uc_name[${_ein}]} _ein_new 1080 then 1081 for (( _ein_cnt = 0 ; _ein_cnt < ${#_ein_new[@]}; _ein_cnt++ )) 1082 do 1083 _ein_tst=${_ein_new[${_ein_cnt}]} 1084 if is_address ${_ein_tst} 1085 then 1086 _ein_addr[${#_ein_addr[@]}]=${_ein_tst} 1087 fi 1088 done 1089 fi 1090 done 1091 unique_lines _ein_addr _ein_addr # Scrub duplicates. 1092 edit_exact chk_address _ein_addr # Scrub pending detail. 1093 edit_exact known_address _ein_addr # Scrub already detailed. 1094 if [ ${#_ein_addr[@]} -gt 0 ] # Anything new? 1095 then 1096 uc_address=( ${uc_address[@]} ${_ein_addr[@]} ) 1097 pend_func expand_input_address ${1} 1098 _trace_log[${#_trace_log[@]}]='#Add '${#_ein_addr[@]}' unchkd addr inp.#' 1099 fi 1100 edit_exact chk_name uc_name # Scrub pending detail. 1101 edit_exact known_name uc_name # Scrub already detailed. 1102 if [ ${#uc_name[@]} -gt 0 ] 1103 then 1104 chk_name=( ${chk_name[@]} ${uc_name[@]} ) 1105 pend_func detail_each_name ${1} 1106 fi 1107 unset uc_name[@] 1108 return 0 1109 } 1110 1111 # For each address in uc_address: 1112 # Move address to chk_address. 1113 # Add names to uc_name. 1114 # Pend expand_input_name. 1115 # Repeat until nothing new found. 1116 # expand_input_address <indirection_limit> 1117 expand_input_address() { 1118 [ ${#uc_address[@]} -gt 0 ] || return 0 1119 local -a _eia_addr 1120 local -a _eia_name 1121 local -a _eia_new 1122 local -i _uca_cnt 1123 local -i _eia_cnt 1124 local _eia_tst 1125 unique_lines uc_address _eia_addr 1126 unset uc_address[@] 1127 edit_exact been_there_addr _eia_addr 1128 _uca_cnt=${#_eia_addr[@]} 1129 [ ${_uca_cnt} -gt 0 ] && 1130 been_there_addr=( ${been_there_addr[@]} ${_eia_addr[@]} ) 1131 1132 for (( _eia = 0 ; _eia < _uca_cnt ; _eia++ )) 1133 do 1134 if short_rev ${_eia_addr[${_eia}]} _eia_new 1135 then 1136 for (( _eia_cnt = 0 ; _eia_cnt < ${#_eia_new[@]} ; _eia_cnt++ )) 1137 do 1138 _eia_tst=${_eia_new[${_eia_cnt}]} 1139 if _eia_tst=$(name_fixup ${_eia_tst}) 1140 then 1141 _eia_name[${#_eia_name[@]}]=${_eia_tst} 1142 fi 1143 done 1144 fi 1145 done 1146 unique_lines _eia_name _eia_name # Scrub duplicates. 1147 edit_exact chk_name _eia_name # Scrub pending detail. 1148 edit_exact known_name _eia_name # Scrub already detailed. 1149 if [ ${#_eia_name[@]} -gt 0 ] # Anything new? 1150 then 1151 uc_name=( ${uc_name[@]} ${_eia_name[@]} ) 1152 pend_func expand_input_name ${1} 1153 _trace_log[${#_trace_log[@]}]='#Add '${#_eia_name[@]}' unchkd name inp.#' 1154 fi 1155 edit_exact chk_address _eia_addr # Scrub pending detail. 1156 edit_exact known_address _eia_addr # Scrub already detailed. 1157 if [ ${#_eia_addr[@]} -gt 0 ] # Anything new? 1158 then 1159 chk_address=( ${chk_address[@]} ${_eia_addr[@]} ) 1160 pend_func detail_each_address ${1} 1161 fi 1162 return 0 1163 } 1164 1165 # The parse-it-yourself zone reply. 1166 # The input is the chk_name list. 
1167 # detail_each_name <indirection_limit> 1168 detail_each_name() { 1169 [ ${#chk_name[@]} -gt 0 ] || return 0 1170 local -a _den_chk # Names to check 1171 local -a _den_name # Names found here 1172 local -a _den_address # Addresses found here 1173 local -a _den_pair # Pairs found here 1174 local -a _den_rev # Reverse pairs found here 1175 local -a _den_tmp # Line being parsed 1176 local -a _den_auth # SOA contact being parsed 1177 local -a _den_new # The zone reply 1178 local -a _den_pc # Parent-Child gets big fast 1179 local -a _den_ref # So does reference chain 1180 local -a _den_nr # Name-Resource can be big 1181 local -a _den_na # Name-Address 1182 local -a _den_ns # Name-Service 1183 local -a _den_achn # Chain of Authority 1184 local -i _den_cnt # Count of names to detail 1185 local -i _den_lmt # Indirection limit 1186 local _den_who # Named being processed 1187 local _den_rec # Record type being processed 1188 local _den_cont # Contact domain 1189 local _den_str # Fixed up name string 1190 local _den_str2 # Fixed up reverse 1191 local IFS=${WSP_IFS} 1192 1193 # Local, unique copy of names to check 1194 unique_lines chk_name _den_chk 1195 unset chk_name[@] # Done with globals. 1196 1197 # Less any names already known 1198 edit_exact known_name _den_chk 1199 _den_cnt=${#_den_chk[@]} 1200 1201 # If anything left, add to known_name. 1202 [ ${_den_cnt} -gt 0 ] && 1203 known_name=( ${known_name[@]} ${_den_chk[@]} ) 1204 1205 # for the list of (previously) unknown names . . . 1206 for (( _den = 0 ; _den < _den_cnt ; _den++ )) 1207 do 1208 _den_who=${_den_chk[${_den}]} 1209 if long_fwd ${_den_who} _den_new 1210 then 1211 unique_lines _den_new _den_new 1212 if [ ${#_den_new[@]} -eq 0 ] 1213 then 1214 _den_pair[${#_den_pair[@]}]='0.0.0.0 '${_den_who} 1215 fi 1216 1217 # Parse each line in the reply. 1218 for (( _line = 0 ; _line < ${#_den_new[@]} ; _line++ )) 1219 do 1220 IFS=${NO_WSP}$'\x09'$'\x20' 1221 _den_tmp=( ${_den_new[${_line}]} ) 1222 IFS=${WSP_IFS} 1223 # If usable record and not a warning message . . . 1224 if [ ${#_den_tmp[@]} -gt 4 ] && [ 'x'${_den_tmp[0]} != 'x;;' ] 1225 then 1226 _den_rec=${_den_tmp[3]} 1227 _den_nr[${#_den_nr[@]}]=${_den_who}' '${_den_rec} 1228 # Begin at RFC1033 (+++) 1229 case ${_den_rec} in 1230 1231 #<name> [<ttl>] [<class>] SOA <origin> <person> 1232 SOA) # Start Of Authority 1233 if _den_str=$(name_fixup ${_den_tmp[0]}) 1234 then 1235 _den_name[${#_den_name[@]}]=${_den_str} 1236 _den_achn[${#_den_achn[@]}]=${_den_who}' '${_den_str}' SOA' 1237 # SOA origin -- domain name of master zone record 1238 if _den_str2=$(name_fixup ${_den_tmp[4]}) 1239 then 1240 _den_name[${#_den_name[@]}]=${_den_str2} 1241 _den_achn[${#_den_achn[@]}]=${_den_who}' '${_den_str2}' SOA.O' 1242 fi 1243 # Responsible party e-mail address (possibly bogus). 1244 # Possibility of first.last@domain.name ignored. 1245 set -f 1246 if _den_str2=$(name_fixup ${_den_tmp[5]}) 1247 then 1248 IFS=${ADR_IFS} 1249 _den_auth=( ${_den_str2} ) 1250 IFS=${WSP_IFS} 1251 if [ ${#_den_auth[@]} -gt 2 ] 1252 then 1253 _den_cont=${_den_auth[1]} 1254 for (( _auth = 2 ; _auth < ${#_den_auth[@]} ; _auth++ )) 1255 do 1256 _den_cont=${_den_cont}'.'${_den_auth[${_auth}]} 1257 done 1258 _den_name[${#_den_name[@]}]=${_den_cont}'.' 1259 _den_achn[${#_den_achn[@]}]=${_den_who}' '${_den_cont}'. 
SOA.C' 1260 fi 1261 fi 1262 set +f 1263 fi 1264 ;; 1265 1266 1267 A) # IP(v4) Address Record 1268 if _den_str=$(name_fixup ${_den_tmp[0]}) 1269 then 1270 _den_name[${#_den_name[@]}]=${_den_str} 1271 _den_pair[${#_den_pair[@]}]=${_den_tmp[4]}' '${_den_str} 1272 _den_na[${#_den_na[@]}]=${_den_str}' '${_den_tmp[4]} 1273 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' A' 1274 else 1275 _den_pair[${#_den_pair[@]}]=${_den_tmp[4]}' unknown.domain' 1276 _den_na[${#_den_na[@]}]='unknown.domain '${_den_tmp[4]} 1277 _den_ref[${#_den_ref[@]}]=${_den_who}' unknown.domain A' 1278 fi 1279 _den_address[${#_den_address[@]}]=${_den_tmp[4]} 1280 _den_pc[${#_den_pc[@]}]=${_den_who}' '${_den_tmp[4]} 1281 ;; 1282 1283 NS) # Name Server Record 1284 # Domain name being serviced (may be other than current) 1285 if _den_str=$(name_fixup ${_den_tmp[0]}) 1286 then 1287 _den_name[${#_den_name[@]}]=${_den_str} 1288 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' NS' 1289 1290 # Domain name of service provider 1291 if _den_str2=$(name_fixup ${_den_tmp[4]}) 1292 then 1293 _den_name[${#_den_name[@]}]=${_den_str2} 1294 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str2}' NSH' 1295 _den_ns[${#_den_ns[@]}]=${_den_str2}' NS' 1296 _den_pc[${#_den_pc[@]}]=${_den_str}' '${_den_str2} 1297 fi 1298 fi 1299 ;; 1300 1301 MX) # Mail Server Record 1302 # Domain name being serviced (wildcards not handled here) 1303 if _den_str=$(name_fixup ${_den_tmp[0]}) 1304 then 1305 _den_name[${#_den_name[@]}]=${_den_str} 1306 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' MX' 1307 fi 1308 # Domain name of service provider 1309 if _den_str=$(name_fixup ${_den_tmp[5]}) 1310 then 1311 _den_name[${#_den_name[@]}]=${_den_str} 1312 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' MXH' 1313 _den_ns[${#_den_ns[@]}]=${_den_str}' MX' 1314 _den_pc[${#_den_pc[@]}]=${_den_who}' '${_den_str} 1315 fi 1316 ;; 1317 1318 PTR) # Reverse address record 1319 # Special name 1320 if _den_str=$(name_fixup ${_den_tmp[0]}) 1321 then 1322 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' PTR' 1323 # Host name (not a CNAME) 1324 if _den_str2=$(name_fixup ${_den_tmp[4]}) 1325 then 1326 _den_rev[${#_den_rev[@]}]=${_den_str}' '${_den_str2} 1327 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str2}' PTRH' 1328 _den_pc[${#_den_pc[@]}]=${_den_who}' '${_den_str} 1329 fi 1330 fi 1331 ;; 1332 1333 AAAA) # IP(v6) Address Record 1334 if _den_str=$(name_fixup ${_den_tmp[0]}) 1335 then 1336 _den_name[${#_den_name[@]}]=${_den_str} 1337 _den_pair[${#_den_pair[@]}]=${_den_tmp[4]}' '${_den_str} 1338 _den_na[${#_den_na[@]}]=${_den_str}' '${_den_tmp[4]} 1339 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' AAAA' 1340 else 1341 _den_pair[${#_den_pair[@]}]=${_den_tmp[4]}' unknown.domain' 1342 _den_na[${#_den_na[@]}]='unknown.domain '${_den_tmp[4]} 1343 _den_ref[${#_den_ref[@]}]=${_den_who}' unknown.domain' 1344 fi 1345 # No processing for IPv6 addresses 1346 _den_pc[${#_den_pc[@]}]=${_den_who}' '${_den_tmp[4]} 1347 ;; 1348 1349 CNAME) # Alias name record 1350 # Nickname 1351 if _den_str=$(name_fixup ${_den_tmp[0]}) 1352 then 1353 _den_name[${#_den_name[@]}]=${_den_str} 1354 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' CNAME' 1355 _den_pc[${#_den_pc[@]}]=${_den_who}' '${_den_str} 1356 fi 1357 # Hostname 1358 if _den_str=$(name_fixup ${_den_tmp[4]}) 1359 then 1360 _den_name[${#_den_name[@]}]=${_den_str} 1361 _den_ref[${#_den_ref[@]}]=${_den_who}' '${_den_str}' CHOST' 1362 _den_pc[${#_den_pc[@]}]=${_den_who}' '${_den_str} 1363 fi 1364 ;; 1365 # TXT) 1366 # ;; 1367 
esac 1368 fi 1369 done 1370 else # Lookup error == 'A' record 'unknown address' 1371 _den_pair[${#_den_pair[@]}]='0.0.0.0 '${_den_who} 1372 fi 1373 done 1374 1375 # Control dot array growth. 1376 unique_lines _den_achn _den_achn # Works best, all the same. 1377 edit_exact auth_chain _den_achn # Works best, unique items. 1378 if [ ${#_den_achn[@]} -gt 0 ] 1379 then 1380 IFS=${NO_WSP} 1381 auth_chain=( ${auth_chain[@]} ${_den_achn[@]} ) 1382 IFS=${WSP_IFS} 1383 fi 1384 1385 unique_lines _den_ref _den_ref # Works best, all the same. 1386 edit_exact ref_chain _den_ref # Works best, unique items. 1387 if [ ${#_den_ref[@]} -gt 0 ] 1388 then 1389 IFS=${NO_WSP} 1390 ref_chain=( ${ref_chain[@]} ${_den_ref[@]} ) 1391 IFS=${WSP_IFS} 1392 fi 1393 1394 unique_lines _den_na _den_na 1395 edit_exact name_address _den_na 1396 if [ ${#_den_na[@]} -gt 0 ] 1397 then 1398 IFS=${NO_WSP} 1399 name_address=( ${name_address[@]} ${_den_na[@]} ) 1400 IFS=${WSP_IFS} 1401 fi 1402 1403 unique_lines _den_ns _den_ns 1404 edit_exact name_srvc _den_ns 1405 if [ ${#_den_ns[@]} -gt 0 ] 1406 then 1407 IFS=${NO_WSP} 1408 name_srvc=( ${name_srvc[@]} ${_den_ns[@]} ) 1409 IFS=${WSP_IFS} 1410 fi 1411 1412 unique_lines _den_nr _den_nr 1413 edit_exact name_resource _den_nr 1414 if [ ${#_den_nr[@]} -gt 0 ] 1415 then 1416 IFS=${NO_WSP} 1417 name_resource=( ${name_resource[@]} ${_den_nr[@]} ) 1418 IFS=${WSP_IFS} 1419 fi 1420 1421 unique_lines _den_pc _den_pc 1422 edit_exact parent_child _den_pc 1423 if [ ${#_den_pc[@]} -gt 0 ] 1424 then 1425 IFS=${NO_WSP} 1426 parent_child=( ${parent_child[@]} ${_den_pc[@]} ) 1427 IFS=${WSP_IFS} 1428 fi 1429 1430 # Update list known_pair (Address and Name). 1431 unique_lines _den_pair _den_pair 1432 edit_exact known_pair _den_pair 1433 if [ ${#_den_pair[@]} -gt 0 ] # Anything new? 1434 then 1435 IFS=${NO_WSP} 1436 known_pair=( ${known_pair[@]} ${_den_pair[@]} ) 1437 IFS=${WSP_IFS} 1438 fi 1439 1440 # Update list of reverse pairs. 1441 unique_lines _den_rev _den_rev 1442 edit_exact reverse_pair _den_rev 1443 if [ ${#_den_rev[@]} -gt 0 ] # Anything new? 1444 then 1445 IFS=${NO_WSP} 1446 reverse_pair=( ${reverse_pair[@]} ${_den_rev[@]} ) 1447 IFS=${WSP_IFS} 1448 fi 1449 1450 # Check indirection limit -- give up if reached. 1451 if ! _den_lmt=$(limit_chk ${1}) 1452 then 1453 return 0 1454 fi 1455 1456 # Execution engine is LIFO. Order of pend operations is important. 1457 # Did we define any new addresses? 1458 unique_lines _den_address _den_address # Scrub duplicates. 1459 edit_exact known_address _den_address # Scrub already processed. 1460 edit_exact un_address _den_address # Scrub already waiting. 1461 if [ ${#_den_address[@]} -gt 0 ] # Anything new? 1462 then 1463 uc_address=( ${uc_address[@]} ${_den_address[@]} ) 1464 pend_func expand_input_address ${_den_lmt} 1465 _trace_log[${#_trace_log[@]}]='# Add '${#_den_address[@]}' unchkd addr. #' 1466 fi 1467 1468 # Did we find any new names? 1469 unique_lines _den_name _den_name # Scrub duplicates. 1470 edit_exact known_name _den_name # Scrub already processed. 1471 edit_exact uc_name _den_name # Scrub already waiting. 1472 if [ ${#_den_name[@]} -gt 0 ] # Anything new? 1473 then 1474 uc_name=( ${uc_name[@]} ${_den_name[@]} ) 1475 pend_func expand_input_name ${_den_lmt} 1476 _trace_log[${#_trace_log[@]}]='#Added '${#_den_name[@]}' unchkd name#' 1477 fi 1478 return 0 1479 } 1480 1481 # The parse-it-yourself delegation reply 1482 # Input is the chk_address list. 
1483 # detail_each_address <indirection_limit> 1484 detail_each_address() { 1485 [ ${#chk_address[@]} -gt 0 ] || return 0 1486 unique_lines chk_address chk_address 1487 edit_exact known_address chk_address 1488 if [ ${#chk_address[@]} -gt 0 ] 1489 then 1490 known_address=( ${known_address[@]} ${chk_address[@]} ) 1491 unset chk_address[@] 1492 fi 1493 return 0 1494 } 1495 1496 # # # Application specific output functions # # # 1497 1498 # Pretty print the known pairs. 1499 report_pairs() { 1500 echo 1501 echo 'Known network pairs.' 1502 col_print known_pair 2 5 30 1503 1504 if [ ${#auth_chain[@]} -gt 0 ] 1505 then 1506 echo 1507 echo 'Known chain of authority.' 1508 col_print auth_chain 2 5 30 55 1509 fi 1510 1511 if [ ${#reverse_pair[@]} -gt 0 ] 1512 then 1513 echo 1514 echo 'Known reverse pairs.' 1515 col_print reverse_pair 2 5 55 1516 fi 1517 return 0 1518 } 1519 1520 # Check an address against the list of blacklist servers. 1521 # A good place to capture for GraphViz: address->status(server(reports)) 1522 # check_lists <ip_address> 1523 check_lists() { 1524 [ $# -eq 1 ] || return 1 1525 local -a _cl_fwd_addr 1526 local -a _cl_rev_addr 1527 local -a _cl_reply 1528 local -i _cl_rc 1529 local -i _ls_cnt 1530 local _cl_dns_addr 1531 local _cl_lkup 1532 1533 split_ip ${1} _cl_fwd_addr _cl_rev_addr 1534 _cl_dns_addr=$(dot_array _cl_rev_addr)'.' 1535 _ls_cnt=${#list_server[@]} 1536 echo ' Checking address '${1} 1537 for (( _cl = 0 ; _cl < _ls_cnt ; _cl++ )) 1538 do 1539 _cl_lkup=${_cl_dns_addr}${list_server[${_cl}]} 1540 if short_text ${_cl_lkup} _cl_reply 1541 then 1542 if [ ${#_cl_reply[@]} -gt 0 ] 1543 then 1544 echo ' Records from '${list_server[${_cl}]} 1545 address_hits[${#address_hits[@]}]=${1}' '${list_server[${_cl}]} 1546 _hs_RC=2 1547 for (( _clr = 0 ; _clr < ${#_cl_reply[@]} ; _clr++ )) 1548 do 1549 echo ' '${_cl_reply[${_clr}]} 1550 done 1551 fi 1552 fi 1553 done 1554 return 0 1555 } 1556 1557 # # # The usual application glue # # # 1558 1559 # Who did it? 1560 credits() { 1561 echo 1562 echo 'Advanced Bash Scripting Guide: is_spammer.bash, v2, 2004-msz' 1563 } 1564 1565 # How to use it? 1566 # (See also, "Quickstart" at end of script.) 1567 usage() { 1568 cat <<-'_usage_statement_' 1569 The script is_spammer.bash requires either one or two arguments. 1570 1571 arg 1) May be one of: 1572 a) A domain name 1573 b) An IPv4 address 1574 c) The name of a file with any mix of names 1575 and addresses, one per line. 1576 1577 arg 2) May be one of: 1578 a) A Blacklist server domain name 1579 b) The name of a file with Blacklist server 1580 domain names, one per line. 1581 c) If not present, a default list of (free) 1582 Blacklist servers is used. 1583 d) If a filename of an empty, readable, file 1584 is given, 1585 Blacklist server lookup is disabled. 1586 1587 All script output is written to stdout. 1588 1589 Return codes: 0 -> All OK, 1 -> Script failure, 1590 2 -> Something is Blacklisted. 1591 1592 Requires the external program 'dig' from the 'bind-9' 1593 set of DNS programs. See: http://www.isc.org 1594 1595 The domain name lookup depth limit defaults to 2 levels. 1596 Set the environment variable SPAMMER_LIMIT to change. 1597 SPAMMER_LIMIT=0 means 'unlimited' 1598 1599 Limit may also be set on the command line. 1600 If arg#1 is an integer, the limit is set to that value 1601 and then the above argument rules are applied. 1602 1603 Setting the environment variable 'SPAMMER_DATA' to a filename 1604 will cause the script to write a GraphViz graphic file. 
1605 1606 For the development version; 1607 Setting the environment variable 'SPAMMER_TRACE' to a filename 1608 will cause the execution engine to log a function call trace. 1609 1610 _usage_statement_ 1611 } 1612 1613 # The default list of Blacklist servers: 1614 # Many choices, see: http://www.spews.org/lists.html 1615 1616 declare -a default_servers 1617 # See: http://www.spamhaus.org (Conservative, well maintained) 1618 default_servers[0]='sbl-xbl.spamhaus.org' 1619 # See: http://ordb.org (Open mail relays) 1620 default_servers[1]='relays.ordb.org' 1621 # See: http://www.spamcop.net/ (You can report spammers here) 1622 default_servers[2]='bl.spamcop.net' 1623 # See: http://www.spews.org (An 'early detect' system) 1624 default_servers[3]='l2.spews.dnsbl.sorbs.net' 1625 # See: http://www.dnsbl.us.sorbs.net/using.shtml 1626 default_servers[4]='dnsbl.sorbs.net' 1627 # See: http://dsbl.org/usage (Various mail relay lists) 1628 default_servers[5]='list.dsbl.org' 1629 default_servers[6]='multihop.dsbl.org' 1630 default_servers[7]='unconfirmed.dsbl.org' 1631 1632 # User input argument #1 1633 setup_input() { 1634 if [ -e ${1} ] && [ -r ${1} ] # Name of readable file 1635 then 1636 file_to_array ${1} uc_name 1637 echo 'Using filename >'${1}'< as input.' 1638 else 1639 if is_address ${1} # IP address? 1640 then 1641 uc_address=( ${1} ) 1642 echo 'Starting with address >'${1}'<' 1643 else # Must be a name. 1644 uc_name=( ${1} ) 1645 echo 'Starting with domain name >'${1}'<' 1646 fi 1647 fi 1648 return 0 1649 } 1650 1651 # User input argument #2 1652 setup_servers() { 1653 if [ -e ${1} ] && [ -r ${1} ] # Name of a readable file 1654 then 1655 file_to_array ${1} list_server 1656 echo 'Using filename >'${1}'< as blacklist server list.' 1657 else 1658 list_server=( ${1} ) 1659 echo 'Using blacklist server >'${1}'<' 1660 fi 1661 return 0 1662 } 1663 1664 # User environment variable SPAMMER_TRACE 1665 live_log_die() { 1666 if [ ${SPAMMER_TRACE:=} ] # Wants trace log? 1667 then 1668 if [ ! -e ${SPAMMER_TRACE} ] 1669 then 1670 if ! touch ${SPAMMER_TRACE} 2>/dev/null 1671 then 1672 pend_func echo $(printf '%q\n' \ 1673 'Unable to create log file >'${SPAMMER_TRACE}'<') 1674 pend_release 1675 exit 1 1676 fi 1677 _log_file=${SPAMMER_TRACE} 1678 _pend_hook_=trace_logger 1679 _log_dump=dump_log 1680 else 1681 if [ ! -w ${SPAMMER_TRACE} ] 1682 then 1683 pend_func echo $(printf '%q\n' \ 1684 'Unable to write log file >'${SPAMMER_TRACE}'<') 1685 pend_release 1686 exit 1 1687 fi 1688 _log_file=${SPAMMER_TRACE} 1689 echo '' > ${_log_file} 1690 _pend_hook_=trace_logger 1691 _log_dump=dump_log 1692 fi 1693 fi 1694 return 0 1695 } 1696 1697 # User environment variable SPAMMER_DATA 1698 data_capture() { 1699 if [ ${SPAMMER_DATA:=} ] # Wants a data dump? 1700 then 1701 if [ ! -e ${SPAMMER_DATA} ] 1702 then 1703 if ! touch ${SPAMMER_DATA} 2>/dev/null 1704 then 1705 pend_func echo $(printf '%q]n' \ 1706 'Unable to create data output file >'${SPAMMER_DATA}'<') 1707 pend_release 1708 exit 1 1709 fi 1710 _dot_file=${SPAMMER_DATA} 1711 _dot_dump=dump_dot 1712 else 1713 if [ ! -w ${SPAMMER_DATA} ] 1714 then 1715 pend_func echo $(printf '%q\n' \ 1716 'Unable to write data output file >'${SPAMMER_DATA}'<') 1717 pend_release 1718 exit 1 1719 fi 1720 _dot_file=${SPAMMER_DATA} 1721 _dot_dump=dump_dot 1722 fi 1723 fi 1724 return 0 1725 } 1726 1727 # Grope user specified arguments. 1728 do_user_args() { 1729 if [ $# -gt 0 ] && is_number $1 1730 then 1731 indirect=$1 1732 shift 1733 fi 1734 1735 case $# in # Did user treat us well? 
1736 1) 1737 if ! setup_input $1 # Needs error checking. 1738 then 1739 pend_release 1740 $_log_dump 1741 exit 1 1742 fi 1743 list_server=( ${default_servers[@]} ) 1744 _list_cnt=${#list_server[@]} 1745 echo 'Using default blacklist server list.' 1746 echo 'Search depth limit: '${indirect} 1747 ;; 1748 2) 1749 if ! setup_input $1 # Needs error checking. 1750 then 1751 pend_release 1752 $_log_dump 1753 exit 1 1754 fi 1755 if ! setup_servers $2 # Needs error checking. 1756 then 1757 pend_release 1758 $_log_dump 1759 exit 1 1760 fi 1761 echo 'Search depth limit: '${indirect} 1762 ;; 1763 *) 1764 pend_func usage 1765 pend_release 1766 $_log_dump 1767 exit 1 1768 ;; 1769 esac 1770 return 0 1771 } 1772 1773 # A general purpose debug tool. 1774 # list_array <array_name> 1775 list_array() { 1776 [ $# -eq 1 ] || return 1 # One argument required. 1777 1778 local -a _la_lines 1779 set -f 1780 local IFS=${NO_WSP} 1781 eval _la_lines=\(\ \$\{$1\[@\]\}\ \) 1782 echo 1783 echo "Element count "${#_la_lines[@]}" array "${1} 1784 local _ln_cnt=${#_la_lines[@]} 1785 1786 for (( _i = 0; _i < ${_ln_cnt}; _i++ )) 1787 do 1788 echo 'Element '$_i' >'${_la_lines[$_i]}'<' 1789 done 1790 set +f 1791 return 0 1792 } 1793 1794 # # # 'Hunt the Spammer' program code # # # 1795 pend_init # Ready stack engine. 1796 pend_func credits # Last thing to print. 1797 1798 # # # Deal with user # # # 1799 live_log_die # Setup debug trace log. 1800 data_capture # Setup data capture file. 1801 echo 1802 do_user_args $@ 1803 1804 # # # Haven't exited yet - There is some hope # # # 1805 # Discovery group - Execution engine is LIFO - pend 1806 # in reverse order of execution. 1807 _hs_RC=0 # Hunt the Spammer return code 1808 pend_mark 1809 pend_func report_pairs # Report name-address pairs. 1810 1811 # The two detail_* are mutually recursive functions. 1812 # They also pend expand_* functions as required. 1813 # These two (the last of ???) exit the recursion. 1814 pend_func detail_each_address # Get all resources of addresses. 1815 pend_func detail_each_name # Get all resources of names. 1816 1817 # The two expand_* are mutually recursive functions, 1818 #+ which pend additional detail_* functions as required. 1819 pend_func expand_input_address 1 # Expand input names by address. 1820 pend_func expand_input_name 1 # #xpand input addresses by name. 1821 1822 # Start with a unique set of names and addresses. 1823 pend_func unique_lines uc_address uc_address 1824 pend_func unique_lines uc_name uc_name 1825 1826 # Separate mixed input of names and addresses. 1827 pend_func split_input 1828 pend_release 1829 1830 # # # Pairs reported -- Unique list of IP addresses found 1831 echo 1832 _ip_cnt=${#known_address[@]} 1833 if [ ${#list_server[@]} -eq 0 ] 1834 then 1835 echo 'Blacklist server list empty, none checked.' 1836 else 1837 if [ ${_ip_cnt} -eq 0 ] 1838 then 1839 echo 'Known address list empty, none checked.' 1840 else 1841 _ip_cnt=${_ip_cnt}-1 # Start at top. 1842 echo 'Checking Blacklist servers.' 
1843 for (( _ip = _ip_cnt ; _ip >= 0 ; _ip-- )) 1844 do 1845 pend_func check_lists $( printf '%q\n' ${known_address[$_ip]} ) 1846 done 1847 fi 1848 fi 1849 pend_release 1850 $_dot_dump # Graphics file dump 1851 $_log_dump # Execution trace 1852 echo 1853 1854 1855 ############################## 1856 # Example output from script # 1857 ############################## 1858 :<<-'_is_spammer_outputs_' 1859 1860 ./is_spammer.bash 0 web4.alojamentos7.com 1861 1862 Starting with domain name >web4.alojamentos7.com< 1863 Using default blacklist server list. 1864 Search depth limit: 0 1865 .:....::::...:::...:::.......::..::...:::.......:: 1866 Known network pairs. 1867 66.98.208.97 web4.alojamentos7.com. 1868 66.98.208.97 ns1.alojamentos7.com. 1869 69.56.202.147 ns2.alojamentos.ws. 1870 66.98.208.97 alojamentos7.com. 1871 66.98.208.97 web.alojamentos7.com. 1872 69.56.202.146 ns1.alojamentos.ws. 1873 69.56.202.146 alojamentos.ws. 1874 66.235.180.113 ns1.alojamentos.org. 1875 66.235.181.192 ns2.alojamentos.org. 1876 66.235.180.113 alojamentos.org. 1877 66.235.180.113 web6.alojamentos.org. 1878 216.234.234.30 ns1.theplanet.com. 1879 12.96.160.115 ns2.theplanet.com. 1880 216.185.111.52 mail1.theplanet.com. 1881 69.56.141.4 spooling.theplanet.com. 1882 216.185.111.40 theplanet.com. 1883 216.185.111.40 www.theplanet.com. 1884 216.185.111.52 mail.theplanet.com. 1885 1886 Checking Blacklist servers. 1887 Checking address 66.98.208.97 1888 Records from dnsbl.sorbs.net 1889 "Spam Received See: http://www.dnsbl.sorbs.net/lookup.shtml?66.98.208.97" 1890 Checking address 69.56.202.147 1891 Checking address 69.56.202.146 1892 Checking address 66.235.180.113 1893 Checking address 66.235.181.192 1894 Checking address 216.185.111.40 1895 Checking address 216.234.234.30 1896 Checking address 12.96.160.115 1897 Checking address 216.185.111.52 1898 Checking address 69.56.141.4 1899 1900 Advanced Bash Scripting Guide: is_spammer.bash, v2, 2004-msz 1901 1902 _is_spammer_outputs_ 1903 1904 exit ${_hs_RC} 1905 1906 #################################################### 1907 # The script ignores everything from here on down # 1908 #+ because of the 'exit' command, just above. # 1909 #################################################### 1910 1911 1912 1913 Quickstart 1914 ========== 1915 1916 Prerequisites 1917 1918 Bash version 2.05b or 3.00 (bash --version) 1919 A version of Bash which supports arrays. Array 1920 support is included by default Bash configurations. 1921 1922 'dig,' version 9.x.x (dig $HOSTNAME, see first line of output) 1923 A version of dig which supports the +short options. 1924 See: dig_wrappers.bash for details. 1925 1926 1927 Optional Prerequisites 1928 1929 'named,' a local DNS caching program. Any flavor will do. 1930 Do twice: dig $HOSTNAME 1931 Check near bottom of output for: SERVER: 127.0.0.1#53 1932 That means you have one running. 1933 1934 1935 Optional Graphics Support 1936 1937 'date,' a standard *nix thing. (date -R) 1938 1939 dot Program to convert graphic description file to a 1940 diagram. (dot -V) 1941 A part of the Graph-Viz set of programs. 1942 See: [http://www.research.att.com/sw/tools/graphviz||GraphViz] 1943 1944 'dotty,' a visual editor for graphic description files. 1945 Also a part of the Graph-Viz set of programs. 1946 1947 1948 1949 1950 Quick Start 1951 1952 In the same directory as the is_spammer.bash script; 1953 Do: ./is_spammer.bash 1954 1955 Usage Details 1956 1957 1. Blacklist server choices. 1958 1959 (a) To use default, built-in list: Do nothing. 
1960 1961 (b) To use your own list: 1962 1963 i. Create a file with a single Blacklist server 1964 domain name per line. 1965 1966 ii. Provide that filename as the last argument to 1967 the script. 1968 1969 (c) To use a single Blacklist server: Last argument 1970 to the script. 1971 1972 (d) To disable Blacklist lookups: 1973 1974 i. Create an empty file (touch spammer.nul) 1975 Your choice of filename. 1976 1977 ii. Provide the filename of that empty file as the 1978 last argument to the script. 1979 1980 2. Search depth limit. 1981 1982 (a) To use the default value of 2: Do nothing. 1983 1984 (b) To set a different limit: 1985 A limit of 0 means: no limit. 1986 1987 i. export SPAMMER_LIMIT=1 1988 or whatever limit you want. 1989 1990 ii. OR provide the desired limit as the first 1991 argument to the script. 1992 1993 3. Optional execution trace log. 1994 1995 (a) To use the default setting of no log output: Do nothing. 1996 1997 (b) To write an execution trace log: 1998 export SPAMMER_TRACE=spammer.log 1999 or whatever filename you want. 2000 2001 4. Optional graphic description file. 2002 2003 (a) To use the default setting of no graphic file: Do nothing. 2004 2005 (b) To write a Graph-Viz graphic description file: 2006 export SPAMMER_DATA=spammer.dot 2007 or whatever filename you want. 2008 2009 5. Where to start the search. 2010 2011 (a) Starting with a single domain name: 2012 2013 i. Without a command line search limit: First 2014 argument to script. 2015 2016 ii. With a command line search limit: Second 2017 argument to script. 2018 2019 (b) Starting with a single IP address: 2020 2021 i. Without a command line search limit: First 2022 argument to script. 2023 2024 ii. With a command line search limit: Second 2025 argument to script. 2026 2027 (c) Starting with (mixed) multiple name(s) and/or address(es): 2028 Create a file with one name or address per line. 2029 Your choice of filename. 2030 2031 i. Without a command line search limit: Filename as 2032 first argument to script. 2033 2034 ii. With a command line search limit: Filename as 2035 second argument to script. 2036 2037 6. What to do with the display output. 2038 2039 (a) To view display output on screen: Do nothing. 2040 2041 (b) To save display output to a file: Redirect stdout to a filename. 2042 2043 (c) To discard display output: Redirect stdout to /dev/null. 2044 2045 7. Temporary end of decision making. 2046 press RETURN 2047 wait (optionally, watch the dots and colons). 2048 2049 8. Optionally check the return code. 2050 2051 (a) Return code 0: All OK 2052 2053 (b) Return code 1: Script setup failure 2054 2055 (c) Return code 2: Something was blacklisted. 2056 2057 9. Where is my graph (diagram)? 2058 2059 The script does not directly produce a graph (diagram). 2060 It only produces a graphic description file. You can 2061 process the graphic descriptor file that was output 2062 with the 'dot' program. 2063 2064 Until you edit that descriptor file, to describe the 2065 relationships you want shown, all that you will get is 2066 a bunch of labeled name and address nodes. 2067 2068 All of the script's discovered relationships are within 2069 a comment block in the graphic descriptor file, each 2070 with a descriptive heading. 2071 2072 The editing required to draw a line between a pair of 2073 nodes from the information in the descriptor file may 2074 be done with a text editor. 
2075 2076 Given these lines somewhere in the descriptor file: 2077 2078 # Known domain name nodes 2079 2080 N0000 [label="guardproof.info."] ; 2081 2082 N0002 [label="third.guardproof.info."] ; 2083 2084 2085 2086 # Known address nodes 2087 2088 A0000 [label="61.141.32.197"] ; 2089 2090 2091 2092 /* 2093 2094 # Known name->address edges 2095 2096 NA0000 third.guardproof.info. 61.141.32.197 2097 2098 2099 2100 # Known parent->child edges 2101 2102 PC0000 guardproof.info. third.guardproof.info. 2103 2104 */ 2105 2106 Turn that into the following lines by substituting node 2107 identifiers into the relationships: 2108 2109 # Known domain name nodes 2110 2111 N0000 [label="guardproof.info."] ; 2112 2113 N0002 [label="third.guardproof.info."] ; 2114 2115 2116 2117 # Known address nodes 2118 2119 A0000 [label="61.141.32.197"] ; 2120 2121 2122 2123 # PC0000 guardproof.info. third.guardproof.info. 2124 2125 N0000->N0002 ; 2126 2127 2128 2129 # NA0000 third.guardproof.info. 61.141.32.197 2130 2131 N0002->A0000 ; 2132 2133 2134 2135 /* 2136 2137 # Known name->address edges 2138 2139 NA0000 third.guardproof.info. 61.141.32.197 2140 2141 2142 2143 # Known parent->child edges 2144 2145 PC0000 guardproof.info. third.guardproof.info. 2146 2147 */ 2148 2149 Process that with the 'dot' program, and you have your 2150 first network diagram. 2151 2152 In addition to the conventional graphic edges, the 2153 descriptor file includes similar format pair-data that 2154 describes services, zone records (sub-graphs?), 2155 blacklisted addresses, and other things which might be 2156 interesting to include in your graph. This additional 2157 information could be displayed as different node 2158 shapes, colors, line sizes, etc. 2159 2160 The descriptor file can also be read and edited by a 2161 Bash script (of course). You should be able to find 2162 most of the functions required within the 2163 "is_spammer.bash" script. 2164 2165 # End Quickstart. 2166 2167 2168 2169 Additional Note 2170 ========== ==== 2171 2172 Michael Zick points out that there is a "makeviz.bash" interactive 2173 Web site at rediris.es. Can't give the full URL, since this is not 2174 a publically accessible site. |
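The hand-editing step described above can, as the Quickstart notes, also be done by a Bash script. The following is a minimal sketch -- it is not part of is_spammer.bash -- which assumes a descriptor file written by the dump_dot function above, with node lines of the form N0002 [label="third.guardproof.info."] ; and pair lines of the form NA0000 third.guardproof.info. 61.141.32.197. The script name, the helper function node_of, and the filenames are illustrative only.

#!/bin/bash
# make-edges.sh: sketch of the hand-editing step from the Quickstart.
# Converts the 'NAnnnn name address' pair lines of a descriptor file
#+ into 'Nxxxx->Ayyyy ;' edge statements for the 'dot' program.
# Assumes the layout produced by the dump_dot function (an assumption).

descriptor=${1:?Usage: $0 descriptor-file}

node_of() {   # node_of <label> :: echo the node id carrying that label.
  grep -F "[label=\"${1}\"]" "$descriptor" | awk '{ print $1 }' | head -n 1
}

echo '# Generated name->address edges'
grep -E '^[[:space:]]*NA[0-9]' "$descriptor" |
while read -r tag name address
do
  n=$(node_of "$name")        # Domain-name node, e.g. N0002
  a=$(node_of "$address")     # Address node, e.g. A0000
  [ -n "$n" ] && [ -n "$a" ] && echo "  ${n}->${a} ;"
done

exit 0

Paste the generated edge lines inside the digraph block (before the closing brace), then render with something like: dot -Tpng spammer.dot -o spammer.png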
Another anti-spam script.
Example A-31. Spammer Hunt
1 #!/bin/bash 2 # whx.sh: "whois" spammer lookup 3 # Author: Walter Dnes 4 # Slight revisions (first section) by ABS Guide author. 5 # Used in ABS Guide with permission. 6 7 # Needs version 3.x or greater of Bash to run (because of =~ operator). 8 # Commented by script author and ABS Guide author. 9 10 11 12 E_BADARGS=65 # Missing command-line arg. 13 E_NOHOST=66 # Host not found. 14 E_TIMEOUT=67 # Host lookup timed out. 15 E_UNDEF=68 # Some other (undefined) error. 16 HOSTWAIT=10 # Specify up to 10 seconds for host query reply. 17 # The actual wait may be a bit longer. 18 OUTFILE=whois.txt # Output file. 19 PORT=4321 20 21 22 if [ -z "$1" ] # Check for (required) command-line arg. 23 then 24 echo "Usage: $0 domain name or IP address" 25 exit $E_BADARGS 26 fi 27 28 29 if [[ "$1" =~ "[a-zA-Z][a-zA-Z]$" ]] # Ends in two alpha chars? 30 then # It's a domain name && must do host lookup. 31 IPADDR=$(host -W $HOSTWAIT $1 | awk '{print $4}') 32 # Doing host lookup to get IP address. 33 # Extract final field. 34 else 35 IPADDR="$1" # Command-line arg was IP address. 36 fi 37 38 echo; echo "IP Address is: "$IPADDR""; echo 39 40 if [ -e "$OUTFILE" ] 41 then 42 rm -f "$OUTFILE" 43 echo "Stale output file \"$OUTFILE\" removed."; echo 44 fi 45 46 47 # Sanity checks. 48 # (This section needs more work.) 49 # =============================== 50 if [ -z "$IPADDR" ] 51 # No response. 52 then 53 echo "Host not found!" 54 exit $E_NOHOST # Bail out. 55 fi 56 57 if [[ "$IPADDR" =~ "^[;;]" ]] 58 # ;; connection timed out; no servers could be reached 59 then 60 echo "Host lookup timed out!" 61 exit $E_TIMEOUT # Bail out. 62 fi 63 64 if [[ "$IPADDR" =~ "[(NXDOMAIN)]$" ]] 65 # Host xxxxxxxxx.xxx not found: 3(NXDOMAIN) 66 then 67 echo "Host not found!" 68 exit $E_NOHOST # Bail out. 69 fi 70 71 if [[ "$IPADDR" =~ "[(SERVFAIL)]$" ]] 72 # Host xxxxxxxxx.xxx not found: 2(SERVFAIL) 73 then 74 echo "Host not found!" 75 exit $E_NOHOST # Bail out. 76 fi 77 78 79 80 81 # ======================== Main body of script ======================== 82 83 AFRINICquery() { 84 # Define the function that queries AFRINIC. Echo a notification to the 85 #+ screen, and then run the actual query, redirecting output to $OUTFILE. 86 87 echo "Searching for $IPADDR in whois.afrinic.net" 88 whois -h whois.afrinic.net "$IPADDR" > $OUTFILE 89 90 # Check for presence of reference to an rwhois. 91 # Warn about non-functional rwhois.infosat.net server 92 #+ and attempt rwhois query. 93 if grep -e "^remarks: .*rwhois\.[^ ]\+" "$OUTFILE" 94 then 95 echo " " >> $OUTFILE 96 echo "***" >> $OUTFILE 97 echo "***" >> $OUTFILE 98 echo "Warning: rwhois.infosat.net was not working as of 2005/02/02" >> $OUTFILE 99 echo " when this script was written." >> $OUTFILE 100 echo "***" >> $OUTFILE 101 echo "***" >> $OUTFILE 102 echo " " >> $OUTFILE 103 RWHOIS=`grep "^remarks: .*rwhois\.[^ ]\+" "$OUTFILE" | tail -n 1 |\ 104 sed "s/\(^.*\)\(rwhois\..*\)\(:4.*\)/\2/"` 105 whois -h ${RWHOIS}:${PORT} "$IPADDR" >> $OUTFILE 106 fi 107 } 108 109 APNICquery() { 110 echo "Searching for $IPADDR in whois.apnic.net" 111 whois -h whois.apnic.net "$IPADDR" > $OUTFILE 112 113 # Just about every country has its own internet registrar. 114 # I don't normally bother consulting them, because the regional registry 115 #+ usually supplies sufficient information. 116 # There are a few exceptions, where the regional registry simply 117 #+ refers to the national registry for direct data. 118 # These are Japan and South Korea in APNIC, and Brasil in LACNIC. 
119 # The following if statement checks $OUTFILE (whois.txt) for the presence 120 #+ of "KR" (South Korea) or "JP" (Japan) in the country field. 121 # If either is found, the query is re-run against the appropriate 122 #+ national registry. 123 124 if grep -E "^country:[ ]+KR$" "$OUTFILE" 125 then 126 echo "Searching for $IPADDR in whois.krnic.net" 127 whois -h whois.krnic.net "$IPADDR" >> $OUTFILE 128 elif grep -E "^country:[ ]+JP$" "$OUTFILE" 129 then 130 echo "Searching for $IPADDR in whois.nic.ad.jp" 131 whois -h whois.nic.ad.jp "$IPADDR"/e >> $OUTFILE 132 fi 133 } 134 135 ARINquery() { 136 echo "Searching for $IPADDR in whois.arin.net" 137 whois -h whois.arin.net "$IPADDR" > $OUTFILE 138 139 # Several large internet providers listed by ARIN have their own 140 #+ internal whois service, referred to as "rwhois". 141 # A large block of IP addresses is listed with the provider 142 #+ under the ARIN registry. 143 # To get the IP addresses of 2nd-level ISPs or other large customers, 144 #+ one has to refer to the rwhois server on port 4321. 145 # I originally started with a bunch of "if" statements checking for 146 #+ the larger providers. 147 # This approach is unwieldy, and there's always another rwhois server 148 #+ that I didn't know about. 149 # A more elegant approach is to check $OUTFILE for a reference 150 #+ to a whois server, parse that server name out of the comment section, 151 #+ and re-run the query against the appropriate rwhois server. 152 # The parsing looks a bit ugly, with a long continued line inside 153 #+ backticks. 154 # But it only has to be done once, and will work as new servers are added. 155 #@ ABS Guide author comment: it isn't all that ugly, and is, in fact, 156 #@+ an instructive use of Regular Expressions. 157 158 if grep -E "^Comment: .*rwhois.[^ ]+" "$OUTFILE" 159 then 160 RWHOIS=`grep -e "^Comment:.*rwhois\.[^ ]\+" "$OUTFILE" | tail -n 1 |\ 161 sed "s/^\(.*\)\(rwhois\.[^ ]\+\)\(.*$\)/\2/"` 162 echo "Searching for $IPADDR in ${RWHOIS}" 163 whois -h ${RWHOIS}:${PORT} "$IPADDR" >> $OUTFILE 164 fi 165 } 166 167 LACNICquery() { 168 echo "Searching for $IPADDR in whois.lacnic.net" 169 whois -h whois.lacnic.net "$IPADDR" > $OUTFILE 170 171 # The following if statement checks $OUTFILE (whois.txt) for the presence of 172 #+ "BR" (Brasil) in the country field. 173 # If it is found, the query is re-run against whois.registro.br. 174 175 if grep -E "^country:[ ]+BR$" "$OUTFILE" 176 then 177 echo "Searching for $IPADDR in whois.registro.br" 178 whois -h whois.registro.br "$IPADDR" >> $OUTFILE 179 fi 180 } 181 182 RIPEquery() { 183 echo "Searching for $IPADDR in whois.ripe.net" 184 whois -h whois.ripe.net "$IPADDR" > $OUTFILE 185 } 186 187 # Initialize a few variables. 188 # * slash8 is the most significant octet 189 # * slash16 consists of the two most significant octets 190 # * octet2 is the second most significant octet 191 192 193 194 195 slash8=`echo $IPADDR | cut -d. -f 1` 196 if [ -z "$slash8" ] # Yet another sanity check. 197 then 198 echo "Undefined error!" 199 exit $E_UNDEF 200 fi 201 slash16=`echo $IPADDR | cut -d. -f 1-2` 202 # ^ Period specified as 'cut" delimiter. 203 if [ -z "$slash16" ] 204 then 205 echo "Undefined error!" 206 exit $E_UNDEF 207 fi 208 octet2=`echo $slash16 | cut -d. -f 2` 209 if [ -z "$octet2" ] 210 then 211 echo "Undefined error!" 212 exit $E_UNDEF 213 fi 214 215 216 # Check for various odds and ends of reserved space. 217 # There is no point in querying for those addresses. 
218 219 if [ $slash8 == 0 ]; then 220 echo $IPADDR is '"This Network"' space\; Not querying 221 elif [ $slash8 == 10 ]; then 222 echo $IPADDR is RFC1918 space\; Not querying 223 elif [ $slash8 == 14 ]; then 224 echo $IPADDR is '"Public Data Network"' space\; Not querying 225 elif [ $slash8 == 127 ]; then 226 echo $IPADDR is loopback space\; Not querying 227 elif [ $slash16 == 169.254 ]; then 228 echo $IPADDR is link-local space\; Not querying 229 elif [ $slash8 == 172 ] && [ $octet2 -ge 16 ] && [ $octet2 -le 31 ];then 230 echo $IPADDR is RFC1918 space\; Not querying 231 elif [ $slash16 == 192.168 ]; then 232 echo $IPADDR is RFC1918 space\; Not querying 233 elif [ $slash8 -ge 224 ]; then 234 echo $IPADDR is either Multicast or reserved space\; Not querying 235 elif [ $slash8 -ge 200 ] && [ $slash8 -le 201 ]; then LACNICquery "$IPADDR" 236 elif [ $slash8 -ge 202 ] && [ $slash8 -le 203 ]; then APNICquery "$IPADDR" 237 elif [ $slash8 -ge 210 ] && [ $slash8 -le 211 ]; then APNICquery "$IPADDR" 238 elif [ $slash8 -ge 218 ] && [ $slash8 -le 223 ]; then APNICquery "$IPADDR" 239 240 # If we got this far without making a decision, query ARIN. 241 # If a reference is found in $OUTFILE to APNIC, AFRINIC, LACNIC, or RIPE, 242 #+ query the appropriate whois server. 243 244 else 245 ARINquery "$IPADDR" 246 if grep "whois.afrinic.net" "$OUTFILE"; then 247 AFRINICquery "$IPADDR" 248 elif grep -E "^OrgID:[ ]+RIPE$" "$OUTFILE"; then 249 RIPEquery "$IPADDR" 250 elif grep -E "^OrgID:[ ]+APNIC$" "$OUTFILE"; then 251 APNICquery "$IPADDR" 252 elif grep -E "^OrgID:[ ]+LACNIC$" "$OUTFILE"; then 253 LACNICquery "$IPADDR" 254 fi 255 fi 256 257 #@ --------------------------------------------------------------- 258 # Try also: 259 # wget http://logi.cc/nw/whois.php3?ACTION=doQuery&DOMAIN=$IPADDR 260 #@ --------------------------------------------------------------- 261 262 # We've now finished the querying. 263 # Echo a copy of the final result to the screen. 264 265 cat $OUTFILE 266 # Or "less $OUTFILE" . . . 267 268 269 exit 0 270 271 #@ ABS Guide author comments: 272 #@ Nothing fancy here, but still a very useful tool for hunting spammers. 273 #@ Sure, the script can be cleaned up some, and it's still a bit buggy, 274 #@+ (exercise for reader), but all the same, it's a nice piece of coding 275 #@+ by Walter Dnes. 276 #@ Thank you! |
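Since whx.sh writes its report to whois.txt (the OUTFILE variable) and overwrites it on each run, a small wrapper can work through a whole list of suspect addresses. This is a sketch only, not part of Walter Dnes' script; the list filename and the per-address report names are illustrative.

#!/bin/bash
# batch-whx.sh: sketch of a wrapper around whx.sh for a list of
#+ domain names or IP addresses, one per line.
# Assumes whx.sh is in the current directory (an assumption).

E_NOLIST=65

LIST=${1:-address-list.txt}    # Hypothetical default list filename.

[ -r "$LIST" ] || { echo "Cannot read list file \"$LIST\"."; exit $E_NOLIST; }

while read -r addr
do
  [ -z "$addr" ] && continue            # Skip blank lines.
  ./whx.sh "$addr" > /dev/null          # Per-address screen chatter not needed.
  [ -f whois.txt ] && mv whois.txt "whois.${addr}.txt"
done < "$LIST"                          # One query per line of the list.

exit 0

Invoke as, for example: ./batch-whx.sh suspects.txt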
"Little Monster's" front end to wget.
Example A-32. Making wget easier to use
1 #!/bin/bash 2 # wgetter2.bash 3 4 # Author: Little Monster [monster@monstruum.co.uk] 5 # ==> Used in ABS Guide with permission of script author. 6 # ==> This script still needs debugging and fixups (exercise for reader). 7 # ==> It could also use some additional editing in the comments. 8 9 10 # This is wgetter2 -- 11 #+ a Bash script to make wget a bit more friendly, and save typing. 12 13 # Carefully crafted by Little Monster. 14 # More or less complete on 02/02/2005. 15 # If you think this script can be improved, 16 #+ email me at: monster@monstruum.co.uk 17 # ==> and cc: to the author of the ABS Guide, please. 18 # This script is licenced under the GPL. 19 # You are free to copy, alter and re-use it, 20 #+ but please don't try to claim you wrote it. 21 # Log your changes here instead. 22 23 # ======================================================================= 24 # changelog: 25 26 # 07/02/2005. Fixups by Little Monster. 27 # 02/02/2005. Minor additions by Little Monster. 28 # (See after # +++++++++++ ) 29 # 29/01/2005. Minor stylistic edits and cleanups by author of ABS Guide. 30 # Added exit error codes. 31 # 22/11/2004. Finished initial version of second version of wgetter: 32 # wgetter2 is born. 33 # 01/12/2004. Changed 'runn' function so it can be run 2 ways -- 34 # either ask for a file name or have one input on the CL. 35 # 01/12/2004. Made sensible handling of no URL's given. 36 # 01/12/2004. Made loop of main options, so you don't 37 # have to keep calling wgetter 2 all the time. 38 # Runs as a session instead. 39 # 01/12/2004. Added looping to 'runn' function. 40 # Simplified and improved. 41 # 01/12/2004. Added state to recursion setting. 42 # Enables re-use of previous value. 43 # 05/12/2004. Modified the file detection routine in the 'runn' function 44 # so it's not fooled by empty values, and is cleaner. 45 # 01/02/2004. Added cookie finding routine from later version (which 46 # isn't ready yet), so as not to have hard-coded paths. 47 # ======================================================================= 48 49 # Error codes for abnormal exit. 50 E_USAGE=67 # Usage message, then quit. 51 E_NO_OPTS=68 # No command-line args entered. 52 E_NO_URLS=69 # No URLs passed to script. 53 E_NO_SAVEFILE=70 # No save filename passed to script. 54 E_USER_EXIT=71 # User decides to quit. 55 56 57 # Basic default wget command we want to use. 58 # This is the place to change it, if required. 59 # NB: if using a proxy, set http_proxy = yourproxy in .wgetrc. 60 # Otherwise delete --proxy=on, below. 61 # ==================================================================== 62 CommandA="wget -nc -c -t 5 --progress=bar --random-wait --proxy=on -r" 63 # ==================================================================== 64 65 66 67 # -------------------------------------------------------------------- 68 # Set some other variables and explain them. 69 70 pattern=" -A .jpg,.JPG,.jpeg,.JPEG,.gif,.GIF,.htm,.html,.shtml,.php" 71 # wget's option to only get certain types of file. 72 # comment out if not using 73 today=`date +%F` # Used for a filename. 74 home=$HOME # Set HOME to an internal variable. 75 # In case some other path is used, change it here. 76 depthDefault=3 # Set a sensible default recursion. 77 Depth=$depthDefault # Otherwise user feedback doesn't tie in properly. 78 RefA="" # Set blank referring page. 79 Flag="" # Default to not saving anything, 80 #+ or whatever else might be wanted in future. 81 lister="" # Used for passing a list of urls directly to wget. 
82 Woptions="" # Used for passing wget some options for itself. 83 inFile="" # Used for the run function. 84 newFile="" # Used for the run function. 85 savePath="$home/w-save" 86 Config="$home/.wgetter2rc" 87 # This is where some variables can be stored, 88 #+ if permanently changed from within the script. 89 Cookie_List="$home/.cookielist" 90 # So we know where the cookies are kept . . . 91 cFlag="" # Part of the cookie file selection routine. 92 93 # Define the options available. Easy to change letters here if needed. 94 # These are the optional options; you don't just wait to be asked. 95 96 save=s # Save command instead of executing it. 97 cook=c # Change cookie file for this session. 98 help=h # Usage guide. 99 list=l # Pass wget the -i option and URL list. 100 runn=r # Run saved commands as an argument to the option. 101 inpu=i # Run saved commands interactively. 102 wopt=w # Allow to enter options to pass directly to wget. 103 # -------------------------------------------------------------------- 104 105 106 if [ -z "$1" ]; then # Make sure we get something for wget to eat. 107 echo "You must at least enter a URL or option!" 108 echo "-$help for usage." 109 exit $E_NO_OPTS 110 fi 111 112 113 114 # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 115 # added added added added added added added added added added added added 116 117 if [ ! -e "$Config" ]; then # See if configuration file exists. 118 echo "Creating configuration file, $Config" 119 echo "# This is the configuration file for wgetter2" > "$Config" 120 echo "# Your customised settings will be saved in this file" >> "$Config" 121 else 122 source $Config # Import variables we set outside the script. 123 fi 124 125 if [ ! -e "$Cookie_List" ]; then 126 # Set up a list of cookie files, if there isn't one. 127 echo "Hunting for cookies . . ." 128 find -name cookies.txt >> $Cookie_List # Create the list of cookie files. 129 fi # Isolate this in its own 'if' statement, 130 #+ in case we got interrupted while searching. 131 132 if [ -z "$cFlag" ]; then # If we haven't already done this . . . 133 echo # Make a nice space after the command prompt. 134 echo "Looks like you haven't set up your source of cookies yet." 135 n=0 # Make sure the counter 136 #+ doesn't contain random values. 137 while read; do 138 Cookies[$n]=$REPLY # Put the cookie files we found into an array. 139 echo "$n) ${Cookies[$n]}" # Create a menu. 140 n=$(( n + 1 )) # Increment the counter. 141 done < $Cookie_List # Feed the read statement. 142 echo "Enter the number of the cookie file you want to use." 143 echo "If you won't be using cookies, just press RETURN." 144 echo 145 echo "I won't be asking this again. Edit $Config" 146 echo "If you decide to change at a later date" 147 echo "or use the -${cook} option for per session changes." 148 read 149 if [ ! -z $REPLY ]; then # User didn't just press return. 150 Cookie=" --load-cookies ${Cookies[$REPLY]}" 151 # Set the variable here as well as in the config file. 152 153 echo "Cookie=\" --load-cookies ${Cookies[$REPLY]}\"" >> $Config 154 fi 155 echo "cFlag=1" >> $Config # So we know not to ask again. 156 fi 157 158 # end added section end added section end added section end added section 159 # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 160 161 162 163 # Another variable. 164 # This one may or may not be subject to variation. 165 # A bit like the small print. 166 CookiesON=$Cookie 167 # echo "cookie file is $CookiesON" # For debugging. 
168 # echo "home is ${home}" # For debugging. 169 # Got caught with this one! 170 171 172 wopts() 173 { 174 echo "Enter options to pass to wget." 175 echo "It is assumed you know what you're doing." 176 echo 177 echo "You can pass their arguments here too." 178 # That is to say, everything passed here is passed to wget. 179 180 read Wopts 181 # Read in the options to be passed to wget. 182 183 Woptions=" $Wopts" 184 # ^ Why the leading space? 185 # Assign to another variable. 186 # Just for fun, or something . . . 187 188 echo "passing options ${Wopts} to wget" 189 # Mainly for debugging. 190 # Is cute. 191 192 return 193 } 194 195 196 save_func() 197 { 198 echo "Settings will be saved." 199 if [ ! -d $savePath ]; then # See if directory exists. 200 mkdir $savePath # Create the directory to save things in 201 #+ if it isn't already there. 202 fi 203 204 Flag=S 205 # Tell the final bit of code what to do. 206 # Set a flag since stuff is done in main. 207 208 return 209 } 210 211 212 usage() # Tell them how it works. 213 { 214 echo "Welcome to wgetter. This is a front end to wget." 215 echo "It will always run wget with these options:" 216 echo "$CommandA" 217 echo "and the pattern to match: $pattern \ 218 (which you can change at the top of this script)." 219 echo "It will also ask you for recursion depth, \ 220 and if you want to use a referring page." 221 echo "Wgetter accepts the following options:" 222 echo "" 223 echo "-$help : Display this help." 224 echo "-$save : Save the command to a file $savePath/wget-($today) \ 225 instead of running it." 226 echo "-$runn : Run saved wget commands instead of starting a new one -" 227 echo "Enter filename as argument to this option." 228 echo "-$inpu : Run saved wget commands interactively --" 229 echo "The script will ask you for the filename." 230 echo "-$cook : Change the cookies file for this session." 231 echo "-$list : Tell wget to use URL's from a list instead of \ 232 from the command line." 233 echo "-$wopt : Pass any other options direct to wget." 234 echo "" 235 echo "See the wget man page for additional options \ 236 you can pass to wget." 237 echo "" 238 239 exit $E_USAGE # End here. Don't process anything else. 240 } 241 242 243 244 list_func() # Gives the user the option to use the -i option to wget, 245 #+ and a list of URLs. 246 { 247 while [ 1 ]; do 248 echo "Enter the name of the file containing URL's (press q to change 249 your mind)." 250 read urlfile 251 if [ ! -e "$urlfile" ] && [ "$urlfile" != q ]; then 252 # Look for a file, or the quit option. 253 echo "That file does not exist!" 254 elif [ "$urlfile" = q ]; then # Check quit option. 255 echo "Not using a url list." 256 return 257 else 258 echo "using $urlfile." 259 echo "If you gave url's on the command line, I'll use those first." 260 # Report wget standard behaviour to the user. 261 lister=" -i $urlfile" # This is what we want to pass to wget. 262 return 263 fi 264 done 265 } 266 267 268 cookie_func() # Give the user the option to use a different cookie file. 269 { 270 while [ 1 ]; do 271 echo "Change the cookies file. Press return if you don't want to change 272 it." 273 read Cookies 274 # NB: this is not the same as Cookie, earlier. 275 # There is an 's' on the end. 276 # Bit like chocolate chips. 277 if [ -z "$Cookies" ]; then # Escape clause for wusses. 278 return 279 elif [ ! -e "$Cookies" ]; then 280 echo "File does not exist. Try again." # Keep em going . . . 281 else 282 CookiesON=" --load-cookies $Cookies" # File is good -- use it! 
283 return 284 fi 285 done 286 } 287 288 289 290 run_func() 291 { 292 if [ -z "$OPTARG" ]; then 293 # Test to see if we used the in-line option or the query one. 294 if [ ! -d "$savePath" ]; then # If directory doesn't exist . . . 295 echo "$savePath does not appear to exist." 296 echo "Please supply path and filename of saved wget commands:" 297 read newFile 298 until [ -f "$newFile" ]; do # Keep going till we get something. 299 echo "Sorry, that file does not exist. Please try again." 300 # Try really hard to get something. 301 read newFile 302 done 303 304 305 # ----------------------------------------------------------------------- 306 # if [ -z ( grep wget ${newfile} ) ]; then 307 # Assume they haven't got the right file and bail out. 308 # echo "Sorry, that file does not contain wget commands. Aborting." 309 # exit 310 # fi 311 # 312 # This is bogus code. 313 # It doesn't actually work. 314 # If anyone wants to fix it, feel free! 315 # ----------------------------------------------------------------------- 316 317 318 filePath="${newFile}" 319 else 320 echo "Save path is $savePath" 321 echo "Please enter name of the file which you want to use." 322 echo "You have a choice of:" 323 ls $savePath # Give them a choice. 324 read inFile 325 until [ -f "$savePath/$inFile" ]; do # Keep going till 326 #+ we get something. 327 if [ ! -f "${savePath}/${inFile}" ]; then # If file doesn't exist. 328 echo "Sorry, that file does not exist. Please choose from:" 329 ls $savePath # If a mistake is made. 330 read inFile 331 fi 332 done 333 filePath="${savePath}/${inFile}" # Make one variable . . . 334 fi 335 else filePath="${savePath}/${OPTARG}" # Which can be many things . . . 336 fi 337 338 if [ ! -f "$filePath" ]; then # If a bogus file got through. 339 echo "You did not specify a suitable file." 340 echo "Run this script with the -${save} option first." 341 echo "Aborting." 342 exit $E_NO_SAVEFILE 343 fi 344 echo "Using: $filePath" 345 while read; do 346 eval $REPLY 347 echo "Completed: $REPLY" 348 done < $filePath # Feed the actual file we are using into a 'while' loop. 349 350 exit 351 } 352 353 354 355 # Fish out any options we are using for the script. 356 # This is based on the demo in "Learning The Bash Shell" (O'Reilly). 357 while getopts ":$save$cook$help$list$runn:$inpu$wopt" opt 358 do 359 case $opt in 360 $save) save_func;; # Save some wgetter sessions for later. 361 $cook) cookie_func;; # Change cookie file. 362 $help) usage;; # Get help. 363 $list) list_func;; # Allow wget to use a list of URLs. 364 $runn) run_func;; # Useful if you are calling wgetter from, 365 #+ for example, a cron script. 366 $inpu) run_func;; # When you don't know what your files are named. 367 $wopt) wopts;; # Pass options directly to wget. 368 \?) echo "Not a valid option." 369 echo "Use -${wopt} to pass options directly to wget," 370 echo "or -${help} for help";; # Catch anything else. 371 esac 372 done 373 shift $((OPTIND - 1)) # Do funky magic stuff with $#. 374 375 376 if [ -z "$1" ] && [ -z "$lister" ]; then 377 # We should be left with at least one URL 378 #+ on the command line, unless a list is 379 #+ being used -- catch empty CL's. 380 echo "No URL's given! You must enter them on the same line as wgetter2." 381 echo "E.g., wgetter2 http://somesite http://anothersite." 382 echo "Use $help option for more information." 383 exit $E_NO_URLS # Bail out, with appropriate error code. 384 fi 385 386 URLS=" $@" 387 # Use this so that URL list can be changed if we stay in the option loop. 
388 389 while [ 1 ]; do 390 # This is where we ask for the most used options. 391 # (Mostly unchanged from version 1 of wgetter) 392 if [ -z $curDepth ]; then 393 Current="" 394 else Current=" Current value is $curDepth" 395 fi 396 echo "How deep should I go? \ 397 (integer: Default is $depthDefault.$Current)" 398 read Depth # Recursion -- how far should we go? 399 inputB="" # Reset this to blank on each pass of the loop. 400 echo "Enter the name of the referring page (default is none)." 401 read inputB # Need this for some sites. 402 403 echo "Do you want to have the output logged to the terminal" 404 echo "(y/n, default is yes)?" 405 read noHide # Otherwise wget will just log it to a file. 406 407 case $noHide in # Now you see me, now you don't. 408 y|Y ) hide="";; 409 n|N ) hide=" -b";; 410 * ) hide="";; 411 esac 412 413 if [ -z ${Depth} ]; then 414 # User accepted either default or current depth, 415 #+ in which case Depth is now empty. 416 if [ -z ${curDepth} ]; then 417 # See if a depth was set on a previous iteration. 418 Depth="$depthDefault" 419 # Set the default recursion depth if nothing 420 #+ else to use. 421 else Depth="$curDepth" # Otherwise, set the one we used before. 422 fi 423 fi 424 Recurse=" -l $Depth" # Set how deep we want to go. 425 curDepth=$Depth # Remember setting for next time. 426 427 if [ ! -z $inputB ]; then 428 RefA=" --referer=$inputB" # Option to use referring page. 429 fi 430 431 WGETTER="${CommandA}${pattern}${hide}${RefA}${Recurse}\ 432 ${CookiesON}${lister}${Woptions}${URLS}" 433 # Just string the whole lot together . . . 434 # NB: no embedded spaces. 435 # They are in the individual elements so that if any are empty, 436 #+ we don't get an extra space. 437 438 if [ -z "${CookiesON}" ] && [ "$cFlag" = "1" ] ; then 439 echo "Warning -- can't find cookie file" 440 # This should be changed, 441 #+ in case the user has opted to not use cookies. 442 fi 443 444 if [ "$Flag" = "S" ]; then 445 echo "$WGETTER" >> $savePath/wget-${today} 446 # Create a unique filename for today, or append to it if it exists. 447 echo "$inputB" >> $savePath/site-list-${today} 448 # Make a list, so it's easy to refer back to, 449 #+ since the whole command is a bit confusing to look at. 450 echo "Command saved to the file $savePath/wget-${today}" 451 # Tell the user. 452 echo "Referring page URL saved to the file$ \ 453 savePath/site-list-${today}" 454 # Tell the user. 455 Saver=" with save option" 456 # Stick this somewhere, so it appears in the loop if set. 457 else 458 echo "*****************" 459 echo "*****Getting*****" 460 echo "*****************" 461 echo "" 462 echo "$WGETTER" 463 echo "" 464 echo "*****************" 465 eval "$WGETTER" 466 fi 467 468 echo "" 469 echo "Starting over$Saver." 470 echo "If you want to stop, press q." 471 echo "Otherwise, enter some URL's:" 472 # Let them go again. Tell about save option being set. 473 474 read 475 case $REPLY in 476 # Need to change this to a 'trap' clause. 477 q|Q ) exit $E_USER_EXIT;; # Exercise for the reader? 478 * ) URLS=" $REPLY";; 479 esac 480 481 echo "" 482 done 483 484 485 exit 0 |
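The heart of wgetter2 is the getopts loop that hands each option letter off to a handler function and then shifts the processed options away, leaving only the URLs. Here is a minimal, self-contained sketch of that dispatch pattern; the handler names and option letters below are illustrative, not taken from the script.

   #!/bin/bash
   # getopts-dispatch.sh: sketch of the option-dispatch pattern wgetter2 uses.
   # Handler names and letters here are illustrative only.

   save_func () { echo "Would set the save flag."; }
   usage ()     { echo "Usage: $(basename "$0") [-s] [-r file] URL ..."; exit 67; }

   while getopts ":sr:h" opt
   do
     case $opt in
       s ) save_func ;;                       # Set a flag; act on it later.
       r ) echo "Would replay saved commands from: $OPTARG" ;;
       h ) usage ;;
       \?) echo "Not a valid option."; usage ;;
     esac
   done
   shift $((OPTIND - 1))                      # Discard the processed options.

   echo "URLs left on the command line: $*"

Keeping each option in its own function, as the script does, makes it easy to add new letters without touching the main loop.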
Example A-33. A podcasting script
1 #!/bin/bash 2 3 # bashpodder.sh: 4 # By Linc 10/1/2004 5 # Find the latest script at 6 #+ http://linc.homeunix.org:8080/scripts/bashpodder 7 # Last revision 12/14/2004 - Many Contributors! 8 # If you use this and have made improvements or have comments 9 #+ drop me an email at linc dot fessenden at gmail dot com 10 # I'd appreciate it! 11 12 # ==> ABS Guide extra comments. 13 14 # ==> Author of this script has kindly granted permission 15 # ==>+ for inclusion in ABS Guide. 16 17 18 # ==> ################################################################ 19 # 20 # ==> What is "podcasting"? 21 22 # ==> It's broadcasting "radio shows" over the Internet. 23 # ==> These shows can be played on iPods and other music file players. 24 25 # ==> This script makes it possible. 26 # ==> See documentation at the script author's site, above. 27 28 # ==> ################################################################ 29 30 31 # Make script crontab friendly: 32 cd $(dirname $0) 33 # ==> Change to directory where this script lives. 34 35 # datadir is the directory you want podcasts saved to: 36 datadir=$(date +%Y-%m-%d) 37 # ==> Will create a date-labeled directory, named: YYYY-MM-DD 38 39 # Check for and create datadir if necessary: 40 if test ! -d $datadir 41 then 42 mkdir $datadir 43 fi 44 45 # Delete any temp file: 46 rm -f temp.log 47 48 # Read the bp.conf file and wget any url not already 49 #+ in the podcast.log file: 50 while read podcast 51 do # ==> Main action follows. 52 file=$(wget -q $podcast -O - | tr '\r' '\n' | tr \' \" | \ 53 sed -n 's/.*url="\([^"]*\)".*/\1/p') 54 for url in $file 55 do 56 echo $url >> temp.log 57 if ! grep "$url" podcast.log > /dev/null 58 then 59 wget -q -P $datadir "$url" 60 fi 61 done 62 done < bp.conf 63 64 # Move dynamically created log file to permanent log file: 65 cat podcast.log >> temp.log 66 sort temp.log | uniq > podcast.log 67 rm temp.log 68 # Create an m3u playlist: 69 ls $datadir | grep -v m3u > $datadir/podcast.m3u 70 71 72 exit 0 73 74 ################################################# 75 For a different scripting approach to Podcasting, 76 see Phil Salkie's article, 77 "Internet Radio to Podcast with Shell Tools" 78 in the September, 2005 issue of LINUX JOURNAL, 79 http://www.linuxjournal.com/article/8171 80 ################################################# |
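The whole trick is in the pipeline that turns an RSS feed into a list of enclosure URLs: normalize carriage returns and quote characters, then let sed pull out every url="..." attribute. Here is that stage in isolation; the feed address is only a placeholder, and the download is echoed rather than performed.

   #!/bin/bash
   # enclosure-urls.sh: the feed-parsing stage of bashpodder, by itself.
   # The feed URL below is a placeholder, not a real podcast.

   feed="http://www.example.com/podcast.rss"

   wget -q "$feed" -O - |
   tr '\r' '\n' | tr \' \" |                       # Normalize line ends and quotes.
   sed -n 's/.*url="\([^"]*\)".*/\1/p' |           # Keep only the url="..." values.
   while read -r url
   do
     if ! grep -q "$url" podcast.log 2>/dev/null   # Skip anything already logged.
     then
       echo "Would fetch: $url"                    # Real script: wget -q -P $datadir "$url"
     fi
   done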
Example A-34. Nightly backup to a firewire HD
1 #!/bin/bash 2 # nightly-backup.sh 3 # http://www.richardneill.org/source.php#nightly-backup-rsync 4 # Copyright (c) 2005 Richard Neill <backup@richardneill.org>. 5 # This is Free Software licensed under the GNU GPL. 6 # ==> Included in ABS Guide with script author's kind permission. 7 # ==> (Thanks!) 8 9 # This does a backup from the host computer to a locally connected 10 #+ firewire HDD using rsync and ssh. 11 # It then rotates the backups. 12 # Run it via cron every night at 5am. 13 # This only backs up the home directory. 14 # If ownerships (other than the user's) should be preserved, 15 #+ then run the rsync process as root (and re-instate the -o). 16 # We save every day for 7 days, then every week for 4 weeks, 17 #+ then every month for 3 months. 18 19 # See: http://www.mikerubel.org/computers/rsync_snapshots/ 20 #+ for more explanation of the theory. 21 # Save as: $HOME/bin/nightly-backup_firewire-hdd.sh 22 23 # Known bugs: 24 # ---------- 25 # i) Ideally, we want to exclude ~/.tmp and the browser caches. 26 27 # ii) If the user is sitting at the computer at 5am, 28 #+ and files are modified while the rsync is occurring, 29 #+ then the BACKUP_JUSTINCASE branch gets triggered. 30 # To some extent, this is a 31 #+ feature, but it also causes a "disk-space leak". 32 33 34 35 36 37 ##### BEGIN CONFIGURATION SECTION ############################################ 38 LOCAL_USER=rjn # User whose home directory should be backed up. 39 MOUNT_POINT=/backup # Mountpoint of backup drive. 40 # NO trailing slash! 41 # This must be unique (eg using a udev symlink) 42 SOURCE_DIR=/home/$LOCAL_USER # NO trailing slash - it DOES matter to rsync. 43 BACKUP_DEST_DIR=$MOUNT_POINT/backup/`hostname -s`.${LOCAL_USER}.nightly_backup 44 DRY_RUN=false #If true, invoke rsync with -n, to do a dry run. 45 # Comment out or set to false for normal use. 46 VERBOSE=false # If true, make rsync verbose. 47 # Comment out or set to false otherwise. 48 COMPRESS=false # If true, compress. 49 # Good for internet, bad on LAN. 50 # Comment out or set to false otherwise. 51 52 ### Exit Codes ### 53 E_VARS_NOT_SET=64 54 E_COMMANDLINE=65 55 E_MOUNT_FAIL=70 56 E_NOSOURCEDIR=71 57 E_UNMOUNTED=72 58 E_BACKUP=73 59 ##### END CONFIGURATION SECTION ############################################## 60 61 62 # Check that all the important variables have been set: 63 if [ -z "$LOCAL_USER" ] || 64 [ -z "$SOURCE_DIR" ] || 65 [ -z "$MOUNT_POINT" ] || 66 [ -z "$BACKUP_DEST_DIR" ] 67 then 68 echo 'One of the variables is not set! Edit the file: $0. BACKUP FAILED.' 69 exit $E_VARS_NOT_SET 70 fi 71 72 if [ "$#" != 0 ] # If command-line param(s) . . . 73 then # Here document(ation). 74 cat <<-ENDOFTEXT 75 Automatic Nightly backup run from cron. 76 Read the source for more details: $0 77 The backup directory is $BACKUP_DEST_DIR . 78 It will be created if necessary; initialisation is no longer required. 79 80 WARNING: Contents of $BACKUP_DEST_DIR are rotated. 81 Directories named 'backup.\$i' will eventually be DELETED. 82 We keep backups from every day for 7 days (1-8), 83 then every week for 4 weeks (9-12), 84 then every month for 3 months (13-15). 85 86 You may wish to add this to your crontab using 'crontab -e' 87 # Back up files: $SOURCE_DIR to $BACKUP_DEST_DIR 88 #+ every night at 3:15 am 89 15 03 * * * /home/$LOCAL_USER/bin/nightly-backup_firewire-hdd.sh 90 91 Don't forget to verify the backups are working, 92 especially if you don't read cron's mail!" 93 ENDOFTEXT 94 exit $E_COMMANDLINE 95 fi 96 97 98 # Parse the options. 
99 # ================== 100 101 if [ "$DRY_RUN" == "true" ]; then 102 DRY_RUN="-n" 103 echo "WARNING:" 104 echo "THIS IS A 'DRY RUN'!" 105 echo "No data will actually be transferred!" 106 else 107 DRY_RUN="" 108 fi 109 110 if [ "$VERBOSE" == "true" ]; then 111 VERBOSE="-v" 112 else 113 VERBOSE="" 114 fi 115 116 if [ "$COMPRESS" == "true" ]; then 117 COMPRESS="-z" 118 else 119 COMPRESS="" 120 fi 121 122 123 # Every week (actually of 8 days) and every month, 124 #+ extra backups are preserved. 125 DAY_OF_MONTH=`date +%d` # Day of month (01..31). 126 if [ $DAY_OF_MONTH = 01 ]; then # First of month. 127 MONTHSTART=true 128 elif [ $DAY_OF_MONTH = 08 \ 129 -o $DAY_OF_MONTH = 16 \ 130 -o $DAY_OF_MONTH = 24 ]; then 131 # Day 8,16,24 (use 8, not 7 to better handle 31-day months) 132 WEEKSTART=true 133 fi 134 135 136 137 # Check that the HDD is mounted. 138 # At least, check that *something* is mounted here! 139 # We can use something unique to the device, rather than just guessing 140 #+ the scsi-id by having an appropriate udev rule in 141 #+ /etc/udev/rules.d/10-rules.local 142 #+ and by putting a relevant entry in /etc/fstab. 143 # Eg: this udev rule: 144 # BUS="scsi", KERNEL="sd*", SYSFS{vendor}="WDC WD16", 145 # SYSFS{model}="00JB-00GVA0 ", NAME="%k", SYMLINK="lacie_1394d%n" 146 147 if mount | grep $MOUNT_POINT >/dev/null; then 148 echo "Mount point $MOUNT_POINT is indeed mounted. OK" 149 else 150 echo -n "Attempting to mount $MOUNT_POINT..." 151 # If it isn't mounted, try to mount it. 152 sudo mount $MOUNT_POINT 2>/dev/null 153 154 if mount | grep $MOUNT_POINT >/dev/null; then 155 UNMOUNT_LATER=TRUE 156 echo "OK" 157 # Note: Ensure that this is also unmounted 158 #+ if we exit prematurely with failure. 159 else 160 echo "FAILED" 161 echo -e "Nothing is mounted at $MOUNT_POINT. BACKUP FAILED!" 162 exit $E_MOUNT_FAIL 163 fi 164 fi 165 166 167 # Check that source dir exists and is readable. 168 if [ ! -r $SOURCE_DIR ] ; then 169 echo "$SOURCE_DIR does not exist, or cannot be read. BACKUP FAILED." 170 exit $E_NOSOURCEDIR 171 fi 172 173 174 # Check that the backup directory structure is as it should be. 175 # If not, create it. 176 # Create the subdirectories. 177 # Note that backup.0 will be created as needed by rsync. 178 179 for ((i=1;i<=15;i++)); do 180 if [ ! -d $BACKUP_DEST_DIR/backup.$i ]; then 181 if /bin/mkdir -p $BACKUP_DEST_DIR/backup.$i ; then 182 # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ No [ ] test brackets. Why? 183 echo "Warning: directory $BACKUP_DEST_DIR/backup.$i is missing," 184 echo "or was not initialised. (Re-)creating it." 185 else 186 echo "ERROR: directory $BACKUP_DEST_DIR/backup.$i" 187 echo "is missing and could not be created." 188 if [ "$UNMOUNT_LATER" == "TRUE" ]; then 189 # Before we exit, unmount the mount point if necessary. 190 cd 191 sudo umount $MOUNT_POINT && 192 echo "Unmounted $MOUNT_POINT again. Giving up." 193 fi 194 exit $E_UNMOUNTED 195 fi 196 fi 197 done 198 199 200 # Set the permission to 700 for security 201 #+ on an otherwise permissive multi-user system. 202 if ! /bin/chmod 700 $BACKUP_DEST_DIR ; then 203 echo "ERROR: Could not set permissions on $BACKUP_DEST_DIR to 700." 204 205 if [ "$UNMOUNT_LATER" == "TRUE" ]; then 206 # Before we exit, unmount the mount point if necessary. 207 cd ; sudo umount $MOUNT_POINT \ 208 && echo "Unmounted $MOUNT_POINT again. Giving up." 209 fi 210 211 exit $E_UNMOUNTED 212 fi 213 214 # Create the symlink: current -> backup.1 if required. 215 # A failure here is not critical. 216 cd $BACKUP_DEST_DIR 217 if [ ! 
-h current ] ; then 218 if ! /bin/ln -s backup.1 current ; then 219 echo "WARNING: could not create symlink current -> backup.1" 220 fi 221 fi 222 223 224 # Now, do the rsync. 225 echo "Now doing backup with rsync..." 226 echo "Source dir: $SOURCE_DIR" 227 echo -e "Backup destination dir: $BACKUP_DEST_DIR\n" 228 229 230 /usr/bin/rsync $DRY_RUN $VERBOSE -a -S --delete --modify-window=60 \ 231 --link-dest=../backup.1 $SOURCE_DIR $BACKUP_DEST_DIR/backup.0/ 232 233 # Only warn, rather than exit if the rsync failed, 234 #+ since it may only be a minor problem. 235 # E.g., if one file is not readable, rsync will fail. 236 # This shouldn't prevent the rotation. 237 # Not using, e.g., `date +%a` since these directories 238 #+ are just full of links and don't consume *that much* space. 239 240 if [ $? != 0 ]; then 241 BACKUP_JUSTINCASE=backup.`date +%F_%T`.justincase 242 echo "WARNING: the rsync process did not entirely succeed." 243 echo "Something might be wrong." 244 echo "Saving an extra copy at: $BACKUP_JUSTINCASE" 245 echo "WARNING: if this occurs regularly, a LOT of space will be consumed," 246 echo "even though these are just hard-links!" 247 fi 248 249 # Save a readme in the backup parent directory. 250 # Save another one in the recent subdirectory. 251 echo "Backup of $SOURCE_DIR on `hostname` was last run on \ 252 `date`" > $BACKUP_DEST_DIR/README.txt 253 echo "This backup of $SOURCE_DIR on `hostname` was created on \ 254 `date`" > $BACKUP_DEST_DIR/backup.0/README.txt 255 256 # If we are not in a dry run, rotate the backups. 257 [ -z "$DRY_RUN" ] && 258 259 # Check how full the backup disk is. 260 # Warn if 90%. if 98% or more, we'll probably fail, so give up. 261 # (Note: df can output to more than one line.) 262 # We test this here, rather than before 263 #+ so that rsync may possibly have a chance. 264 DISK_FULL_PERCENT=`/bin/df $BACKUP_DEST_DIR | 265 tr "\n" ' ' | awk '{print $12}' | grep -oE [0-9]+ ` 266 echo "Disk space check on backup partition \ 267 $MOUNT_POINT $DISK_FULL_PERCENT% full." 268 if [ $DISK_FULL_PERCENT -gt 90 ]; then 269 echo "Warning: Disk is greater than 90% full." 270 fi 271 if [ $DISK_FULL_PERCENT -gt 98 ]; then 272 echo "Error: Disk is full! Giving up." 273 if [ "$UNMOUNT_LATER" == "TRUE" ]; then 274 # Before we exit, unmount the mount point if necessary. 275 cd; sudo umount $MOUNT_POINT && 276 echo "Unmounted $MOUNT_POINT again. Giving up." 277 fi 278 exit $E_UNMOUNTED 279 fi 280 281 282 # Create an extra backup. 283 # If this copy fails, give up. 284 if [ -n "$BACKUP_JUSTINCASE" ]; then 285 if ! /bin/cp -al $BACKUP_DEST_DIR/backup.0 \ 286 $BACKUP_DEST_DIR/$BACKUP_JUSTINCASE 287 then 288 echo "ERROR: Failed to create extra copy \ 289 $BACKUP_DEST_DIR/$BACKUP_JUSTINCASE" 290 if [ "$UNMOUNT_LATER" == "TRUE" ]; then 291 # Before we exit, unmount the mount point if necessary. 292 cd ;sudo umount $MOUNT_POINT && 293 echo "Unmounted $MOUNT_POINT again. Giving up." 294 fi 295 exit $E_UNMOUNTED 296 fi 297 fi 298 299 300 # At start of month, rotate the oldest 8. 301 if [ "$MONTHSTART" == "true" ]; then 302 echo -e "\nStart of month. 
\ 303 Removing oldest backup: $BACKUP_DEST_DIR/backup.15" && 304 /bin/rm -rf $BACKUP_DEST_DIR/backup.15 && 305 echo "Rotating monthly,weekly backups: \ 306 $BACKUP_DEST_DIR/backup.[8-14] -> $BACKUP_DEST_DIR/backup.[9-15]" && 307 /bin/mv $BACKUP_DEST_DIR/backup.14 $BACKUP_DEST_DIR/backup.15 && 308 /bin/mv $BACKUP_DEST_DIR/backup.13 $BACKUP_DEST_DIR/backup.14 && 309 /bin/mv $BACKUP_DEST_DIR/backup.12 $BACKUP_DEST_DIR/backup.13 && 310 /bin/mv $BACKUP_DEST_DIR/backup.11 $BACKUP_DEST_DIR/backup.12 && 311 /bin/mv $BACKUP_DEST_DIR/backup.10 $BACKUP_DEST_DIR/backup.11 && 312 /bin/mv $BACKUP_DEST_DIR/backup.9 $BACKUP_DEST_DIR/backup.10 && 313 /bin/mv $BACKUP_DEST_DIR/backup.8 $BACKUP_DEST_DIR/backup.9 314 315 # At start of week, rotate the second-oldest 4. 316 elif [ "$WEEKSTART" == "true" ]; then 317 echo -e "\nStart of week. \ 318 Removing oldest weekly backup: $BACKUP_DEST_DIR/backup.12" && 319 /bin/rm -rf $BACKUP_DEST_DIR/backup.12 && 320 321 echo "Rotating weekly backups: \ 322 $BACKUP_DEST_DIR/backup.[8-11] -> $BACKUP_DEST_DIR/backup.[9-12]" && 323 /bin/mv $BACKUP_DEST_DIR/backup.11 $BACKUP_DEST_DIR/backup.12 && 324 /bin/mv $BACKUP_DEST_DIR/backup.10 $BACKUP_DEST_DIR/backup.11 && 325 /bin/mv $BACKUP_DEST_DIR/backup.9 $BACKUP_DEST_DIR/backup.10 && 326 /bin/mv $BACKUP_DEST_DIR/backup.8 $BACKUP_DEST_DIR/backup.9 327 328 else 329 echo -e "\nRemoving oldest daily backup: $BACKUP_DEST_DIR/backup.8" && 330 /bin/rm -rf $BACKUP_DEST_DIR/backup.8 331 332 fi && 333 334 # Every day, rotate the newest 8. 335 echo "Rotating daily backups: \ 336 $BACKUP_DEST_DIR/backup.[1-7] -> $BACKUP_DEST_DIR/backup.[2-8]" && 337 /bin/mv $BACKUP_DEST_DIR/backup.7 $BACKUP_DEST_DIR/backup.8 && 338 /bin/mv $BACKUP_DEST_DIR/backup.6 $BACKUP_DEST_DIR/backup.7 && 339 /bin/mv $BACKUP_DEST_DIR/backup.5 $BACKUP_DEST_DIR/backup.6 && 340 /bin/mv $BACKUP_DEST_DIR/backup.4 $BACKUP_DEST_DIR/backup.5 && 341 /bin/mv $BACKUP_DEST_DIR/backup.3 $BACKUP_DEST_DIR/backup.4 && 342 /bin/mv $BACKUP_DEST_DIR/backup.2 $BACKUP_DEST_DIR/backup.3 && 343 /bin/mv $BACKUP_DEST_DIR/backup.1 $BACKUP_DEST_DIR/backup.2 && 344 /bin/mv $BACKUP_DEST_DIR/backup.0 $BACKUP_DEST_DIR/backup.1 && 345 346 SUCCESS=true 347 348 349 if [ "$UNMOUNT_LATER" == "TRUE" ]; then 350 # Unmount the mount point if it wasn't mounted to begin with. 351 cd ; sudo umount $MOUNT_POINT && echo "Unmounted $MOUNT_POINT again." 352 fi 353 354 355 if [ "$SUCCESS" == "true" ]; then 356 echo 'SUCCESS!' 357 exit 0 358 fi 359 360 # Should have already exited if backup worked. 361 echo 'BACKUP FAILED! Is this just a dry run? Is the disk full?) ' 362 exit $E_BACKUP |
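The space-saving idea behind the rotation is rsync's --link-dest option: each night's backup.0 hard-links every unchanged file against yesterday's backup.1, so only modified files occupy new disk space. A minimal sketch of one such cycle follows; the paths are throwaway placeholders, not the script's configuration.

   #!/bin/bash
   # snapshot-sketch.sh: one rsync hard-link snapshot plus a single rotation.
   # SRC and DEST are placeholders -- point them somewhere harmless to test.

   SRC=$HOME/Documents
   DEST=/tmp/backup-demo

   mkdir -p "$DEST/backup.1"

   # Unchanged files are hard-linked against the previous snapshot;
   #+ changed files are copied. --delete keeps the mirror exact.
   rsync -a --delete --link-dest=../backup.1 "$SRC" "$DEST/backup.0/"

   # Rotate: what was newest becomes the reference for the next run.
   rm -rf "$DEST/backup.1"
   mv "$DEST/backup.0" "$DEST/backup.1"

The full script keeps a ladder of such directories (backup.1 through backup.15) and promotes them daily, weekly, and monthly.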
Example A-35. An expanded cd command
1 ########################################################################### 2 # 3 # cdll 4 # by Phil Braham 5 # 6 # ############################################ 7 # Latest version of this script available from 8 # http://freshmeat.net/projects/cd/ 9 # ############################################ 10 # 11 # .cd_new 12 # 13 # An enhancement of the Unix cd command 14 # 15 # There are unlimited stack entries and special entries. The stack 16 # entries keep the last cd_maxhistory 17 # directories that have been used. The special entries can be 18 # assigned to commonly used directories. 19 # 20 # The special entries may be pre-assigned by setting the environment 21 # variables CDSn or by using the -u or -U command. 22 # 23 # The following is a suggestion for the .profile file: 24 # 25 # . cdll # Set up the cd command 26 # alias cd='cd_new' # Replace the cd command 27 # cd -U # Upload pre-assigned entries for 28 # #+ the stack and special entries 29 # cd -D # Set non-default mode 30 # alias @="cd_new @" # Allow @ to be used to get history 31 # 32 # For help type: 33 # 34 # cd -h or 35 # cd -H 36 # 37 # 38 ########################################################################### 39 # 40 # Version 1.2.1 41 # 42 # Written by Phil Braham - Realtime Software Pty Ltd 43 # (realtime@mpx.com.au) 44 # Please send any suggestions or enhancements to the author (also at 45 # phil@braham.net) 46 # 47 ############################################################################ 48 49 cd_hm () 50 { 51 ${PRINTF} "%s" "cd [dir] [0-9] [@[s|h] [-g [<dir>]] [-d] \ 52 [-D] [-r<n>] [dir|0-9] [-R<n>] [<dir>|0-9] 53 [-s<n>] [-S<n>] [-u] [-U] [-f] [-F] [-h] [-H] [-v] 54 <dir> Go to directory 55 0-n Go to previous directory (0 is previous, 1 is last but 1 etc) 56 n is up to max history (default is 50) 57 @ List history and special entries 58 @h List history entries 59 @s List special entries 60 -g [<dir>] Go to literal name (bypass special names) 61 This is to allow access to dirs called '0','1','-h' etc 62 -d Change default action - verbose. (See note) 63 -D Change default action - silent. (See note) 64 -s<n> Go to the special entry <n>* 65 -S<n> Go to the special entry <n> 66 and replace it with the current dir* 67 -r<n> [<dir>] Go to directory <dir> 68 and then put it on special entry <n>* 69 -R<n> [<dir>] Go to directory <dir> 70 and put current dir on special entry <n>* 71 -a<n> Alternative suggested directory. See note below. 72 -f [<file>] File entries to <file>. 73 -u [<file>] Update entries from <file>. 74 If no filename supplied then default file 75 (${CDPath}${2:-"$CDFile"}) is used 76 -F and -U are silent versions 77 -v Print version number 78 -h Help 79 -H Detailed help 80 81 *The special entries (0 - 9) are held until log off, replaced by another 82 entry or updated with the -u command 83 84 Alternative suggested directories: 85 If a directory is not found then CD will suggest any 86 possibilities. These are directories starting with the same letters 87 and if any are found they are listed prefixed with -a<n> 88 where <n> is a number. 89 It's possible to go to the directory by entering cd -a<n> 90 on the command line. 91 92 The directory for -r<n> or -R<n> may be a number. 
93 For example: 94 $ cd -r3 4 Go to history entry 4 and put it on special entry 3 95 $ cd -R3 4 Put current dir on the special entry 3 96 and go to history entry 4 97 $ cd -s3 Go to special entry 3 98 99 Note that commands R,r,S and s may be used without a number 100 and refer to 0: 101 $ cd -s Go to special entry 0 102 $ cd -S Go to special entry 0 and make special 103 entry 0 current dir 104 $ cd -r 1 Go to history entry 1 and put it on special entry 0 105 $ cd -r Go to history entry 0 and put it on special entry 0 106 " 107 if ${TEST} "$CD_MODE" = "PREV" 108 then 109 ${PRINTF} "$cd_mnset" 110 else 111 ${PRINTF} "$cd_mset" 112 fi 113 } 114 115 cd_Hm () 116 { 117 cd_hm 118 ${PRINTF} "%s" " 119 The previous directories (0-$cd_maxhistory) are stored in the 120 environment variables CD[0] - CD[$cd_maxhistory] 121 Similarly the special directories S0 - $cd_maxspecial are in 122 the environment variable CDS[0] - CDS[$cd_maxspecial] 123 and may be accessed from the command line 124 125 The default pathname for the -f and -u commands is $CDPath 126 The default filename for the -f and -u commands is $CDFile 127 128 Set the following environment variables: 129 CDL_PROMPTLEN - Set to the length of prompt you require. 130 Prompt string is set to the right characters of the 131 current directory. 132 If not set then prompt is left unchanged 133 CDL_PROMPT_PRE - Set to the string to prefix the prompt. 134 Default is: 135 non-root: \"\\[\\e[01;34m\\]\" (sets colour to blue). 136 root: \"\\[\\e[01;31m\\]\" (sets colour to red). 137 CDL_PROMPT_POST - Set to the string to suffix the prompt. 138 Default is: 139 non-root: \"\\[\\e[00m\\]$\" 140 (resets colour and displays $). 141 root: \"\\[\\e[00m\\]#\" 142 (resets colour and displays #). 143 CDPath - Set the default path for the -f & -u options. 144 Default is home directory 145 CDFile - Set the default filename for the -f & -u options. 146 Default is cdfile 147 148 " 149 cd_version 150 151 } 152 153 cd_version () 154 { 155 printf "Version: ${VERSION_MAJOR}.${VERSION_MINOR} Date: ${VERSION_DATE}\n" 156 } 157 158 # 159 # Truncate right. 
160 # 161 # params: 162 # p1 - string 163 # p2 - length to truncate to 164 # 165 # returns string in tcd 166 # 167 cd_right_trunc () 168 { 169 local tlen=${2} 170 local plen=${#1} 171 local str="${1}" 172 local diff 173 local filler="<--" 174 if ${TEST} ${plen} -le ${tlen} 175 then 176 tcd="${str}" 177 else 178 let diff=${plen}-${tlen} 179 elen=3 180 if ${TEST} ${diff} -le 2 181 then 182 let elen=${diff} 183 fi 184 tlen=-${tlen} 185 let tlen=${tlen}+${elen} 186 tcd=${filler:0:elen}${str:tlen} 187 fi 188 } 189 190 # 191 # Three versions of do history: 192 # cd_dohistory - packs history and specials side by side 193 # cd_dohistoryH - Shows only hstory 194 # cd_dohistoryS - Shows only specials 195 # 196 cd_dohistory () 197 { 198 cd_getrc 199 ${PRINTF} "History:\n" 200 local -i count=${cd_histcount} 201 while ${TEST} ${count} -ge 0 202 do 203 cd_right_trunc "${CD[count]}" ${cd_lchar} 204 ${PRINTF} "%2d %-${cd_lchar}.${cd_lchar}s " ${count} "${tcd}" 205 206 cd_right_trunc "${CDS[count]}" ${cd_rchar} 207 ${PRINTF} "S%d %-${cd_rchar}.${cd_rchar}s\n" ${count} "${tcd}" 208 count=${count}-1 209 done 210 } 211 212 cd_dohistoryH () 213 { 214 cd_getrc 215 ${PRINTF} "History:\n" 216 local -i count=${cd_maxhistory} 217 while ${TEST} ${count} -ge 0 218 do 219 ${PRINTF} "${count} %-${cd_flchar}.${cd_flchar}s\n" ${CD[$count]} 220 count=${count}-1 221 done 222 } 223 224 cd_dohistoryS () 225 { 226 cd_getrc 227 ${PRINTF} "Specials:\n" 228 local -i count=${cd_maxspecial} 229 while ${TEST} ${count} -ge 0 230 do 231 ${PRINTF} "S${count} %-${cd_flchar}.${cd_flchar}s\n" ${CDS[$count]} 232 count=${count}-1 233 done 234 } 235 236 cd_getrc () 237 { 238 cd_flchar=$(stty -a | awk -F \; 239 '/rows/ { print $2 $3 }' | awk -F \ '{ print $4 }') 240 if ${TEST} ${cd_flchar} -ne 0 241 then 242 cd_lchar=${cd_flchar}/2-5 243 cd_rchar=${cd_flchar}/2-5 244 cd_flchar=${cd_flchar}-5 245 else 246 cd_flchar=${FLCHAR:=75} 247 # cd_flchar is used for for the @s & @h history 248 cd_lchar=${LCHAR:=35} 249 cd_rchar=${RCHAR:=35} 250 fi 251 } 252 253 cd_doselection () 254 { 255 local -i nm=0 256 cd_doflag="TRUE" 257 if ${TEST} "${CD_MODE}" = "PREV" 258 then 259 if ${TEST} -z "$cd_npwd" 260 then 261 cd_npwd=0 262 fi 263 fi 264 tm=$(echo "${cd_npwd}" | cut -b 1) 265 if ${TEST} "${tm}" = "-" 266 then 267 pm=$(echo "${cd_npwd}" | cut -b 2) 268 nm=$(echo "${cd_npwd}" | cut -d $pm -f2) 269 case "${pm}" in 270 a) cd_npwd=${cd_sugg[$nm]} ;; 271 s) cd_npwd="${CDS[$nm]}" ;; 272 S) cd_npwd="${CDS[$nm]}" ; CDS[$nm]=`pwd` ;; 273 r) cd_npwd="$2" ; cd_specDir=$nm ; cd_doselection "$1" "$2";; 274 R) cd_npwd="$2" ; CDS[$nm]=`pwd` ; cd_doselection "$1" "$2";; 275 esac 276 fi 277 278 if ${TEST} "${cd_npwd}" != "." -a "${cd_npwd}" \ 279 != ".." 
-a "${cd_npwd}" -le ${cd_maxhistory} >>/dev/null 2>&1 280 then 281 cd_npwd=${CD[$cd_npwd]} 282 else 283 case "$cd_npwd" in 284 @) cd_dohistory ; cd_doflag="FALSE" ;; 285 @h) cd_dohistoryH ; cd_doflag="FALSE" ;; 286 @s) cd_dohistoryS ; cd_doflag="FALSE" ;; 287 -h) cd_hm ; cd_doflag="FALSE" ;; 288 -H) cd_Hm ; cd_doflag="FALSE" ;; 289 -f) cd_fsave "SHOW" $2 ; cd_doflag="FALSE" ;; 290 -u) cd_upload "SHOW" $2 ; cd_doflag="FALSE" ;; 291 -F) cd_fsave "NOSHOW" $2 ; cd_doflag="FALSE" ;; 292 -U) cd_upload "NOSHOW" $2 ; cd_doflag="FALSE" ;; 293 -g) cd_npwd="$2" ;; 294 -d) cd_chdefm 1; cd_doflag="FALSE" ;; 295 -D) cd_chdefm 0; cd_doflag="FALSE" ;; 296 -r) cd_npwd="$2" ; cd_specDir=0 ; cd_doselection "$1" "$2";; 297 -R) cd_npwd="$2" ; CDS[0]=`pwd` ; cd_doselection "$1" "$2";; 298 -s) cd_npwd="${CDS[0]}" ;; 299 -S) cd_npwd="${CDS[0]}" ; CDS[0]=`pwd` ;; 300 -v) cd_version ; cd_doflag="FALSE";; 301 esac 302 fi 303 } 304 305 cd_chdefm () 306 { 307 if ${TEST} "${CD_MODE}" = "PREV" 308 then 309 CD_MODE="" 310 if ${TEST} $1 -eq 1 311 then 312 ${PRINTF} "${cd_mset}" 313 fi 314 else 315 CD_MODE="PREV" 316 if ${TEST} $1 -eq 1 317 then 318 ${PRINTF} "${cd_mnset}" 319 fi 320 fi 321 } 322 323 cd_fsave () 324 { 325 local sfile=${CDPath}${2:-"$CDFile"} 326 if ${TEST} "$1" = "SHOW" 327 then 328 ${PRINTF} "Saved to %s\n" $sfile 329 fi 330 ${RM} -f ${sfile} 331 local -i count=0 332 while ${TEST} ${count} -le ${cd_maxhistory} 333 do 334 echo "CD[$count]=\"${CD[$count]}\"" >> ${sfile} 335 count=${count}+1 336 done 337 count=0 338 while ${TEST} ${count} -le ${cd_maxspecial} 339 do 340 echo "CDS[$count]=\"${CDS[$count]}\"" >> ${sfile} 341 count=${count}+1 342 done 343 } 344 345 cd_upload () 346 { 347 local sfile=${CDPath}${2:-"$CDFile"} 348 if ${TEST} "${1}" = "SHOW" 349 then 350 ${PRINTF} "Loading from %s\n" ${sfile} 351 fi 352 . ${sfile} 353 } 354 355 cd_new () 356 { 357 local -i count 358 local -i choose=0 359 360 cd_npwd="${1}" 361 cd_specDir=-1 362 cd_doselection "${1}" "${2}" 363 364 if ${TEST} ${cd_doflag} = "TRUE" 365 then 366 if ${TEST} "${CD[0]}" != "`pwd`" 367 then 368 count=$cd_maxhistory 369 while ${TEST} $count -gt 0 370 do 371 CD[$count]=${CD[$count-1]} 372 count=${count}-1 373 done 374 CD[0]=`pwd` 375 fi 376 command cd "${cd_npwd}" 2>/dev/null 377 if ${TEST} $? -eq 1 378 then 379 ${PRINTF} "Unknown dir: %s\n" "${cd_npwd}" 380 local -i ftflag=0 381 for i in "${cd_npwd}"* 382 do 383 if ${TEST} -d "${i}" 384 then 385 if ${TEST} ${ftflag} -eq 0 386 then 387 ${PRINTF} "Suggest:\n" 388 ftflag=1 389 fi 390 ${PRINTF} "\t-a${choose} %s\n" "$i" 391 cd_sugg[$choose]="${i}" 392 choose=${choose}+1 393 fi 394 done 395 fi 396 fi 397 398 if ${TEST} ${cd_specDir} -ne -1 399 then 400 CDS[${cd_specDir}]=`pwd` 401 fi 402 403 if ${TEST} ! -z "${CDL_PROMPTLEN}" 404 then 405 cd_right_trunc "${PWD}" ${CDL_PROMPTLEN} 406 cd_rp=${CDL_PROMPT_PRE}${tcd}${CDL_PROMPT_POST} 407 export PS1="$(echo -ne ${cd_rp})" 408 fi 409 } 410 ######################################################################### 411 # # 412 # Initialisation here # 413 # # 414 ######################################################################### 415 # 416 VERSION_MAJOR="1" 417 VERSION_MINOR="2.1" 418 VERSION_DATE="24-MAY-2003" 419 # 420 alias cd=cd_new 421 # 422 # Set up commands 423 RM=/bin/rm 424 TEST=test 425 PRINTF=printf # Use builtin printf 426 427 ######################################################################### 428 # # 429 # Change this to modify the default pre- and post prompt strings. # 430 # These only come into effect if CDL_PROMPTLEN is set. 
# 431 # # 432 ######################################################################### 433 if ${TEST} ${EUID} -eq 0 434 then 435 # CDL_PROMPT_PRE=${CDL_PROMPT_PRE:="$HOSTNAME@"} 436 CDL_PROMPT_PRE=${CDL_PROMPT_PRE:="\\[\\e[01;31m\\]"} # Root is in red 437 CDL_PROMPT_POST=${CDL_PROMPT_POST:="\\[\\e[00m\\]#"} 438 else 439 CDL_PROMPT_PRE=${CDL_PROMPT_PRE:="\\[\\e[01;34m\\]"} # Users in blue 440 CDL_PROMPT_POST=${CDL_PROMPT_POST:="\\[\\e[00m\\]$"} 441 fi 442 ######################################################################### 443 # 444 # cd_maxhistory defines the max number of history entries allowed. 445 typeset -i cd_maxhistory=50 446 447 ######################################################################### 448 # 449 # cd_maxspecial defines the number of special entries. 450 typeset -i cd_maxspecial=9 451 # 452 # 453 ######################################################################### 454 # 455 # cd_histcount defines the number of entries displayed in 456 #+ the history command. 457 typeset -i cd_histcount=9 458 # 459 ######################################################################### 460 export CDPath=${HOME}/ 461 # Change these to use a different # 462 #+ default path and filename # 463 export CDFile=${CDFILE:=cdfile} # for the -u and -f commands # 464 # 465 ######################################################################### 466 # 467 typeset -i cd_lchar cd_rchar cd_flchar 468 # This is the number of chars to allow for the # 469 cd_flchar=${FLCHAR:=75} #+ cd_flchar is used for for the @s & @h history# 470 471 typeset -ax CD CDS 472 # 473 cd_mset="\n\tDefault mode is now set - entering cd with no parameters \ 474 has the default action\n\tUse cd -d or -D for cd to go to \ 475 previous directory with no parameters\n" 476 cd_mnset="\n\tNon-default mode is now set - entering cd with no \ 477 parameters is the same as entering cd 0\n\tUse cd -d or \ 478 -D to change default cd action\n" 479 480 # ==================================================================== # 481 482 483 484 : <<DOCUMENTATION 485 486 Written by Phil Braham. Realtime Software Pty Ltd. 487 Released under GNU license. Free to use. Please pass any modifications 488 or comments to the author Phil Braham: 489 490 realtime@mpx.com.au 491 ======================================================================= 492 493 cdll is a replacement for cd and incorporates similar functionality to 494 the bash pushd and popd commands but is independent of them. 495 496 This version of cdll has been tested on Linux using Bash. It will work 497 on most Linux versions but will probably not work on other shells without 498 modification. 499 500 Introduction 501 ============ 502 503 cdll allows easy moving about between directories. When changing to a new 504 directory the current one is automatically put onto a stack. By default 505 50 entries are kept, but this is configurable. Special directories can be 506 kept for easy access - by default up to 10, but this is configurable. The 507 most recent stack entries and the special entries can be easily viewed. 508 509 The directory stack and special entries can be saved to, and loaded from, 510 a file. This allows them to be set up on login, saved before logging out 511 or changed when moving project to project. 512 513 In addition, cdll provides a flexible command prompt facility that allows, 514 for example, a directory name in colour that is truncated from the left 515 if it gets too long. 
516 517 518 Setting up cdll 519 =============== 520 521 Copy cdll to either your local home directory or a central directory 522 such as /usr/bin (this will require root access). 523 524 Copy the file cdfile to your home directory. It will require read and 525 write access. This a default file that contains a directory stack and 526 special entries. 527 528 To replace the cd command you must add commands to your login script. 529 The login script is one or more of: 530 531 /etc/profile 532 ~/.bash_profile 533 ~/.bash_login 534 ~/.profile 535 ~/.bashrc 536 /etc/bash.bashrc.local 537 538 To setup your login, ~/.bashrc is recommended, for global (and root) setup 539 add the commands to /etc/bash.bashrc.local 540 541 To set up on login, add the command: 542 . <dir>/cdll 543 For example if cdll is in your local home directory: 544 . ~/cdll 545 If in /usr/bin then: 546 . /usr/bin/cdll 547 548 If you want to use this instead of the buitin cd command then add: 549 alias cd='cd_new' 550 We would also recommend the following commands: 551 alias @='cd_new @' 552 cd -U 553 cd -D 554 555 If you want to use cdll's prompt facilty then add the following: 556 CDL_PROMPTLEN=nn 557 Where nn is a number described below. Initially 99 would be suitable 558 number. 559 560 Thus the script looks something like this: 561 562 ###################################################################### 563 # CD Setup 564 ###################################################################### 565 CDL_PROMPTLEN=21 # Allow a prompt length of up to 21 characters 566 . /usr/bin/cdll # Initialise cdll 567 alias cd='cd_new' # Replace the built in cd command 568 alias @='cd_new @' # Allow @ at the prompt to display history 569 cd -U # Upload directories 570 cd -D # Set default action to non-posix 571 ###################################################################### 572 573 The full meaning of these commands will become clear later. 574 575 There are a couple of caveats. If another program changes the directory 576 without calling cdll, then the directory won't be put on the stack and 577 also if the prompt facility is used then this will not be updated. Two 578 programs that can do this are pushd and popd. To update the prompt and 579 stack simply enter: 580 581 cd . 582 583 Note that if the previous entry on the stack is the current directory 584 then the stack is not updated. 585 586 Usage 587 ===== 588 cd [dir] [0-9] [@[s|h] [-g <dir>] [-d] [-D] [-r<n>] 589 [dir|0-9] [-R<n>] [<dir>|0-9] [-s<n>] [-S<n>] 590 [-u] [-U] [-f] [-F] [-h] [-H] [-v] 591 592 <dir> Go to directory 593 0-n Goto previous directory (0 is previous, 594 1 is last but 1, etc.) 595 n is up to max history (default is 50) 596 @ List history and special entries (Usually available as $ @) 597 @h List history entries 598 @s List special entries 599 -g [<dir>] Go to literal name (bypass special names) 600 This is to allow access to dirs called '0','1','-h' etc 601 -d Change default action - verbose. (See note) 602 -D Change default action - silent. (See note) 603 -s<n> Go to the special entry <n> 604 -S<n> Go to the special entry <n> 605 and replace it with the current dir 606 -r<n> [<dir>] Go to directory <dir> 607 and then put it on special entry <n> 608 -R<n> [<dir>] Go to directory <dir> 609 and put current dir on special entry <n> 610 -a<n> Alternative suggested directory. See note below. 611 -f [<file>] File entries to <file>. 612 -u [<file>] Update entries from <file>. 
613 If no filename supplied then default file (~/cdfile) is used 614 -F and -U are silent versions 615 -v Print version number 616 -h Help 617 -H Detailed help 618 619 620 621 Examples 622 ======== 623 624 These examples assume non-default mode is set (that is, cd with no 625 parameters will go to the most recent stack directory), that aliases 626 have been set up for cd and @ as described above and that cd's prompt 627 facility is active and the prompt length is 21 characters. 628 629 /home/phil$ @ 630 # List the entries with the @ 631 History: 632 # Output of the @ command 633 ..... 634 # Skipped these entries for brevity 635 1 /home/phil/ummdev S1 /home/phil/perl 636 # Most recent two history entries 637 0 /home/phil/perl/eg S0 /home/phil/umm/ummdev 638 # and two special entries are shown 639 640 /home/phil$ cd /home/phil/utils/Cdll 641 # Now change directories 642 /home/phil/utils/Cdll$ @ 643 # Prompt reflects the directory. 644 History: 645 # New history 646 ..... 647 1 /home/phil/perl/eg S1 /home/phil/perl 648 # History entry 0 has moved to 1 649 0 /home/phil S0 /home/phil/umm/ummdev 650 # and the most recent has entered 651 652 To go to a history entry: 653 654 /home/phil/utils/Cdll$ cd 1 655 # Go to history entry 1. 656 /home/phil/perl/eg$ 657 # Current directory is now what was 1 658 659 To go to a special entry: 660 661 /home/phil/perl/eg$ cd -s1 662 # Go to special entry 1 663 /home/phil/umm/ummdev$ 664 # Current directory is S1 665 666 To go to a directory called, for example, 1: 667 668 /home/phil$ cd -g 1 669 # -g ignores the special meaning of 1 670 /home/phil/1$ 671 672 To put current directory on the special list as S1: 673 cd -r1 . # OR 674 cd -R1 . # These have the same effect if the directory is 675 #+ . (the current directory) 676 677 To go to a directory and add it as a special 678 The directory for -r<n> or -R<n> may be a number. 679 For example: 680 $ cd -r3 4 Go to history entry 4 and put it on special entry 3 681 $ cd -R3 4 Put current dir on the special entry 3 and go to 682 history entry 4 683 $ cd -s3 Go to special entry 3 684 685 Note that commands R,r,S and s may be used without a number and 686 refer to 0: 687 $ cd -s Go to special entry 0 688 $ cd -S Go to special entry 0 and make special entry 0 689 current dir 690 $ cd -r 1 Go to history entry 1 and put it on special entry 0 691 $ cd -r Go to history entry 0 and put it on special entry 0 692 693 694 Alternative suggested directories: 695 696 If a directory is not found, then CD will suggest any 697 possibilities. These are directories starting with the same letters 698 and if any are found they are listed prefixed with -a<n> 699 where <n> is a number. It's possible to go to the directory 700 by entering cd -a<n> on the command line. 701 702 Use cd -d or -D to change default cd action. cd -H will show 703 current action. 704 705 The history entries (0-n) are stored in the environment variables 706 CD[0] - CD[n] 707 Similarly the special directories S0 - 9 are in the environment 708 variable CDS[0] - CDS[9] 709 and may be accessed from the command line, for example: 710 711 ls -l ${CDS[3]} 712 cat ${CD[8]}/file.txt 713 714 The default pathname for the -f and -u commands is ~ 715 The default filename for the -f and -u commands is cdfile 716 717 718 Configuration 719 ============= 720 721 The following environment variables can be set: 722 723 CDL_PROMPTLEN - Set to the length of prompt you require. 724 Prompt string is set to the right characters of the current 725 directory. 
If not set, then prompt is left unchanged. Note 726 that this is the number of characters that the directory is 727 shortened to, not the total characters in the prompt. 728 729 CDL_PROMPT_PRE - Set to the string to prefix the prompt. 730 Default is: 731 non-root: "\\[\\e[01;34m\\]" (sets colour to blue). 732 root: "\\[\\e[01;31m\\]" (sets colour to red). 733 734 CDL_PROMPT_POST - Set to the string to suffix the prompt. 735 Default is: 736 non-root: "\\[\\e[00m\\]$" 737 (resets colour and displays $). 738 root: "\\[\\e[00m\\]#" 739 (resets colour and displays #). 740 741 Note: 742 CDL_PROMPT_PRE & _POST only t 743 744 CDPath - Set the default path for the -f & -u options. 745 Default is home directory 746 CDFile - Set the default filename for the -f & -u options. 747 Default is cdfile 748 749 750 There are three variables defined in the file cdll which control the 751 number of entries stored or displayed. They are in the section labeled 752 'Initialisation here' towards the end of the file. 753 754 cd_maxhistory - The number of history entries stored. 755 Default is 50. 756 cd_maxspecial - The number of special entries allowed. 757 Default is 9. 758 cd_histcount - The number of history and special entries 759 displayed. Default is 9. 760 761 Note that cd_maxspecial should be >= cd_histcount to avoid displaying 762 special entries that can't be set. 763 764 765 Version: 1.2.1 Date: 24-MAY-2003 766 767 DOCUMENTATION |
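Under the hood, the history is nothing more exotic than an array that gets shifted down by one slot every time the directory changes. A bare-bones sketch of that bookkeeping, with names invented for the illustration:

   #!/bin/bash
   # cd-history-sketch.sh: the array shuffle at the core of cdll,
   #+ reduced to essentials. Names here are illustrative, not cdll's own.

   declare -a dirhist      # dirhist[0] is the most recent previous directory.
   maxhist=50

   cd_push ()
   {
     local i
     for (( i = maxhist - 1; i > 0; i-- ))
     do
       dirhist[i]=${dirhist[i-1]}          # Shift older entries down . . .
     done
     dirhist[0]=$PWD                       #+ and remember where we were.
     builtin cd "$1" || return
   }

   cd_push /tmp
   cd_push /
   echo "Previous directories: ${dirhist[0]} ${dirhist[1]}"

This is essentially what cd_new does before calling the builtin cd, with a second array (CDS) holding the "special" slots alongside.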
Example A-36. A soundcard setup script
1 #!/bin/bash 2 # soundcard-on.sh 3 4 # Script author: Mkarcher 5 # http://www.thinkwiki.org/wiki ... 6 # /Script_for_configuring_the_CS4239_sound_chip_in_PnP_mode 7 # ABS Guide author made minor changes and added comments. 8 # Couldn't contact script author to ask for permission to use, but ... 9 #+ the script was released under the FDL, 10 #+ so its use here should be both legal and ethical. 11 12 # Sound-via-pnp-script for Thinkpad 600E 13 #+ and possibly other computers with onboard CS4239/CS4610 14 #+ that do not work with the PCI driver 15 #+ and are not recognized by the PnP code of snd-cs4236. 16 # Also for some 770-series Thinkpads, such as the 770x. 17 # Run as root user, of course. 18 # 19 # These are old and very obsolete laptop computers, 20 #+ but this particular script is very instructive, 21 #+ as it shows how to set up and hack device files. 22 23 24 25 # Search for sound card pnp device: 26 27 for dev in /sys/bus/pnp/devices/* 28 do 29 grep CSC0100 $dev/id > /dev/null && WSSDEV=$dev 30 grep CSC0110 $dev/id > /dev/null && CTLDEV=$dev 31 done 32 # On 770x: 33 # WSSDEV = /sys/bus/pnp/devices/00:07 34 # CTLDEV = /sys/bus/pnp/devices/00:06 35 # These are symbolic links to /sys/devices/pnp0/ ... 36 37 38 # Activate devices: 39 # Thinkpad boots with devices disabled unless "fast boot" is turned off 40 #+ (in BIOS). 41 42 echo activate > $WSSDEV/resources 43 echo activate > $CTLDEV/resources 44 45 46 # Parse resource settings. 47 48 { read # Discard "state = active" (see below). 49 read bla port1 50 read bla port2 51 read bla port3 52 read bla irq 53 read bla dma1 54 read bla dma2 55 # The "bla's" are labels in the first field: "io," "state," etc. 56 # These are discarded. 57 58 # Hack: with PnPBIOS: ports are: port1: WSS, port2: 59 #+ OPL, port3: sb (unneeded) 60 # with ACPI-PnP:ports are: port1: OPL, port2: sb, port3: WSS 61 # (ACPI bios seems to be wrong here, the PnP-card-code in snd-cs4236.c 62 #+ uses the PnPBIOS port order) 63 # Detect port order using the fixed OPL port as reference. 64 if [ ${port2%%-*} = 0x388 ] 65 # ^^^^ Strip out everything following hyphen in port address. 66 # So, if port1 is 0x530-0x537 67 #+ we're left with 0x530 -- the start address of the port. 68 then 69 # PnPBIOS: usual order 70 port=${port1%%-*} 71 oplport=${port2%%-*} 72 else 73 # ACPI: mixed-up order 74 port=${port3%%-*} 75 oplport=${port1%%-*} 76 fi 77 } < $WSSDEV/resources 78 # To see what's going on here: 79 # --------------------------- 80 # cat /sys/devices/pnp0/00:07/resources 81 # 82 # state = active 83 # io 0x530-0x537 84 # io 0x388-0x38b 85 # io 0x220-0x233 86 # irq 5 87 # dma 1 88 # dma 0 89 # ^^^ "bla" labels in first field (discarded). 90 91 92 { read # Discard first line, as above. 93 read bla port1 94 cport=${port1%%-*} 95 # ^^^^ 96 # Just want _start_ address of port. 97 } < $CTLDEV/resources 98 99 100 # Load the module: 101 102 modprobe --ignore-install snd-cs4236 port=$port cport=$cport\ 103 fm_port=$oplport irq=$irq dma1=$dma1 dma2=$dma2 isapnp=0 index=0 104 # See the modprobe manpage. 105 106 exit $? |
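Two small idioms do most of the work here: redirecting a file into a brace-grouped block of read commands, and trimming an I/O range down to its start address with ${var%%-*}. They can be tried in isolation; in the sketch below a here-document stands in for a /sys/.../resources file.

   #!/bin/bash
   # resource-parse-sketch.sh: the read-block and ${var%%-*} idioms
   #+ from the soundcard script, demonstrated on canned input.

   { read                      # Discard the "state = active" line.
     read label range          # E.g.: "io 0x530-0x537"
     port=${range%%-*}         # Strip from the first hyphen on: 0x530
     echo "Resource type: $label   start address: $port"
   } <<EOF
   state = active
   io 0x530-0x537
   EOF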
Example A-37. Locating split paragraphs in a text file
1 #!/bin/bash 2 # find-splitpara.sh 3 # Finds split paragraphs in a text file, 4 #+ and tags the line numbers. 5 6 7 ARGCOUNT=1 # Expect one arg. 8 E_WRONGARGS=65 9 10 file="$1" # Target filename. 11 lineno=1 # Line number. Start at 1. 12 Flag=0 # Blank line flag. 13 14 if [ $# -ne "$ARGCOUNT" ] 15 then 16 echo "Usage: `basename $0` FILENAME" 17 exit $E_WRONGARGS 18 fi 19 20 file_read () # Scan file for pattern, then print line. 21 { 22 while read line 23 do 24 25 if [[ "$line" =~ ^[a-z] && $Flag -eq 1 ]] 26 then # Line begins with lc character, following blank line. 27 echo -n "$lineno:: " 28 echo "$line" 29 fi 30 31 32 if [[ "$line" =~ "^$" ]] 33 then # If blank line, 34 Flag=1 #+ set flag. 35 else 36 Flag=0 37 fi 38 39 ((lineno++)) 40 41 done 42 } < $file # Redirect file into function's stdin. 43 44 file_read 45 46 47 exit $? 48 49 50 # ---------------------------------------------------------------- 51 This is line one of an example paragraph, bla, bla, bla. 52 This is line two, and line three should follow on next line, but 53 54 there is a blank line separating the two parts of the paragraph. 55 # ---------------------------------------------------------------- 56 57 Running this script on a file containing the above paragraph 58 yields: 59 60 4:: there is a blank line separating the two parts of the paragraph. 61 62 63 There will be additional output for all the other split paragraphs 64 in the target file. |
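One portability note: from Bash 3.2 onward, a quoted right-hand side of =~ is matched as a literal string, so the test [[ "$line" =~ "^$" ]] no longer recognizes blank lines there. A minimal variant of the loop that sidesteps the issue, with the sample text supplied inline:

   #!/bin/bash
   # A sketch of the same scan, written to behave the same on Bash 3.2+.

   lineno=1; Flag=0

   while read -r line
   do
     if [[ "$line" =~ ^[a-z] && $Flag -eq 1 ]]
     then
       echo "$lineno:: $line"      # Lowercase start right after a blank line.
     fi

     if [[ -z "$line" ]]           # -z replaces the quoted  =~ "^$"  test.
     then
       Flag=1
     else
       Flag=0
     fi
     ((lineno++))
   done <<EOF
   This is line one of an example paragraph, bla, bla, bla.
   This is line two, and line three should follow on next line, but

   there is a blank line separating the two parts of the paragraph.
   EOF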
Example A-38. Insertion sort
1 #!/bin/bash 2 # insertion-sort.bash: Insertion sort implementation in Bash 3 # Heavy use of Bash array features: 4 #+ (string) slicing, merging, etc 5 # URL: http://www.lugmen.org.ar/~jjo/jjotip/insertion-sort.bash.d 6 #+ /insertion-sort.bash.sh 7 # 8 # Author: JuanJo Ciarlante <jjo@irrigacion.gov.ar> 9 # Lightly reformatted by ABS Guide author. 10 # License: GPLv2 11 # Used in ABS Guide with author's permission (thanks!). 12 # 13 # Test with: ./insertion-sort.bash -t 14 # Or: bash insertion-sort.bash -t 15 # The following *doesn't* work: 16 # sh insertion-sort.bash -t 17 # Why not? Hint: which Bash-specific features are disabled 18 #+ when running a script by 'sh script.sh'? 19 # 20 : ${DEBUG:=0} # Debug, override with: DEBUG=1 ./scriptname . . . 21 # Parameter substitution -- set DEBUG to 0 if not previously set. 22 23 # Global array: "list" 24 typeset -a list 25 # Load whitespace-separated numbers from stdin. 26 if [ "$1" = "-t" ]; then 27 DEBUG=1 28 read -a list < <( od -Ad -w24 -t u2 /dev/urandom ) # Random list. 29 # ^ ^ process substition 30 else 31 read -a list 32 fi 33 numelem=${#list[*]} 34 35 # Shows the list, marking the element whose index is $1 36 #+ by surrounding it with the two chars passed as $2. 37 # Whole line prefixed with $3. 38 showlist() 39 { 40 echo "$3"${list[@]:0:$1} ${2:0:1}${list[$1]}${2:1:1} ${list[@]:$1+1}; 41 } 42 43 # Loop _pivot_ -- from second element to end of list. 44 for(( i=1; i<numelem; i++ )) do 45 ((DEBUG))&&showlist i "[]" " " 46 # From current _pivot_, back to first element. 47 for(( j=i; j; j-- )) do 48 # Search for the 1st elem. less than current "pivot" . . . 49 [[ "${list[j-1]}" -le "${list[i]}" ]] && break 50 done 51 (( i==j )) && continue ## No insertion was needed for this element. 52 # . . . Move list[i] (pivot) to the left of list[j]: 53 list=(${list[@]:0:j} ${list[i]} ${list[j]}\ 54 # {0,j-1} {i} {j} 55 ${list[@]:j+1:i-(j+1)} ${list[@]:i+1}) 56 # {j+1,i-1} {i+1,last} 57 ((DEBUG))&&showlist j "<>" "*" 58 done 59 60 61 echo 62 echo "------" 63 echo $'Result:\n'${list[@]} 64 65 exit $? |
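The densest line in the script is the array splice that drops the pivot in front of list[j]. It is easier to follow on a fixed list; a small sketch, with arbitrary values and indices:

   #!/bin/bash
   # splice-sketch.sh: the slicing move from insertion-sort.bash on fixed data.

   list=(10 20 40 50 30)     # Element 30 (i=4) belongs just before 40 (j=2).
   i=4; j=2

   list=( "${list[@]:0:j}"  "${list[i]}"  "${list[j]}" \
          "${list[@]:j+1:i-(j+1)}"  "${list[@]:i+1}" )
   #       {0 .. j-1}        {i}           {j}
   #       {j+1 .. i-1}      {i+1 .. last}

   echo "${list[@]}"         # 10 20 30 40 50

To exercise the full script, pipe it a whitespace-separated list, e.g. echo "3 1 2" | bash insertion-sort.bash, or run ./insertion-sort.bash -t for the random self-test.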
Example A-39. Standard Deviation
1 #!/bin/bash 2 # sd.sh: Standard Deviation 3 4 # The Standard Deviation indicates how consistent a set of data is. 5 # It shows to what extent the individual data points deviate from the 6 #+ arithmetic mean, i.e., how much they "bounce around" (or cluster). 7 # It is essentially the average deviation-distance of the 8 #+ data points from the mean. 9 10 # =========================================================== # 11 # To calculate the Standard Deviation: 12 # 13 # 1 Find the arithmetic mean (average) of all the data points. 14 # 2 Subtract each data point from the arithmetic mean, 15 # and square that difference. 16 # 3 Add all of the individual difference-squares in # 2. 17 # 4 Divide the sum in # 3 by the number of data points. 18 # This is known as the "variance." 19 # 5 The square root of # 4 gives the Standard Deviation. 20 # =========================================================== # 21 22 count=0 # Number of data points; global. 23 SC=9 # Scale to be used by bc. Nine decimal places. 24 E_DATAFILE=90 # Data file error. 25 26 # ----------------- Set data file --------------------- 27 if [ ! -z $1 ] # Specify filename as cmd-line arg? 28 then 29 datafile="$1" # ASCII text file, 30 else #+ one (numerical) data point per line! 31 datafile=sample.dat 32 fi # See example data file, below. 33 34 if [ ! -e "$datafile" ] 35 then 36 echo "\""$datafile"\" does not exist!" 37 exit $E_DATAFILE 38 fi 39 # ----------------------------------------------------- 40 41 42 arith_mean () 43 { 44 local rt=0 # Running total. 45 local am=0 # Arithmetic mean. 46 local ct=0 # Number of data points. 47 48 while read value # Read one data point at a time. 49 do 50 rt=$(echo "scale=$SC; $rt + $value" | bc) 51 (( ct++ )) 52 done 53 54 am=$(echo "scale=$SC; $rt / $ct" | bc) 55 56 echo $am; return $ct # This function "returns" TWO values! 57 # Caution: This little trick will not work if $ct > 255! 58 # To handle a larger number of data points, 59 #+ simply comment out the "return $ct" above. 60 } <"$datafile" # Feed in data file. 61 62 sd () 63 { 64 mean1=$1 # Arithmetic mean (passed to function). 65 n=$2 # How many data points. 66 sum2=0 # Sum of squared differences ("variance"). 67 avg2=0 # Average of $sum2. 68 sdev=0 # Standard Deviation. 69 70 while read value # Read one line at a time. 71 do 72 diff=$(echo "scale=$SC; $mean1 - $value" | bc) 73 # Difference between arith. mean and data point. 74 dif2=$(echo "scale=$SC; $diff * $diff" | bc) # Squared. 75 sum2=$(echo "scale=$SC; $sum2 + $dif2" | bc) # Sum of squares. 76 done 77 78 avg2=$(echo "scale=$SC; $sum2 / $n" | bc) # Avg. of sum of squares. 79 sdev=$(echo "scale=$SC; sqrt($avg2)" | bc) # Square root = 80 echo $sdev # Standard Deviation. 81 82 } <"$datafile" # Rewinds data file. 83 84 85 # ======================================================= # 86 mean=$(arith_mean); count=$? # Two returns from function! 87 std_dev=$(sd $mean $count) 88 89 echo 90 echo "Number of data points in \""$datafile"\" = $count" 91 echo "Arithmetic mean (average) = $mean" 92 echo "Standard Deviation = $std_dev" 93 echo 94 # ======================================================= # 95 96 exit 97 98 # This script could stand some drastic streamlining, 99 # but not at the cost of reduced legibility, please. 
100 101 102 # ++++++++++++++++++++++++++++++++++++++++ # 103 # A sample data file (sample1.dat): 104 105 # 18.35 106 # 19.0 107 # 18.88 108 # 18.91 109 # 18.64 110 111 112 # $ bash sd.sh sample1.dat 113 114 # Number of data points in "sample1.dat" = 5 115 # Arithmetic mean (average) = 18.756000000 116 # Standard Deviation = .235338054 117 # ++++++++++++++++++++++++++++++++++++++++ # |
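As a point of comparison only (this is not part of sd.sh), the same five-step calculation collapses into a single awk pass, using the algebraically equivalent form: variance = mean-of-squares minus square-of-mean.

#!/bin/bash
# sd-awk.sh: one-pass sketch of the calculation above.
# Reads the same one-number-per-line data file; defaults to sample.dat.

datafile=${1:-sample.dat}

awk '{ sum += $1; sumsq += $1 * $1; n++ }
     END { mean = sum / n
           variance = sumsq / n - mean * mean
           printf "n = %d   mean = %.9f   sd = %.9f\n", n, mean, sqrt(variance)
         }' "$datafile"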
Example A-40. A pad file generator for shareware authors
1 #!/bin/bash 2 # pad.sh 3 4 ###################################################### 5 # PAD (xml) file creator 6 #+ Written by Mendel Cooper <thegrendel@theriver.com>. 7 #+ Released to the Public Domain. 8 # 9 # Generates a "PAD" descriptor file for shareware 10 #+ packages, according to the specifications 11 #+ of the ASP. 12 # http://www.asp-shareware.org/pad 13 ###################################################### 14 15 16 # Accepts (optional) save filename as a command-line argument. 17 if [ -n "$1" ] 18 then 19 savefile=$1 20 else 21 savefile=save_file.xml # Default save_file name. 22 fi 23 24 25 # ===== PAD file headers ===== 26 HDR1="<?xml version=\"1.0\" encoding=\"Windows-1252\" ?>" 27 HDR2="<XML_DIZ_INFO>" 28 HDR3="<MASTER_PAD_VERSION_INFO>" 29 HDR4="\t<MASTER_PAD_VERSION>1.15</MASTER_PAD_VERSION>" 30 HDR5="\t<MASTER_PAD_INFO>Portable Application Description, or PAD 31 for short, is a data set that is used by shareware authors to 32 disseminate information to anyone interested in their software products. 33 To find out more go to http://www.asp-shareware.org/pad</MASTER_PAD_INFO>" 34 HDR6="</MASTER_PAD_VERSION_INFO>" 35 # ============================ 36 37 38 fill_in () 39 { 40 if [ -z "$2" ] 41 then 42 echo -n "$1? " # Get user input. 43 else 44 echo -n "$1 $2? " # Additional query? 45 fi 46 47 read var # May paste to fill in field. 48 # This shows how flexible "read" can be. 49 50 if [ -z "$var" ] 51 then 52 echo -e "\t\t<$1 />" >>$savefile # Indent with 2 tabs. 53 return 54 else 55 echo -e "\t\t<$1>$var</$1>" >>$savefile 56 return ${#var} # Return length of input string. 57 fi 58 } 59 60 check_field_length () # Check length of program description fields. 61 { 62 # $1 = maximum field length 63 # $2 = actual field length 64 if [ "$2" -gt "$1" ] 65 then 66 echo "Warning: Maximum field length of $1 characters exceeded!" 67 fi 68 } 69 70 clear # Clear screen. 71 echo "PAD File Creator" 72 echo "--- ---- -------" 73 echo 74 75 # Write File Headers to file. 76 echo $HDR1 >$savefile 77 echo $HDR2 >>$savefile 78 echo $HDR3 >>$savefile 79 echo -e $HDR4 >>$savefile 80 echo -e $HDR5 >>$savefile 81 echo $HDR6 >>$savefile 82 83 84 # Company_Info 85 echo "COMPANY INFO" 86 CO_HDR="Company_Info" 87 echo "<$CO_HDR>" >>$savefile 88 89 fill_in Company_Name 90 fill_in Address_1 91 fill_in Address_2 92 fill_in City_Town 93 fill_in State_Province 94 fill_in Zip_Postal_Code 95 fill_in Country 96 97 # If applicable: 98 # fill_in ASP_Member "[Y/N]" 99 # fill_in ASP_Member_Number 100 # fill_in ESC_Member "[Y/N]" 101 102 fill_in Company_WebSite_URL 103 104 clear # Clear screen between sections. 
105 106 # Contact_Info 107 echo "CONTACT INFO" 108 CONTACT_HDR="Contact_Info" 109 echo "<$CONTACT_HDR>" >>$savefile 110 fill_in Author_First_Name 111 fill_in Author_Last_Name 112 fill_in Author_Email 113 fill_in Contact_First_Name 114 fill_in Contact_Last_Name 115 fill_in Contact_Email 116 echo -e "\t</$CONTACT_HDR>" >>$savefile 117 # END Contact_Info 118 119 clear 120 121 # Support_Info 122 echo "SUPPORT INFO" 123 SUPPORT_HDR="Support_Info" 124 echo "<$SUPPORT_HDR>" >>$savefile 125 fill_in Sales_Email 126 fill_in Support_Email 127 fill_in General_Email 128 fill_in Sales_Phone 129 fill_in Support_Phone 130 fill_in General_Phone 131 fill_in Fax_Phone 132 echo -e "\t</$SUPPORT_HDR>" >>$savefile 133 # END Support_Info 134 135 echo "</$CO_HDR>" >>$savefile 136 # END Company_Info 137 138 clear 139 140 # Program_Info 141 echo "PROGRAM INFO" 142 PROGRAM_HDR="Program_Info" 143 echo "<$PROGRAM_HDR>" >>$savefile 144 fill_in Program_Name 145 fill_in Program_Version 146 fill_in Program_Release_Month 147 fill_in Program_Release_Day 148 fill_in Program_Release_Year 149 fill_in Program_Cost_Dollars 150 fill_in Program_Cost_Other 151 fill_in Program_Type "[Shareware/Freeware/GPL]" 152 fill_in Program_Release_Status "[Beta, Major Upgrade, etc.]" 153 fill_in Program_Install_Support 154 fill_in Program_OS_Support "[Win9x/Win2k/Linux/etc.]" 155 fill_in Program_Language "[English/Spanish/etc.]" 156 157 echo; echo 158 159 # File_Info 160 echo "FILE INFO" 161 FILEINFO_HDR="File_Info" 162 echo "<$FILEINFO_HDR>" >>$savefile 163 fill_in Filename_Versioned 164 fill_in Filename_Previous 165 fill_in Filename_Generic 166 fill_in Filename_Long 167 fill_in File_Size_Bytes 168 fill_in File_Size_K 169 fill_in File_Size_MB 170 echo -e "\t</$FILEINFO_HDR>" >>$savefile 171 # END File_Info 172 173 clear 174 175 # Expire_Info 176 echo "EXPIRE INFO" 177 EXPIRE_HDR="Expire_Info" 178 echo "<$EXPIRE_HDR>" >>$savefile 179 fill_in Has_Expire_Info "Y/N" 180 fill_in Expire_Count 181 fill_in Expire_Based_On 182 fill_in Expire_Other_Info 183 fill_in Expire_Month 184 fill_in Expire_Day 185 fill_in Expire_Year 186 echo -e "\t</$EXPIRE_HDR>" >>$savefile 187 # END Expire_Info 188 189 clear 190 191 # More Program_Info 192 echo "ADDITIONAL PROGRAM INFO" 193 fill_in Program_Change_Info 194 fill_in Program_Specific_Category 195 fill_in Program_Categories 196 fill_in Includes_JAVA_VM "[Y/N]" 197 fill_in Includes_VB_Runtime "[Y/N]" 198 fill_in Includes_DirectX "[Y/N]" 199 # END More Program_Info 200 201 echo "</$PROGRAM_HDR>" >>$savefile 202 # END Program_Info 203 204 clear 205 206 # Program Description 207 echo "PROGRAM DESCRIPTIONS" 208 PROGDESC_HDR="Program_Descriptions" 209 echo "<$PROGDESC_HDR>" >>$savefile 210 211 LANG="English" 212 echo "<$LANG>" >>$savefile 213 214 fill_in Keywords "[comma + space separated]" 215 echo 216 echo "45, 80, 250, 450, 2000 word program descriptions" 217 echo "(may cut and paste into field)" 218 # It would be highly appropriate to compose the following 219 #+ "Char_Desc" fields with a text editor, 220 #+ then cut-and-paste the text into the answer fields. 221 echo 222 echo " |---------------45 characters---------------|" 223 fill_in Char_Desc_45 224 check_field_length 45 "$?" 225 echo 226 fill_in Char_Desc_80 227 check_field_length 80 "$?" 228 229 fill_in Char_Desc_250 230 check_field_length 250 "$?" 
231 232 fill_in Char_Desc_450 233 fill_in Char_Desc_2000 234 235 echo "</$LANG>" >>$savefile 236 echo "</$PROGDESC_HDR>" >>$savefile 237 # END Program Description 238 239 clear 240 echo "Done."; echo; echo 241 echo "Save file is: \""$savefile"\"" 242 243 exit 0 |
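The fill_in () function above quietly exploits the fact that a function's return value is an ordinary exit status: it hands the length of the user's input back through "return", and check_field_length () reads it from $?. A minimal sketch of just that trick (not part of pad.sh; the names are illustrative):

#!/bin/bash
# return-length.sh: "return" a string length through the exit status.

get_field ()
{
  read -r reply
  return ${#reply}            # Exit status doubles as a character count.
}

echo -n "Type something: "
get_field
echo "That was $? characters."

# Caveat: an exit status is a single byte,
#+ so lengths greater than 255 wrap around (modulo 256).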
Example A-41. A man page editor
1 #!/bin/bash 2 # maned.sh 3 # A rudimentary man page editor 4 5 # Version: 0.1 (Alpha, probably buggy) 6 # Author: Mendel Cooper <thegrendel@theriver.com> 7 # Reldate: 16 June 2008 8 # License: GPL3 9 10 11 savefile= # Global, used in multiple functions. 12 E_NOINPUT=90 # User input missing (error). May or may not be critical. 13 14 # =========== Markup Tags ============ # 15 TopHeader=".TH" 16 NameHeader=".SH NAME" 17 SyntaxHeader=".SH SYNTAX" 18 SynopsisHeader=".SH SYNOPSIS" 19 InstallationHeader=".SH INSTALLATION" 20 DescHeader=".SH DESCRIPTION" 21 OptHeader=".SH OPTIONS" 22 FilesHeader=".SH FILES" 23 EnvHeader=".SH ENVIRONMENT" 24 AuthHeader=".SH AUTHOR" 25 BugsHeader=".SH BUGS" 26 SeeAlsoHeader=".SH SEE ALSO" 27 BOLD=".B" 28 # Add more tags, as needed. 29 # See groff docs for markup meanings. 30 # ==================================== # 31 32 start () 33 { 34 clear # Clear screen. 35 echo "ManEd" 36 echo "-----" 37 echo 38 echo "Simple man page creator" 39 echo "Author: Mendel Cooper" 40 echo; echo; echo 41 } 42 43 progname () 44 { 45 echo -n "Program name? " 46 read name 47 48 echo -n "Manpage section? [Hit RETURN for default (\"1\") ] " 49 read section 50 if [ -z "$section" ] 51 then 52 section=1 # Most man pages are in section 1. 53 fi 54 55 if [ -n "$name" ] 56 then 57 savefile=""$name"."$section"" # Filename suffix = section. 58 echo -n "$1 " >>$savefile 59 name1=$(echo "$name" | tr a-z A-Z) # Change to uppercase, 60 #+ per man page convention. 61 echo -n "$name1" >>$savefile 62 else 63 echo "Error! No input." # Mandatory input. 64 exit $E_NOINPUT # Critical! 65 fi 66 67 echo -n " \"$section\"">>$savefile # Append, always append. 68 69 echo -n "Version? " 70 read ver 71 echo -n " \"Version $ver \"">>$savefile 72 echo >>$savefile 73 74 echo -n "Short description [0 - 5 words]? " 75 read sdesc 76 echo "$NameHeader">>$savefile 77 echo ""$BOLD" "$name"">>$savefile 78 echo "\- "$sdesc"">>$savefile 79 80 } 81 82 fill_in () 83 { # This function more or less copied from "pad.sh" script. 84 echo -n "$2? " # Get user input. 85 read var # May paste (a single line only!) to fill in field. 86 87 if [ -n "$var" ] 88 then 89 echo "$1 " >>$savefile 90 echo -n "$var" >>$savefile 91 else # Don't append empty field to file. 92 return $E_NOINPUT # Not critical here. 93 fi 94 95 echo >>$savefile 96 97 } 98 99 100 end () 101 { 102 clear 103 echo -n "Would you like to view the saved man page (y/n)? " 104 read ans 105 if [ "$ans" = "n" -o "$ans" = "N" ]; then exit; fi 106 exec less "$savefile" # Exit script and hand off control to "less" ... 107 #+ ... which formats for viewing man page source. 108 } 109 110 111 # ---------------------------------------- # 112 start 113 progname "$TopHeader" 114 fill_in "$SynopsisHeader" "Synopsis" 115 fill_in "$DescHeader" "Long description" 116 # May paste in *single line* of text. 117 fill_in "$OptHeader" "Options" 118 fill_in "$FilesHeader" "Files" 119 fill_in "$AuthHeader" "Author" 120 fill_in "$BugsHeader" "Bugs" 121 fill_in "$SeeAlsoHeader" "See also" 122 # fill_in "$OtherHeader" ... as necessary. 123 end # ... exit not needed. 124 # ---------------------------------------- # 125 126 # Note that the generated man page will usually 127 #+ require manual fine-tuning with a text editor. 128 # However, it's a distinct improvement upon 129 #+ writing man source from scratch 130 #+ or even editing a blank man page template. 131 132 # The main deficiency of the script is that it permits 133 #+ pasting only a single text line into the input fields. 
134 # This may be a long, cobbled-together line, which groff 135 #+ will automatically wrap and hyphenate. 136 # However, if you want multiple (newline-separated) paragraphs, 137 #+ these must be inserted by manual text editing on the 138 #+ script-generated man page. 139 # Exercise (difficult): Fix this! 140 141 # This script is not nearly as elaborate as the 142 #+ full-featured "manedit" package (http://wolfpack.twu.net), 143 #+ but it's much easier to use. |
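To proofread the generated page as it will actually display, rather than as raw groff source, something along these lines works (a usage sketch, not part of maned.sh; "progname.1" stands for whatever save file the script produced):

man ./progname.1                      # Most Linux "man" commands accept an explicit path (note the ./).
# or, more portably:
groff -man -Tascii progname.1 | less  # Format the man source by hand.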
Example A-42. Petals Around the Rose
1 #!/bin/bash -i 2 # petals.sh 3 4 ######################################################################### 5 # Petals Around the Rose # 6 # # 7 # Version 0.1 Created by Serghey Rodin # 8 # Version 0.2 Modded by ABS Guide Author # 9 # # 10 # License: GPL3 # 11 # Used in ABS Guide with permission. # 12 # ##################################################################### # 13 14 hits=0 # Correct guesses. 15 WIN=6 # Mastered the game. 16 ALMOST=5 # One short of mastery. 17 EXIT=exit # Give up early? 18 19 RANDOM=$$ # Seeds the random number generator from PID of script. 20 21 22 # Bones (ASCII graphics for dice) 23 bone1[1]="| |" 24 bone1[2]="| o |" 25 bone1[3]="| o |" 26 bone1[4]="| o o |" 27 bone1[5]="| o o |" 28 bone1[6]="| o o |" 29 bone2[1]="| o |" 30 bone2[2]="| |" 31 bone2[3]="| o |" 32 bone2[4]="| |" 33 bone2[5]="| o |" 34 bone2[6]="| o o |" 35 bone3[1]="| |" 36 bone3[2]="| o |" 37 bone3[3]="| o |" 38 bone3[4]="| o o |" 39 bone3[5]="| o o |" 40 bone3[6]="| o o |" 41 bone="+---------+" 42 43 44 45 # Functions 46 47 instructions () { 48 49 clear 50 echo -n "Do you need instructions? (y/n) "; read ans 51 if [ "$ans" = "y" -o "$ans" = "Y" ]; then 52 clear 53 echo -e '\E[34;47m' # Blue type. 54 55 # "cat document" 56 cat <<INSTRUCTIONSZZZ 57 The name of the game is Petals Around the Rose, 58 and that name is significant. 59 Five dice will roll and you must guess the "answer" for each roll. 60 It will be zero or an even number. 61 After your guess, you will be told the answer for the roll, but . . . 62 that's ALL the information you will get. 63 64 Six consecutive correct guesses admits you to the 65 Fellowship of the Rose. 66 INSTRUCTIONSZZZ 67 68 echo -e "\033[0m" # Turn off blue. 69 else clear 70 fi 71 72 } 73 74 75 fortune () 76 { 77 RANGE=7 78 FLOOR=0 79 number=0 80 while [ "$number" -le $FLOOR ] 81 do 82 number=$RANDOM 83 let "number %= $RANGE" # 1 - 6. 84 done 85 86 return $number 87 } 88 89 90 91 throw () { # Calculate each individual die. 92 fortune; B1=$? 93 fortune; B2=$? 94 fortune; B3=$? 95 fortune; B4=$? 96 fortune; B5=$? 97 98 calc () { # Function embedded within a function! 99 case "$1" in 100 3 ) rose=2;; 101 5 ) rose=4;; 102 * ) rose=0;; 103 esac # Simplified algorithm. 104 # Doesn't really get to the heart of the matter. 105 return $rose 106 } 107 108 answer=0 109 calc "$B1"; answer=$(expr $answer + $(echo $?)) 110 calc "$B2"; answer=$(expr $answer + $(echo $?)) 111 calc "$B3"; answer=$(expr $answer + $(echo $?)) 112 calc "$B4"; answer=$(expr $answer + $(echo $?)) 113 calc "$B5"; answer=$(expr $answer + $(echo $?)) 114 } 115 116 117 118 game () 119 { # Generate graphic display of dice throw. 120 throw 121 echo -e "\033[1m" # Bold. 122 echo -e "\n" 123 echo -e "$bone\t$bone\t$bone\t$bone\t$bone" 124 echo -e \ 125 "${bone1[$B1]}\t${bone1[$B2]}\t${bone1[$B3]}\t${bone1[$B4]}\t${bone1[$B5]}" 126 echo -e \ 127 "${bone2[$B1]}\t${bone2[$B2]}\t${bone2[$B3]}\t${bone2[$B4]}\t${bone2[$B5]}" 128 echo -e \ 129 "${bone3[$B1]}\t${bone3[$B2]}\t${bone3[$B3]}\t${bone3[$B4]}\t${bone3[$B5]}" 130 echo -e "$bone\t$bone\t$bone\t$bone\t$bone" 131 echo -e "\n\n\t\t" 132 echo -e "\033[0m" # Turn off bold. 133 echo -n "There are how many petals around the rose? " 134 } 135 136 137 138 # ============================================================== # 139 140 instructions 141 142 while [ "$petal" != "$EXIT" ] # Main loop. 143 do 144 game 145 read petal 146 echo "$petal" | grep [0-9] >/dev/null # Filter response for digit. 147 # Otherwise just roll dice again. 148 if [ "$?" -eq 0 ] # If-loop #1. 
149 then 150 if [ "$petal" == "$answer" ]; then # If-loop #2. 151 echo -e "\nCorrect. There are $petal petals around the rose.\n" 152 (( hits++ )) 153 154 if [ "$hits" -eq "$WIN" ]; then # If-loop #3. 155 echo -e '\E[31;47m' # Red type. 156 echo -e "\033[1m" # Bold. 157 echo "You have unraveled the mystery of the Rose Petals!" 158 echo "Welcome to the Fellowship of the Rose!!!" 159 echo "(You are herewith sworn to secrecy.)"; echo 160 echo -e "\033[0m" # Turn off red & bold. 161 break # Exit! 162 else echo "You have $hits correct so far."; echo 163 164 if [ "$hits" -eq "$ALMOST" ]; then 165 echo "Just one more gets you to the heart of the mystery!"; echo 166 fi 167 168 fi # Close if-loop #3. 169 170 else 171 echo -e "\nWrong. There are $answer petals around the rose.\n" 172 hits=0 # Reset number of correct guesses. 173 fi # Close if-loop #2. 174 175 echo -n "Hit ENTER for the next roll, or type \"exit\" to end. " 176 read 177 if [ "$REPLY" = "$EXIT" ]; then exit 178 fi 179 180 fi # Close if-loop #1. 181 182 clear 183 done # End of main (while) loop. 184 185 ### 186 187 exit $? 188 189 # Resources: 190 # --------- 191 # 1) http://en.wikipedia.org/wiki/Petals_Around_the_Rose 192 # (Wikipedia entry.) 193 # 2) http://www.borrett.id.au/computing/petals-bg.htm 194 # (How Bill Gates coped with the Petals Around the Rose challenge.) |
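Incidentally, the retry loop in fortune () is one way to map $RANDOM onto 1 - 6. A shorter idiom (shown here as a separate sketch, not as a change to the script) does it without looping:

#!/bin/bash
# die-roll.sh: map $RANDOM onto the range 1 - 6 directly.

RANDOM=$$                     # Seed from the script's PID, as above.
roll=$(( RANDOM % 6 + 1 ))    # RANDOM % 6 gives 0 - 5; the +1 shifts it to 1 - 6.
echo "Rolled a $roll."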
Example A-43. Quacky: a Perquackey-type word game
1 #!/bin/bash 2 # qky.sh 3 4 ############################################################## 5 # QUACKEY: a somewhat simplified version of Perquackey [TM]. # 6 # # 7 # Author: Mendel Cooper <thegrendel@theriver.com> # 8 # version 0.1.02 03 May, 2008 # 9 # License: GPL3 # 10 ############################################################## 11 12 WLIST=/usr/share/dict/word.lst 13 # ^^^^^^^^ Word list file found here. 14 # ASCII word list, one word per line, UNIX format. 15 # A suggested list is the script author's "yawl" word list package. 16 # http://ibiblio.org/pub/Linux/libs/yawl-0.3.2.tar.gz 17 # or 18 # http://personal.riverusers.com/~thegrendel/yawl-0.3.2.tar.gz 19 20 NONCONS=0 # Word not constructable from letter set. 21 CONS=1 # Constructable. 22 SUCCESS=0 23 NG=1 24 FAILURE='' 25 NULL=0 # Zero out value of letter (if found). 26 MINWLEN=3 # Minimum word length. 27 MAXCAT=5 # Maximum number of words in a given category. 28 PENALTY=200 # General-purpose penalty for unacceptable words. 29 total= 30 E_DUP=70 # Duplicate word error. 31 32 TIMEOUT=10 # Time for word input. 33 34 NVLET=10 # 10 letters for non-vulnerable. 35 VULET=13 # 13 letters for vulnerable (not yet implemented). 36 37 declare -a Words 38 declare -a Status 39 declare -a Score=( 0 0 0 0 0 0 0 0 0 0 0 ) 40 41 42 letters=( a n s r t m l k p r b c i d s i d z e w u e t f 43 e y e r e f e g t g h h i t r s c i t i d i j a t a o l a 44 m n a n o v n w o s e l n o s p a q e e r a b r s a o d s 45 t g t i t l u e u v n e o x y m r k ) 46 # Letter distribution table shamelessly borrowed from "Wordy" game, 47 #+ ca. 1992, written by a certain fine fellow named Mendel Cooper. 48 49 declare -a LS 50 51 numelements=${#letters[@]} 52 randseed="$1" 53 54 instructions () 55 { 56 clear 57 echo "Welcome to QUACKEY, the anagramming word construction game."; echo 58 echo -n "Do you need instructions? (y/n) "; read ans 59 60 if [ "$ans" = "y" -o "$ans" = "Y" ]; then 61 clear 62 echo -e '\E[31;47m' # Red foreground. '\E[34;47m' for blue. 63 cat <<INSTRUCTION1 64 65 QUACKEY is a variant of Perquackey [TM]. 66 The rules are the same, but the scoring is simplified 67 and plurals of previously played words are allowed. 68 "Vulnerable" play is not yet implemented, 69 but it is otherwise feature-complete. 70 71 As the game begins, the player gets 10 letters. 72 The object is to construct valid dictionary words 73 of at least 3-letter-length from the letterset. 74 Each word-length category 75 -- 3-letter, 4-letter, 5-letter, ... -- 76 fills up with the fifth word entered, 77 and no further words in that category are accepted. 78 79 The penalty for too-short (two-letter), duplicate, unconstructable, 80 and invalid (not in dictionary) words is -200. The same penalty applies 81 to attempts to enter a word in a filled-up category. 82 83 INSTRUCTION1 84 85 echo -n "Hit ENTER for next page of instructions. "; read az1 86 87 cat <<INSTRUCTION2 88 89 The scoring mostly corresponds to classic Perquackey: 90 The first 3-letter word scores 60, plus 10 for each additional one. 91 The first 4-letter word scores 120, plus 20 for each additional one. 92 The first 5-letter word scores 200, plus 50 for each additional one. 93 The first 6-letter word scores 300, plus 100 for each additional one. 94 The first 7-letter word scores 500, plus 150 for each additional one. 95 The first 8-letter word scores 750, plus 250 for each additional one. 96 The first 9-letter word scores 1000, plus 500 for each additional one. 
97 The first 10-letter word scores 2000, plus 2000 for each additional one. 98 99 Category completion bonuses are: 100 3-letter words 100 101 4-letter words 200 102 5-letter words 400 103 6-letter words 800 104 7-letter words 2000 105 8-letter words 10000 106 This is a simplification of the absurdly complicated Perquackey bonus 107 scoring system. 108 109 INSTRUCTION2 110 111 echo -n "Hit ENTER for final page of instructions. "; read az1 112 113 cat <<INSTRUCTION3 114 115 116 Hitting just ENTER for a word entry ends the game. 117 118 Individual word entry is timed to a maximum of 10 seconds. 119 *** Timing out on an entry ends the game. *** 120 Other than that, the game is untimed. 121 122 -------------------------------------------------- 123 Game statistics are automatically saved to a file. 124 -------------------------------------------------- 125 126 For competitive ("duplicate") play, a previous letterset 127 may be duplicated by repeating the script's random seed, 128 command-line parameter \$1. 129 For example, "qky 7633" specifies the letterset 130 c a d i f r h u s k ... 131 INSTRUCTION3 132 133 echo; echo -n "Hit ENTER to begin game. "; read az1 134 135 echo -e "\033[0m" # Turn off red. 136 else clear 137 fi 138 139 clear 140 141 } 142 143 144 145 seed_random () 146 { # Seed random number generator. 147 if [ -n "$randseed" ] # Can specify random seed. 148 then #+ for play in competitive mode. 149 # RANDOM="$randseed" 150 echo "RANDOM seed set to "$randseed"" 151 else 152 randseed="$$" # Or get random seed from process ID. 153 echo "RANDOM seed not specified, set to Process ID of script ($$)." 154 fi 155 156 RANDOM="$randseed" 157 158 echo 159 } 160 161 162 get_letset () 163 { 164 element=0 165 echo -n "Letterset:" 166 167 for lset in $(seq $NVLET) 168 do # Pick random letters to fill out letterset. 169 LS[element]="${letters[$((RANDOM%numelements))]}" 170 ((element++)) 171 done 172 173 echo 174 echo "${LS[@]}" 175 176 } 177 178 179 add_word () 180 { 181 wrd="$1" 182 local idx=0 183 184 Status[0]="" 185 Status[3]="" 186 Status[4]="" 187 188 while [ "${Words[idx]}" != '' ] 189 do 190 if [ "${Words[idx]}" = "$wrd" ] 191 then 192 Status[3]="Duplicate-word-PENALTY" 193 let "Score[0]= 0 - $PENALTY" 194 let "Score[1]-=$PENALTY" 195 return $E_DUP 196 fi 197 198 ((idx++)) 199 done 200 201 Words[idx]="$wrd" 202 get_score 203 204 } 205 206 get_score() 207 { 208 local wlen=0 209 local score=0 210 local bonus=0 211 local first_word=0 212 local add_word=0 213 local numwords=0 214 215 wlen=${#wrd} 216 numwords=${Score[wlen]} 217 Score[2]=0 218 Status[4]="" # Initialize "bonus" to 0. 219 220 case "$wlen" in 221 3) first_word=60 222 add_word=10;; 223 4) first_word=120 224 add_word=20;; 225 5) first_word=200 226 add_word=50;; 227 6) first_word=300 228 add_word=100;; 229 7) first_word=500 230 add_word=150;; 231 8) first_word=750 232 add_word=250;; 233 9) first_word=1000 234 add_word=500;; 235 10) first_word=2000 236 add_word=2000;; # This category modified from original rules! 237 esac 238 239 ((Score[wlen]++)) 240 if [ ${Score[wlen]} -eq $MAXCAT ] 241 then # Category completion bonus scoring simplified! 242 case $wlen in 243 3 ) bonus=100;; 244 4 ) bonus=200;; 245 5 ) bonus=400;; 246 6 ) bonus=800;; 247 7 ) bonus=2000;; 248 8 ) bonus=10000;; 249 esac # Needn't worry about 9's and 10's. 250 Status[4]="Category-$wlen-completion***BONUS***" 251 Score[2]=$bonus 252 else 253 Status[4]="" # Erase it. 
254 fi 255 256 257 let "score = $first_word + $add_word * $numwords" 258 if [ "$numwords" -eq 0 ] 259 then 260 Score[0]=$score 261 else 262 Score[0]=$add_word 263 fi # All this to distinguish last-word score 264 #+ from total running score. 265 let "Score[1] += ${Score[0]}" 266 let "Score[1] += ${Score[2]}" 267 268 } 269 270 271 272 get_word () 273 { 274 local wrd='' 275 read -t $TIMEOUT wrd # Timed read. 276 echo $wrd 277 } 278 279 is_constructable () 280 { # This was the most complex and difficult-to-write function. 281 local -a local_LS=( "${LS[@]}" ) # Local copy of letter set. 282 local is_found=0 283 local idx=0 284 local pos 285 local strlen 286 local local_word=( "$1" ) 287 strlen=${#local_word} 288 289 while [ "$idx" -lt "$strlen" ] 290 do 291 is_found=$(expr index "${local_LS[*]}" "${local_word:idx:1}") 292 if [ "$is_found" -eq "$NONCONS" ] # Not constructable! 293 then 294 echo "$FAILURE"; return 295 else 296 ((pos = ($is_found - 1) / 2)) # Compensate for spaces betw. letters! 297 local_LS[pos]=$NULL # Zero out used letters. 298 ((idx++)) # Bump index. 299 fi 300 done 301 302 echo "$SUCCESS" 303 return 304 } 305 306 is_valid () 307 { # Surprisingly easy to check if word in dictionary ... 308 fgrep -qw "$1" "$WLIST" # ... thanks to 'grep' ... 309 echo $? 310 } 311 312 check_word () 313 { 314 if [ -z "$1" ] 315 then 316 return 317 fi 318 319 Status[1]="" 320 Status[2]="" 321 Status[3]="" 322 Status[4]="" 323 324 iscons=$(is_constructable "$1") 325 if [ "$iscons" ] 326 then 327 Status[1]="constructable" 328 v=$(is_valid "$1") 329 if [ "$v" -eq "$SUCCESS" ] 330 then 331 Status[2]="valid" 332 strlen=${#1} 333 334 if [ ${Score[strlen]} -eq "$MAXCAT" ] # Category full! 335 then 336 Status[3]="Category-$strlen-overflow-PENALTY" 337 return $NG 338 fi 339 340 case "$strlen" in 341 1 | 2 ) 342 Status[3]="Two-letter-word-PENALTY" 343 return $NG;; 344 * ) 345 Status[3]="" 346 return $SUCCESS;; 347 esac 348 else 349 Status[3]="Not-valid-PENALTY" 350 return $NG 351 fi 352 else 353 Status[3]="Not-constructable-PENALTY" 354 return $NG 355 fi 356 357 ### FIXME: Streamline the above code. 358 359 } 360 361 362 display_words () 363 { 364 local idx=0 365 local wlen0 366 367 clear 368 echo "Letterset: ${LS[@]}" 369 echo "Threes: Fours: Fives: Sixes: Sevens: Eights:" 370 echo "------------------------------------------------------------" 371 372 373 374 while [ "${Words[idx]}" != '' ] 375 do 376 wlen0=${#Words[idx]} 377 case "$wlen0" in 378 3) ;; 379 4) echo -n " " ;; 380 5) echo -n " " ;; 381 6) echo -n " " ;; 382 7) echo -n " " ;; 383 8) echo -n " " ;; 384 esac 385 echo "${Words[idx]}" 386 ((idx++)) 387 done 388 389 ### FIXME: The word display is pretty crude. 390 } 391 392 393 play () 394 { 395 word="Start game" # Dummy word, to start ... 396 397 while [ "$word" ] # If player just hits return (blank word), 398 do #+ then game ends. 399 echo "$word: "${Status[@]}"" 400 echo -n "Last score: [${Score[0]}] TOTAL score: [${Score[1]}]: Next word: " 401 total=${Score[1]} 402 word=$(get_word) 403 check_word "$word" 404 405 if [ "$?" -eq "$SUCCESS" ] 406 then 407 add_word "$word" 408 else 409 let "Score[0]= 0 - $PENALTY" 410 let "Score[1]-=$PENALTY" 411 fi 412 413 display_words 414 done # Exit game. 415 416 ### FIXME: The play () function calls too many other functions. 417 ### This is perilously close to "spaghetti code" ... 418 } 419 420 end_of_game () 421 { # Save and display stats. 
422 423 #######################Autosave########################## 424 savefile=qky.save.$$ 425 # ^^ PID of script 426 echo `date` >> $savefile 427 echo "Letterset # $randseed (random seed) ">> $savefile 428 echo -n "Letterset: " >> $savefile 429 echo "${LS[@]}" >> $savefile 430 echo "---------" >> $savefile 431 echo "Words constructed:" >> $savefile 432 echo "${Words[@]}" >> $savefile 433 echo >> $savefile 434 echo "Score: $total" >> $savefile 435 436 echo "Statistics for this round saved in \""$savefile"\"" 437 ######################################################### 438 439 echo "Score for this round: $total" 440 echo "Words: ${Words[@]}" 441 } 442 443 # ---------# 444 instructions 445 seed_random 446 get_letset 447 play 448 end_of_game 449 # ---------# 450 451 exit $? 452 453 # TODO: 454 # 455 # 1) Clean up code! 456 # 2) Prettify the display_words () function (maybe with widgets?). 457 # 3) Improve the time-out ... maybe change to untimed entry, 458 #+ but with a time limit for the overall round. 459 # 4) An on-screen countdown timer would be nice. 460 # 5) Implement "vulnerable" mode of play. 461 # 6) Improve save-to-file capability (and maybe make it optional). 462 # 7) Fix bugs!!! 463 464 # Reference for more info: 465 # http://personal.riverusers.com/~thegrendel/qky.README.html |
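Two of the building blocks above -- the timed read and the dictionary lookup -- are useful on their own. Here is a stripped-down sketch (not part of qky.sh; it assumes the stock /usr/share/dict/words rather than the "yawl" list):

#!/bin/bash
# word-check.sh: timed input plus dictionary lookup.

WLIST=/usr/share/dict/words    # One word per line.
TIMEOUT=10

echo -n "Enter a word ($TIMEOUT second limit): "
if ! read -t $TIMEOUT word
then
  echo; echo "Timed out."; exit 1
fi

[ -z "$word" ] && exit         # Bare ENTER: nothing to look up.

if grep -qiw "$word" "$WLIST"  # Quiet, case-insensitive, whole-word match.
then
  echo "\"$word\" is in the dictionary."
else
  echo "\"$word\" is not in the dictionary."
fi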
Example A-44. Nim
1 #!/bin/bash 2 # nim.sh: Game of Nim 3 4 # Author: Mendel Cooper 5 # Reldate: 15 July 2008 6 # License: GPL3 7 8 ROWS=5 # Five rows of pegs. 9 WON=91 # Exit codes to keep track of wins/losses. 10 LOST=92 # Possibly useful if running in batch mode. 11 QUIT=99 12 peg_msg= # Peg/Pegs? 13 Rows=( 0 5 4 3 2 1 ) # Array holding play info. 14 # ${Rows[0]} holds total number of pegs, updated after each turn. 15 # Other array elements hold number of pegs in corresponding row. 16 17 instructions () 18 { 19 clear 20 tput bold 21 echo "Welcome to the game of Nim."; echo 22 echo -n "Do you need instructions? (y/n) "; read ans 23 24 if [ "$ans" = "y" -o "$ans" = "Y" ]; then 25 clear 26 echo -e '\E[33;41m' # Yellow fg., over red bg.; bold. 27 cat <<INSTRUCTIONS 28 29 Nim is a game with roots in the distant past. 30 This particular variant starts with five rows of pegs. 31 32 1: | | | | | 33 2: | | | | 34 3: | | | 35 4: | | 36 5: | 37 38 The number at the left identifies the row. 39 40 The human player moves first, and alternates turns with the bot. 41 A turn consists of removing at least one peg from a single row. 42 It is permissable to remove ALL the pegs from a row. 43 For example, in row 2, above, the player can remove 1, 2, 3, or 4 pegs. 44 The player who removes the last peg loses. 45 46 The strategy consists of trying to be the one who removes 47 the next-to-last peg(s), leaving the loser with the final peg. 48 49 To exit the game early, hit ENTER during your turn. 50 INSTRUCTIONS 51 52 echo; echo -n "Hit ENTER to begin game. "; read azx 53 54 echo -e "\033[0m" # Restore display. 55 else tput sgr0; clear 56 fi 57 58 clear 59 60 } 61 62 63 tally_up () 64 { 65 let "Rows[0] = ${Rows[1]} + ${Rows[2]} + ${Rows[3]} + ${Rows[4]} + \ 66 ${Rows[5]}" # Add up how many pegs remaining. 67 } 68 69 70 display () 71 { 72 index=1 # Start with top row. 73 echo 74 75 while [ "$index" -le "$ROWS" ] 76 do 77 p=${Rows[index]} 78 echo -n "$index: " # Show row number. 79 80 # ------------------------------------------------ 81 # Two concurrent inner loops. 82 83 indent=$index 84 while [ "$indent" -gt 0 ] 85 do 86 echo -n " " # Staggered rows. 87 ((indent--)) # Spacing between pegs. 88 done 89 90 while [ "$p" -gt 0 ] 91 do 92 echo -n "| " 93 ((p--)) 94 done 95 # ----------------------------------------------- 96 97 echo 98 ((index++)) 99 done 100 101 tally_up 102 103 rp=${Rows[0]} 104 105 if [ "$rp" -eq 1 ] 106 then 107 peg_msg=peg 108 final_msg="Game over." 109 else # Game not yet over . . . 110 peg_msg=pegs 111 final_msg="" # . . . So "final message" is blank. 112 fi 113 114 echo " $rp $peg_msg remaining." 115 echo " "$final_msg"" 116 117 118 echo 119 } 120 121 player_move () 122 { 123 124 echo "Your move:" 125 126 echo -n "Which row? " 127 while read idx 128 do # Validity check, etc. 129 130 if [ -z "$idx" ] # Hitting return quits. 131 then 132 echo "Premature exit."; echo 133 tput sgr0 # Restore display. 134 exit $QUIT 135 fi 136 137 if [ "$idx" -gt "$ROWS" -o "$idx" -lt 1 ] # Bounds check. 138 then 139 echo "Invalid row number!" 140 echo -n "Which row? " 141 else 142 break 143 fi 144 # TODO: 145 # Add check for non-numeric input. 146 # Also, script crashes on input outside of range of long double. 147 # Fix this. 148 149 done 150 151 echo -n "Remove how many? " 152 while read num 153 do # Validity check. 154 155 if [ -z "$num" ] 156 then 157 echo "Premature exit."; echo 158 tput sgr0 # Restore display. 
159 exit $QUIT 160 fi 161 162 if [ "$num" -gt ${Rows[idx]} -o "$num" -lt 1 ] 163 then 164 echo "Cannot remove $num!" 165 echo -n "Remove how many? " 166 else 167 break 168 fi 169 done 170 # TODO: 171 # Add check for non-numeric input. 172 # Also, script crashes on input outside of range of long double. 173 # Fix this. 174 175 let "Rows[idx] -= $num" 176 177 display 178 tally_up 179 180 if [ ${Rows[0]} -eq 1 ] 181 then 182 echo " Human wins!" 183 echo " Congratulations!" 184 tput sgr0 # Restore display. 185 echo 186 exit $WON 187 fi 188 189 if [ ${Rows[0]} -eq 0 ] 190 then # Snatching defeat from the jaws of victory . . . 191 echo " Fool!" 192 echo " You just removed the last peg!" 193 echo " Bot wins!" 194 tput sgr0 # Restore display. 195 echo 196 exit $LOST 197 fi 198 } 199 200 201 bot_move () 202 { 203 204 row_b=0 205 while [[ $row_b -eq 0 || ${Rows[row_b]} -eq 0 ]] 206 do 207 row_b=$RANDOM # Choose random row. 208 let "row_b %= $ROWS" 209 done 210 211 212 num_b=0 213 r0=${Rows[row_b]} 214 215 if [ "$r0" -eq 1 ] 216 then 217 num_b=1 218 else 219 let "num_b = $r0 - 1" 220 # Leave only a single peg in the row. 221 fi # Not a very strong strategy, 222 #+ but probably a bit better than totally random. 223 224 let "Rows[row_b] -= $num_b" 225 echo -n "Bot: " 226 echo "Removing from row $row_b ... " 227 228 if [ "$num_b" -eq 1 ] 229 then 230 peg_msg=peg 231 else 232 peg_msg=pegs 233 fi 234 235 echo " $num_b $peg_msg." 236 237 display 238 tally_up 239 240 if [ ${Rows[0]} -eq 1 ] 241 then 242 echo " Bot wins!" 243 tput sgr0 # Restore display. 244 exit $WON 245 fi 246 247 } 248 249 250 # ================================================== # 251 instructions # If human player needs them . . . 252 tput bold # Bold characters for easier viewing. 253 display # Show game board. 254 255 while [ true ] # Main loop. 256 do # Alternate human and bot turns. 257 player_move 258 bot_move 259 done 260 # ================================================== # 261 262 # Exercise: 263 # -------- 264 # Improve the bot's strategy. 265 # There is, in fact, a Nim strategy that can force a win. 266 # See the Wikipedia article on Nim: http://en.wikipedia.org/wiki/Nim 267 # Recode the bot to use this strategy (rather difficult). 268 269 # Curiosities: 270 # ----------- 271 # Nim played a prominent role in Alain Resnais' 1961 New Wave film, 272 #+ Last Year at Marienbad. 273 # 274 # In 1978, Leo Christopherson wrote an animated version of Nim, 275 #+ Android Nim, for the TRS-80 Model I. |
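For the exercise above: the forcing strategy hinges on the nim-sum, the bitwise XOR of all the row sizes. A small sketch of how to compute it in Bash (not a full solution -- the misere "last peg loses" rule still needs a special case when only one-peg rows remain; see the Wikipedia article cited above):

#!/bin/bash
# nim-sum.sh: compute the nim-sum of a position.

Rows=( 5 4 3 2 1 )      # Pegs per row (sample position).

nimsum=0
for r in "${Rows[@]}"
do
  (( nimsum ^= r ))     # XOR the row sizes together.
done

echo "Nim-sum of \"${Rows[*]}\" is $nimsum."
# In normal play, a winning move leaves the opponent a position with nim-sum 0.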
Example A-45. An all-purpose shell scripting homework assignment solution
1 #!/bin/bash 2 # homework.sh: All-purpose homework assignment solution. 3 # Author: M. Leo Cooper 4 # If you substitute your own name as author, then it is plagiarism, 5 #+ possibly a lesser sin than cheating on your homework! 6 # License: Public Domain 7 8 # This script may be turned in to your instructor 9 #+ in fulfillment of ALL shell scripting homework assignments. 10 # It's sparsely commented, but you, the student, can easily remedy that. 11 # The script author repudiates all responsibility! 12 13 DLA=1 14 P1=2 15 P2=4 16 P3=7 17 PP1=0 18 PP2=8 19 MAXL=9 20 E_LZY=99 21 22 declare -a L 23 L[0]="3 4 0 17 29 8 13 18 19 17 20 2 19 14 17 28" 24 L[1]="8 29 12 14 18 19 29 4 12 15 7 0 19 8 2 0 11 11 24 29 17 4 6 17 4 19" 25 L[2]="29 19 7 0 19 29 8 29 7 0 21 4 29 13 4 6 11 4 2 19 4 3" 26 L[3]="19 14 29 2 14 12 15 11 4 19 4 29 19 7 8 18 29" 27 L[4]="18 2 7 14 14 11 22 14 17 10 29 0 18 18 8 6 13 12 4 13 19 26" 28 L[5]="15 11 4 0 18 4 29 0 2 2 4 15 19 29 12 24 29 7 20 12 1 11 4 29" 29 L[6]="4 23 2 20 18 4 29 14 5 29 4 6 17 4 6 8 14 20 18 29" 30 L[7]="11 0 25 8 13 4 18 18 27" 31 L[8]="0 13 3 29 6 17 0 3 4 29 12 4 29 0 2 2 14 17 3 8 13 6 11 24 26" 32 L[9]="19 7 0 13 10 29 24 14 20 26" 33 34 declare -a \ 35 alph=( A B C D E F G H I J K L M N O P Q R S T U V W X Y Z . , : ' ' ) 36 37 38 pt_lt () 39 { 40 echo -n "${alph[$1]}" 41 echo -n -e "\a" 42 sleep $DLA 43 } 44 45 b_r () 46 { 47 echo -e '\E[31;48m\033[1m' 48 } 49 50 cr () 51 { 52 echo -e "\a" 53 sleep $DLA 54 } 55 56 restore () 57 { 58 echo -e '\033[0m' # Bold off. 59 tput sgr0 # Normal. 60 } 61 62 63 p_l () 64 { 65 for ltr in $1 66 do 67 pt_lt "$ltr" 68 done 69 } 70 71 # ---------------------- 72 b_r 73 74 for i in $(seq 0 $MAXL) 75 do 76 p_l "${L[i]}" 77 if [[ "$i" -eq "$P1" || "$i" -eq "$P2" || "$i" -eq "$P3" ]] 78 then 79 cr 80 elif [[ "$i" -eq "$PP1" || "$i" -eq "$PP2" ]] 81 then 82 cr; cr 83 fi 84 done 85 86 restore 87 # ---------------------- 88 89 echo 90 91 exit $E_LZY 92 93 # A typical example of an obfuscated script that is difficult 94 #+ to understand, and frustrating to maintain. 95 # In your career as a sysadmin, you'll run into these critters 96 #+ all too often. |
Example A-46. An alternate version of the getopt-simple.sh script
1 #!/bin/bash 2 # UseGetOpt.sh 3 4 # Author: Peggy Russell <prusselltechgroup@gmail.com> 5 6 UseGetOpt () { 7 declare inputOptions 8 declare -r E_OPTERR=85 9 declare -r ScriptName=${0##*/} 10 declare -r ShortOpts="adf:hlt" 11 declare -r LongOpts="aoption,debug,file:,help,log,test" 12 13 DoSomething () { 14 echo "The function name is '${FUNCNAME}'" 15 # Recall that $FUNCNAME is an internal variable 16 #+ holding the name of the function it is in. 17 } 18 19 inputOptions=$(getopt -o "${ShortOpts}" --long \ 20 "${LongOpts}" --name "${ScriptName}" -- "${@}") 21 22 if [[ ($? -ne 0) || ($# -eq 0) ]]; then 23 echo "Usage: ${ScriptName} [-dhlt] {OPTION...}" 24 exit $E_OPTERR 25 fi 26 27 eval set -- "${inputOptions}" 28 29 # Only for educational purposes. Can be removed. 30 #----------------------------------------------- 31 echo "++ Test: Number of arguments: [$#]" 32 echo '++ Test: Looping through "$@"' 33 for a in "$@"; do 34 echo " ++ [$a]" 35 done 36 #----------------------------------------------- 37 38 while true; do 39 case "${1}" in 40 --aoption | -a) # Argument found. 41 echo "Option [$1]" 42 ;; 43 44 --debug | -d) # Enable informational messages. 45 echo "Option [$1] Debugging enabled" 46 ;; 47 48 --file | -f) # Check for optional argument. 49 case "$2" in #+ Double colon is optional argument. 50 "") # Not there. 51 echo "Option [$1] Use default" 52 shift 53 ;; 54 55 *) # Got it 56 echo "Option [$1] Using input [$2]" 57 shift 58 ;; 59 60 esac 61 DoSomething 62 ;; 63 64 --log | -l) # Enable Logging. 65 echo "Option [$1] Logging enabled" 66 ;; 67 68 --test | -t) # Enable testing. 69 echo "Option [$1] Testing enabled" 70 ;; 71 72 --help | -h) 73 echo "Option [$1] Display help" 74 break 75 ;; 76 77 --) # Done! $# is argument number for "--", $@ is "--" 78 echo "Option [$1] Dash Dash" 79 break 80 ;; 81 82 *) 83 echo "Major internal error!" 84 exit 8 85 ;; 86 87 esac 88 echo "Number of arguments: [$#]" 89 shift 90 done 91 92 shift 93 # Only for educational purposes. Can be removed. 94 #---------------------------------------------------------------------- 95 echo "++ Test: Number of arguments after \"--\" is [$#] They are: [$@]" 96 echo '++ Test: Looping through "$@"' 97 for a in "$@"; do 98 echo " ++ [$a]" 99 done 100 #---------------------------------------------------------------------- 101 102 } 103 104 ################################### M A I N ######################## 105 # If you remove "function UseGetOpt () {" and corresponding "}", 106 #+ you can uncomment the "exit 0" line below, and invoke this script 107 #+ with the various options from the command line. 108 #------------------------------------------------------------------- 109 # exit 0 110 111 echo "Test 1" 112 UseGetOpt -f myfile one "two three" four 113 114 echo;echo "Test 2" 115 UseGetOpt -h 116 117 echo;echo "Test 3 - Short Options" 118 UseGetOpt -adltf myfile anotherfile 119 120 echo;echo "Test 4 - Long Options" 121 UseGetOpt --aoption --debug --log --test --file myfile anotherfile 122 123 exit |
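For comparison, the Bash builtin getopts handles the same short options without the external getopt(1) command and the "eval set --" step, at the price of losing the --long-option forms. A minimal sketch (not part of the script above):

#!/bin/bash
# getopts-sketch.sh: same short options, using the 'getopts' builtin.

while getopts "adf:hlt" opt      # The ':' after f means -f takes an argument.
do
  case "$opt" in
    a) echo "Option [-a]" ;;
    d) echo "Option [-d] Debugging enabled" ;;
    f) echo "Option [-f] Using input [$OPTARG]" ;;
    l) echo "Option [-l] Logging enabled" ;;
    t) echo "Option [-t] Testing enabled" ;;
    h) echo "Option [-h] Display help" ;;
    *) echo "Usage: ${0##*/} [-adhlt] [-f file]" >&2; exit 85 ;;
  esac
done
shift $(( OPTIND - 1 ))          # Discard the options already processed.

echo "Remaining arguments: [$@]"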
To end this section, a review of the basics . . . and more.
Example A-47. Basics Reviewed
1 #!/bin/bash 2 # basics-reviewed.bash 3 4 # File extension == *.bash == specific to Bash 5 6 # Copyright (c) Michael S. Zick, 2003; All rights reserved. 7 # License: Use in any form, for any purpose. 8 # Revision: $ID$ 9 # 10 # Edited for layout by M.C. 11 # (author of the "Advanced Bash Scripting Guide") 12 # Fixes and updates (04/08) by Cliff Bamford. 13 14 15 # This script tested under Bash versions 2.04, 2.05a and 2.05b. 16 # It may not work with earlier versions. 17 # This demonstration script generates one --intentional-- 18 #+ "command not found" error message. See line 436. 19 20 # The current Bash maintainer, Chet Ramey, has fixed the items noted 21 #+ for later versions of Bash. 22 23 24 25 ###-------------------------------------------### 26 ### Pipe the output of this script to 'more' ### 27 ###+ else it will scroll off the page. ### 28 ### ### 29 ### You may also redirect its output ### 30 ###+ to a file for examination. ### 31 ###-------------------------------------------### 32 33 34 35 # Most of the following points are described at length in 36 #+ the text of the foregoing "Advanced Bash Scripting Guide." 37 # This demonstration script is mostly just a reorganized presentation. 38 # -- msz 39 40 # Variables are not typed unless otherwise specified. 41 42 # Variables are named. Names must contain a non-digit. 43 # File descriptor names (as in, for example: 2>&1) 44 #+ contain ONLY digits. 45 46 # Parameters and Bash array elements are numbered. 47 # (Parameters are very similar to Bash arrays.) 48 49 # A variable name may be undefined (null reference). 50 unset VarNull 51 52 # A variable name may be defined but empty (null contents). 53 VarEmpty='' # Two, adjacent, single quotes. 54 55 # A variable name may be defined and non-empty. 56 VarSomething='Literal' 57 58 # A variable may contain: 59 # * A whole number as a signed 32-bit (or larger) integer 60 # * A string 61 # A variable may also be an array. 62 63 # A string may contain embedded blanks and may be treated 64 #+ as if it where a function name with optional arguments. 65 66 # The names of variables and the names of functions 67 #+ are in different namespaces. 68 69 70 # A variable may be defined as a Bash array either explicitly or 71 #+ implicitly by the syntax of the assignment statement. 72 # Explicit: 73 declare -a ArrayVar 74 75 76 77 # The echo command is a built-in. 78 echo $VarSomething 79 80 # The printf command is a built-in. 81 # Translate %s as: String-Format 82 printf %s $VarSomething # No linebreak specified, none output. 83 echo # Default, only linebreak output. 84 85 86 87 88 # The Bash parser word breaks on whitespace. 89 # Whitespace, or the lack of it is significant. 90 # (This holds true in general; there are, of course, exceptions.) 91 92 93 94 95 # Translate the DOLLAR_SIGN character as: Content-Of. 96 97 # Extended-Syntax way of writing Content-Of: 98 echo ${VarSomething} 99 100 # The ${ ... } Extended-Syntax allows more than just the variable 101 #+ name to be specified. 102 # In general, $VarSomething can always be written as: ${VarSomething}. 103 104 # Call this script with arguments to see the following in action. 105 106 107 108 # Outside of double-quotes, the special characters @ and * 109 #+ specify identical behavior. 110 # May be pronounced as: All-Elements-Of. 111 112 # Without specification of a name, they refer to the 113 #+ pre-defined parameter Bash-Array. 
114 115 116 117 # Glob-Pattern references 118 echo $* # All parameters to script or function 119 echo ${*} # Same 120 121 # Bash disables filename expansion for Glob-Patterns. 122 # Only character matching is active. 123 124 125 # All-Elements-Of references 126 echo $@ # Same as above 127 echo ${@} # Same as above 128 129 130 131 132 # Within double-quotes, the behavior of Glob-Pattern references 133 #+ depends on the setting of IFS (Input Field Separator). 134 # Within double-quotes, All-Elements-Of references behave the same. 135 136 137 # Specifying only the name of a variable holding a string refers 138 #+ to all elements (characters) of a string. 139 140 141 # To specify an element (character) of a string, 142 #+ the Extended-Syntax reference notation (see below) MAY be used. 143 144 145 146 147 # Specifying only the name of a Bash array references 148 #+ the subscript zero element, 149 #+ NOT the FIRST DEFINED nor the FIRST WITH CONTENTS element. 150 151 # Additional qualification is needed to reference other elements, 152 #+ which means that the reference MUST be written in Extended-Syntax. 153 # The general form is: ${name[subscript]}. 154 155 # The string forms may also be used: ${name:subscript} 156 #+ for Bash-Arrays when referencing the subscript zero element. 157 158 159 # Bash-Arrays are implemented internally as linked lists, 160 #+ not as a fixed area of storage as in some programming languages. 161 162 163 # Characteristics of Bash arrays (Bash-Arrays): 164 # -------------------------------------------- 165 166 # If not otherwise specified, Bash-Array subscripts begin with 167 #+ subscript number zero. Literally: [0] 168 # This is called zero-based indexing. 169 ### 170 # If not otherwise specified, Bash-Arrays are subscript packed 171 #+ (sequential subscripts without subscript gaps). 172 ### 173 # Negative subscripts are not allowed. 174 ### 175 # Elements of a Bash-Array need not all be of the same type. 176 ### 177 # Elements of a Bash-Array may be undefined (null reference). 178 # That is, a Bash-Array may be "subscript sparse." 179 ### 180 # Elements of a Bash-Array may be defined and empty (null contents). 181 ### 182 # Elements of a Bash-Array may contain: 183 # * A whole number as a signed 32-bit (or larger) integer 184 # * A string 185 # * A string formated so that it appears to be a function name 186 # + with optional arguments 187 ### 188 # Defined elements of a Bash-Array may be undefined (unset). 189 # That is, a subscript packed Bash-Array may be changed 190 # + into a subscript sparse Bash-Array. 191 ### 192 # Elements may be added to a Bash-Array by defining an element 193 #+ not previously defined. 194 ### 195 # For these reasons, I have been calling them "Bash-Arrays". 196 # I'll return to the generic term "array" from now on. 197 # -- msz 198 199 200 echo "=========================================================" 201 202 # Lines 202 - 334 supplied by Cliff Bamford. (Thanks!) 
203 # Demo --- Interaction with Arrays, quoting, IFS, echo, * and @ --- 204 #+ all affect how things work 205 206 ArrayVar[0]='zero' # 0 normal 207 ArrayVar[1]=one # 1 unquoted literal 208 ArrayVar[2]='two' # 2 normal 209 ArrayVar[3]='three' # 3 normal 210 ArrayVar[4]='I am four' # 4 normal with spaces 211 ArrayVar[5]='five' # 5 normal 212 unset ArrayVar[6] # 6 undefined 213 ArrayValue[7]='seven' # 7 normal 214 ArrayValue[8]='' # 8 defined but empty 215 ArrayValue[9]='nine' # 9 normal 216 217 218 echo '--- Here is the array we are using for this test' 219 echo 220 echo "ArrayVar[0]='zero' # 0 normal" 221 echo "ArrayVar[1]=one # 1 unquoted literal" 222 echo "ArrayVar[2]='two' # 2 normal" 223 echo "ArrayVar[3]='three' # 3 normal" 224 echo "ArrayVar[4]='I am four' # 4 normal with spaces" 225 echo "ArrayVar[5]='five' # 5 normal" 226 echo "unset ArrayVar[6] # 6 undefined" 227 echo "ArrayValue[7]='seven' # 7 normal" 228 echo "ArrayValue[8]='' # 8 defined but empty" 229 echo "ArrayValue[9]='nine' # 9 normal" 230 echo 231 232 233 echo 234 echo '---Case0: No double-quotes, Default IFS of space,tab,newline ---' 235 IFS=$'\x20'$'\x09'$'\x0A' # In exactly this order. 236 echo 'Here is: printf %q {${ArrayVar[*]}' 237 printf %q ${ArrayVar[*]} 238 echo 239 echo 'Here is: printf %q {${ArrayVar[@]}' 240 printf %q ${ArrayVar[@]} 241 echo 242 echo 'Here is: echo ${ArrayVar[*]}' 243 echo ${ArrayVar[@]} 244 echo 'Here is: echo {${ArrayVar[@]}' 245 echo ${ArrayVar[@]} 246 247 echo 248 echo '---Case1: Within double-quotes - Default IFS of space-tab- 249 newline ---' 250 IFS=$'\x20'$'\x09'$'\x0A' # These three bytes, 251 echo 'Here is: printf %q "{${ArrayVar[*]}"' 252 printf %q "${ArrayVar[*]}" 253 echo 254 echo 'Here is: printf %q "{${ArrayVar[@]}"' 255 printf %q "${ArrayVar[@]}" 256 echo 257 echo 'Here is: echo "${ArrayVar[*]}"' 258 echo "${ArrayVar[@]}" 259 echo 'Here is: echo "{${ArrayVar[@]}"' 260 echo "${ArrayVar[@]}" 261 262 echo 263 echo '---Case2: Within double-quotes - IFS is q' 264 IFS='q' 265 echo 'Here is: printf %q "{${ArrayVar[*]}"' 266 printf %q "${ArrayVar[*]}" 267 echo 268 echo 'Here is: printf %q "{${ArrayVar[@]}"' 269 printf %q "${ArrayVar[@]}" 270 echo 271 echo 'Here is: echo "${ArrayVar[*]}"' 272 echo "${ArrayVar[@]}" 273 echo 'Here is: echo "{${ArrayVar[@]}"' 274 echo "${ArrayVar[@]}" 275 276 echo 277 echo '---Case3: Within double-quotes - IFS is ^' 278 IFS='^' 279 echo 'Here is: printf %q "{${ArrayVar[*]}"' 280 printf %q "${ArrayVar[*]}" 281 echo 282 echo 'Here is: printf %q "{${ArrayVar[@]}"' 283 printf %q "${ArrayVar[@]}" 284 echo 285 echo 'Here is: echo "${ArrayVar[*]}"' 286 echo "${ArrayVar[@]}" 287 echo 'Here is: echo "{${ArrayVar[@]}"' 288 echo "${ArrayVar[@]}" 289 290 echo 291 echo '---Case4: Within double-quotes - IFS is ^ followed by 292 space,tab,newline' 293 IFS=$'^'$'\x20'$'\x09'$'\x0A' # ^ + space tab newline 294 echo 'Here is: printf %q "{${ArrayVar[*]}"' 295 printf %q "${ArrayVar[*]}" 296 echo 297 echo 'Here is: printf %q "{${ArrayVar[@]}"' 298 printf %q "${ArrayVar[@]}" 299 echo 300 echo 'Here is: echo "${ArrayVar[*]}"' 301 echo "${ArrayVar[@]}" 302 echo 'Here is: echo "{${ArrayVar[@]}"' 303 echo "${ArrayVar[@]}" 304 305 echo 306 echo '---Case6: Within double-quotes - IFS set and empty ' 307 IFS='' 308 echo 'Here is: printf %q "{${ArrayVar[*]}"' 309 printf %q "${ArrayVar[*]}" 310 echo 311 echo 'Here is: printf %q "{${ArrayVar[@]}"' 312 printf %q "${ArrayVar[@]}" 313 echo 314 echo 'Here is: echo "${ArrayVar[*]}"' 315 echo "${ArrayVar[@]}" 316 echo 'Here is: echo 
"{${ArrayVar[@]}"' 317 echo "${ArrayVar[@]}" 318 319 echo 320 echo '---Case7: Within double-quotes - IFS is unset' 321 unset IFS 322 echo 'Here is: printf %q "{${ArrayVar[*]}"' 323 printf %q "${ArrayVar[*]}" 324 echo 325 echo 'Here is: printf %q "{${ArrayVar[@]}"' 326 printf %q "${ArrayVar[@]}" 327 echo 328 echo 'Here is: echo "${ArrayVar[*]}"' 329 echo "${ArrayVar[@]}" 330 echo 'Here is: echo "{${ArrayVar[@]}"' 331 echo "${ArrayVar[@]}" 332 333 echo 334 echo '---End of Cases---' 335 echo "========================================================="; echo 336 337 338 339 # Put IFS back to the default. 340 # Default is exactly these three bytes. 341 IFS=$'\x20'$'\x09'$'\x0A' # In exactly this order. 342 343 # Interpretation of the above outputs: 344 # A Glob-Pattern is I/O; the setting of IFS matters. 345 ### 346 # An All-Elements-Of does not consider IFS settings. 347 ### 348 # Note the different output using the echo command and the 349 #+ quoted format operator of the printf command. 350 351 352 # Recall: 353 # Parameters are similar to arrays and have the similar behaviors. 354 ### 355 # The above examples demonstrate the possible variations. 356 # To retain the shape of a sparse array, additional script 357 #+ programming is required. 358 ### 359 # The source code of Bash has a routine to output the 360 #+ [subscript]=value array assignment format. 361 # As of version 2.05b, that routine is not used, 362 #+ but that might change in future releases. 363 364 365 366 # The length of a string, measured in non-null elements (characters): 367 echo 368 echo '- - Non-quoted references - -' 369 echo 'Non-Null character count: '${#VarSomething}' characters.' 370 371 # test='Lit'$'\x00''eral' # $'\x00' is a null character. 372 # echo ${#test} # See that? 373 374 375 376 # The length of an array, measured in defined elements, 377 #+ including null content elements. 378 echo 379 echo 'Defined content count: '${#ArrayVar[@]}' elements.' 380 # That is NOT the maximum subscript (4). 381 # That is NOT the range of the subscripts (1 . . 4 inclusive). 382 # It IS the length of the linked list. 383 ### 384 # Both the maximum subscript and the range of the subscripts may 385 #+ be found with additional script programming. 386 387 # The length of a string, measured in non-null elements (characters): 388 echo 389 echo '- - Quoted, Glob-Pattern references - -' 390 echo 'Non-Null character count: '"${#VarSomething}"' characters.' 391 392 # The length of an array, measured in defined elements, 393 #+ including null-content elements. 394 echo 395 echo 'Defined element count: '"${#ArrayVar[*]}"' elements.' 396 397 # Interpretation: Substitution does not effect the ${# ... } operation. 398 # Suggestion: 399 # Always use the All-Elements-Of character 400 #+ if that is what is intended (independence from IFS). 401 402 403 404 # Define a simple function. 405 # I include an underscore in the name 406 #+ to make it distinctive in the examples below. 407 ### 408 # Bash separates variable names and function names 409 #+ in different namespaces. 410 # The Mark-One eyeball isn't that advanced. 411 ### 412 _simple() { 413 echo -n 'SimpleFunc'$@ # Newlines are swallowed in 414 } #+ result returned in any case. 415 416 417 # The ( ... ) notation invokes a command or function. 418 # The $( ... ) notation is pronounced: Result-Of. 419 420 421 # Invoke the function _simple 422 echo 423 echo '- - Output of function _simple - -' 424 _simple # Try passing arguments. 425 echo 426 # or 427 (_simple) # Try passing arguments. 
428 echo 429 430 echo '- Is there a variable of that name? -' 431 echo $_simple not defined # No variable by that name. 432 433 # Invoke the result of function _simple (Error msg intended) 434 435 ### 436 $(_simple) # Gives an error message: 437 # line 436: SimpleFunc: command not found 438 # --------------------------------------- 439 440 echo 441 ### 442 443 # The first word of the result of function _simple 444 #+ is neither a valid Bash command nor the name of a defined function. 445 ### 446 # This demonstrates that the output of _simple is subject to evaluation. 447 ### 448 # Interpretation: 449 # A function can be used to generate in-line Bash commands. 450 451 452 # A simple function where the first word of result IS a bash command: 453 ### 454 _print() { 455 echo -n 'printf %q '$@ 456 } 457 458 echo '- - Outputs of function _print - -' 459 _print parm1 parm2 # An Output NOT A Command. 460 echo 461 462 $(_print parm1 parm2) # Executes: printf %q parm1 parm2 463 # See above IFS examples for the 464 #+ various possibilities. 465 echo 466 467 $(_print $VarSomething) # The predictable result. 468 echo 469 470 471 472 # Function variables 473 # ------------------ 474 475 echo 476 echo '- - Function variables - -' 477 # A variable may represent a signed integer, a string or an array. 478 # A string may be used like a function name with optional arguments. 479 480 # set -vx # Enable if desired 481 declare -f funcVar #+ in namespace of functions 482 483 funcVar=_print # Contains name of function. 484 $funcVar parm1 # Same as _print at this point. 485 echo 486 487 funcVar=$(_print ) # Contains result of function. 488 $funcVar # No input, No output. 489 $funcVar $VarSomething # The predictable result. 490 echo 491 492 funcVar=$(_print $VarSomething) # $VarSomething replaced HERE. 493 $funcVar # The expansion is part of the 494 echo #+ variable contents. 495 496 funcVar="$(_print $VarSomething)" # $VarSomething replaced HERE. 497 $funcVar # The expansion is part of the 498 echo #+ variable contents. 499 500 # The difference between the unquoted and the double-quoted versions 501 #+ above can be seen in the "protect_literal.sh" example. 502 # The first case above is processed as two, unquoted, Bash-Words. 503 # The second case above is processed as one, quoted, Bash-Word. 504 505 506 507 508 # Delayed replacement 509 # ------------------- 510 511 echo 512 echo '- - Delayed replacement - -' 513 funcVar="$(_print '$VarSomething')" # No replacement, single Bash-Word. 514 eval $funcVar # $VarSomething replaced HERE. 515 echo 516 517 VarSomething='NewThing' 518 eval $funcVar # $VarSomething replaced HERE. 519 echo 520 521 # Restore the original setting trashed above. 522 VarSomething=Literal 523 524 # There are a pair of functions demonstrated in the 525 #+ "protect_literal.sh" and "unprotect_literal.sh" examples. 526 # These are general purpose functions for delayed replacement literals 527 #+ containing variables. 528 529 530 531 532 533 # REVIEW: 534 # ------ 535 536 # A string can be considered a Classic-Array of elements (characters). 537 # A string operation applies to all elements (characters) of the string 538 #+ (in concept, anyway). 539 ### 540 # The notation: ${array_name[@]} represents all elements of the 541 #+ Bash-Array: array_name. 542 ### 543 # The Extended-Syntax string operations can be applied to all 544 #+ elements of an array. 545 ### 546 # This may be thought of as a For-Each operation on a vector of strings. 547 ### 548 # Parameters are similar to an array. 
549 # The initialization of a parameter array for a script 550 #+ and a parameter array for a function only differ 551 #+ in the initialization of ${0}, which never changes its setting. 552 ### 553 # Subscript zero of the script's parameter array contains 554 #+ the name of the script. 555 ### 556 # Subscript zero of a function's parameter array DOES NOT contain 557 #+ the name of the function. 558 # The name of the current function is accessed by the $FUNCNAME variable. 559 ### 560 # A quick, review list follows (quick, not short). 561 562 echo 563 echo '- - Test (but not change) - -' 564 echo '- null reference -' 565 echo -n ${VarNull-'NotSet'}' ' # NotSet 566 echo ${VarNull} # NewLine only 567 echo -n ${VarNull:-'NotSet'}' ' # NotSet 568 echo ${VarNull} # Newline only 569 570 echo '- null contents -' 571 echo -n ${VarEmpty-'Empty'}' ' # Only the space 572 echo ${VarEmpty} # Newline only 573 echo -n ${VarEmpty:-'Empty'}' ' # Empty 574 echo ${VarEmpty} # Newline only 575 576 echo '- contents -' 577 echo ${VarSomething-'Content'} # Literal 578 echo ${VarSomething:-'Content'} # Literal 579 580 echo '- Sparse Array -' 581 echo ${ArrayVar[@]-'not set'} 582 583 # ASCII-Art time 584 # State Y==yes, N==no 585 # - :- 586 # Unset Y Y ${# ... } == 0 587 # Empty N Y ${# ... } == 0 588 # Contents N N ${# ... } > 0 589 590 # Either the first and/or the second part of the tests 591 #+ may be a command or a function invocation string. 592 echo 593 echo '- - Test 1 for undefined - -' 594 declare -i t 595 _decT() { 596 t=$t-1 597 } 598 599 # Null reference, set: t == -1 600 t=${#VarNull} # Results in zero. 601 ${VarNull- _decT } # Function executes, t now -1. 602 echo $t 603 604 # Null contents, set: t == 0 605 t=${#VarEmpty} # Results in zero. 606 ${VarEmpty- _decT } # _decT function NOT executed. 607 echo $t 608 609 # Contents, set: t == number of non-null characters 610 VarSomething='_simple' # Set to valid function name. 611 t=${#VarSomething} # non-zero length 612 ${VarSomething- _decT } # Function _simple executed. 613 echo $t # Note the Append-To action. 614 615 # Exercise: clean up that example. 616 unset t 617 unset _decT 618 VarSomething=Literal 619 620 echo 621 echo '- - Test and Change - -' 622 echo '- Assignment if null reference -' 623 echo -n ${VarNull='NotSet'}' ' # NotSet NotSet 624 echo ${VarNull} 625 unset VarNull 626 627 echo '- Assignment if null reference -' 628 echo -n ${VarNull:='NotSet'}' ' # NotSet NotSet 629 echo ${VarNull} 630 unset VarNull 631 632 echo '- No assignment if null contents -' 633 echo -n ${VarEmpty='Empty'}' ' # Space only 634 echo ${VarEmpty} 635 VarEmpty='' 636 637 echo '- Assignment if null contents -' 638 echo -n ${VarEmpty:='Empty'}' ' # Empty Empty 639 echo ${VarEmpty} 640 VarEmpty='' 641 642 echo '- No change if already has contents -' 643 echo ${VarSomething='Content'} # Literal 644 echo ${VarSomething:='Content'} # Literal 645 646 647 # "Subscript sparse" Bash-Arrays 648 ### 649 # Bash-Arrays are subscript packed, beginning with 650 #+ subscript zero unless otherwise specified. 651 ### 652 # The initialization of ArrayVar was one way 653 #+ to "otherwise specify". 
Here is the other way: 654 ### 655 echo 656 declare -a ArraySparse 657 ArraySparse=( [1]=one [2]='' [4]='four' ) 658 # [0]=null reference, [2]=null content, [3]=null reference 659 660 echo '- - Array-Sparse List - -' 661 # Within double-quotes, default IFS, Glob-Pattern 662 663 IFS=$'\x20'$'\x09'$'\x0A' 664 printf %q "${ArraySparse[*]}" 665 echo 666 667 # Note that the output does not distinguish between "null content" 668 #+ and "null reference". 669 # Both print as escaped whitespace. 670 ### 671 # Note also that the output does NOT contain escaped whitespace 672 #+ for the "null reference(s)" prior to the first defined element. 673 ### 674 # This behavior of 2.04, 2.05a and 2.05b has been reported 675 #+ and may change in a future version of Bash. 676 677 # To output a sparse array and maintain the [subscript]=value 678 #+ relationship without change requires a bit of programming. 679 # One possible code fragment: 680 ### 681 # local l=${#ArraySparse[@]} # Count of defined elements 682 # local f=0 # Count of found subscripts 683 # local i=0 # Subscript to test 684 ( # Anonymous in-line function 685 for (( l=${#ArraySparse[@]}, f = 0, i = 0 ; f < l ; i++ )) 686 do 687 # 'if defined then...' 688 ${ArraySparse[$i]+ eval echo '\ ['$i']='${ArraySparse[$i]} ; (( f++ )) } 689 done 690 ) 691 692 # The reader coming upon the above code fragment cold 693 #+ might want to review "command lists" and "multiple commands on a line" 694 #+ in the text of the foregoing "Advanced Bash Scripting Guide." 695 ### 696 # Note: 697 # The "read -a array_name" version of the "read" command 698 #+ begins filling array_name at subscript zero. 699 # ArraySparse does not define a value at subscript zero. 700 ### 701 # The user needing to read/write a sparse array to either 702 #+ external storage or a communications socket must invent 703 #+ a read/write code pair suitable for their purpose. 704 ### 705 # Exercise: clean it up. 706 707 unset ArraySparse 708 709 echo 710 echo '- - Conditional alternate (But not change)- -' 711 echo '- No alternate if null reference -' 712 echo -n ${VarNull+'NotSet'}' ' 713 echo ${VarNull} 714 unset VarNull 715 716 echo '- No alternate if null reference -' 717 echo -n ${VarNull:+'NotSet'}' ' 718 echo ${VarNull} 719 unset VarNull 720 721 echo '- Alternate if null contents -' 722 echo -n ${VarEmpty+'Empty'}' ' # Empty 723 echo ${VarEmpty} 724 VarEmpty='' 725 726 echo '- No alternate if null contents -' 727 echo -n ${VarEmpty:+'Empty'}' ' # Space only 728 echo ${VarEmpty} 729 VarEmpty='' 730 731 echo '- Alternate if already has contents -' 732 733 # Alternate literal 734 echo -n ${VarSomething+'Content'}' ' # Content Literal 735 echo ${VarSomething} 736 737 # Invoke function 738 echo -n ${VarSomething:+ $(_simple) }' ' # SimpleFunc Literal 739 echo ${VarSomething} 740 echo 741 742 echo '- - Sparse Array - -' 743 echo ${ArrayVar[@]+'Empty'} # An array of 'Empty'(ies) 744 echo 745 746 echo '- - Test 2 for undefined - -' 747 748 declare -i t 749 _incT() { 750 t=$t+1 751 } 752 753 # Note: 754 # This is the same test used in the sparse array 755 #+ listing code fragment. 756 757 # Null reference, set: t == -1 758 t=${#VarNull}-1 # Results in minus-one. 759 ${VarNull+ _incT } # Does not execute. 760 echo $t' Null reference' 761 762 # Null contents, set: t == 0 763 t=${#VarEmpty}-1 # Results in minus-one. 764 ${VarEmpty+ _incT } # Executes. 
765 echo $t' Null content' 766 767 # Contents, set: t == (number of non-null characters) 768 t=${#VarSomething}-1 # non-null length minus-one 769 ${VarSomething+ _incT } # Executes. 770 echo $t' Contents' 771 772 # Exercise: clean up that example. 773 unset t 774 unset _incT 775 776 # ${name?err_msg} ${name:?err_msg} 777 # These follow the same rules but always exit afterwards 778 #+ if an action is specified following the question mark. 779 # The action following the question mark may be a literal 780 #+ or a function result. 781 ### 782 # ${name?} ${name:?} are test-only, the return can be tested. 783 784 785 786 787 # Element operations 788 # ------------------ 789 790 echo 791 echo '- - Trailing sub-element selection - -' 792 793 # Strings, Arrays and Positional parameters 794 795 # Call this script with multiple arguments 796 #+ to see the parameter selections. 797 798 echo '- All -' 799 echo ${VarSomething:0} # all non-null characters 800 echo ${ArrayVar[@]:0} # all elements with content 801 echo ${@:0} # all parameters with content; 802 # ignoring parameter[0] 803 804 echo 805 echo '- All after -' 806 echo ${VarSomething:1} # all non-null after character[0] 807 echo ${ArrayVar[@]:1} # all after element[0] with content 808 echo ${@:2} # all after param[1] with content 809 810 echo 811 echo '- Range after -' 812 echo ${VarSomething:4:3} # ral 813 # Three characters after 814 # character[3] 815 816 echo '- Sparse array gotch -' 817 echo ${ArrayVar[@]:1:2} # four - The only element with content. 818 # Two elements after (if that many exist). 819 # the FIRST WITH CONTENTS 820 #+ (the FIRST WITH CONTENTS is being 821 #+ considered as if it 822 #+ were subscript zero). 823 # Executed as if Bash considers ONLY array elements with CONTENT 824 # printf %q "${ArrayVar[@]:0:3}" # Try this one 825 826 # In versions 2.04, 2.05a and 2.05b, 827 #+ Bash does not handle sparse arrays as expected using this notation. 828 # 829 # The current Bash maintainer, Chet Ramey, has corrected this. 830 831 832 echo '- Non-sparse array -' 833 echo ${@:2:2} # Two parameters following parameter[1] 834 835 # New victims for string vector examples: 836 stringZ=abcABC123ABCabc 837 arrayZ=( abcabc ABCABC 123123 ABCABC abcabc ) 838 sparseZ=( [1]='abcabc' [3]='ABCABC' [4]='' [5]='123123' ) 839 840 echo 841 echo ' - - Victim string - -'$stringZ'- - ' 842 echo ' - - Victim array - -'${arrayZ[@]}'- - ' 843 echo ' - - Sparse array - -'${sparseZ[@]}'- - ' 844 echo ' - [0]==null ref, [2]==null ref, [4]==null content - ' 845 echo ' - [1]=abcabc [3]=ABCABC [5]=123123 - ' 846 echo ' - non-null-reference count: '${#sparseZ[@]}' elements' 847 848 echo 849 echo '- - Prefix sub-element removal - -' 850 echo '- - Glob-Pattern match must include the first character. - -' 851 echo '- - Glob-Pattern may be a literal or a function result. - -' 852 echo 853 854 855 # Function returning a simple, Literal, Glob-Pattern 856 _abc() { 857 echo -n 'abc' 858 } 859 860 echo '- Shortest prefix -' 861 echo ${stringZ#123} # Unchanged (not a prefix). 862 echo ${stringZ#$(_abc)} # ABC123ABCabc 863 echo ${arrayZ[@]#abc} # Applied to each element. 864 865 # echo ${sparseZ[@]#abc} # Version-2.05b core dumps. 866 # Has since been fixed by Chet Ramey. 867 868 # The -it would be nice- First-Subscript-Of 869 # echo ${#sparseZ[@]#*} # This is NOT valid Bash. 
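# One way to get the wished-for First-Subscript-Of (and Last-Subscript-Of),
#+ assuming Bash 3.0 or later: "${!sparseZ[@]}" expands to the list of
#+ defined subscripts. (sparseSubs is just a scratch variable.)
sparseSubs=( "${!sparseZ[@]}" )           # ( 1 3 4 5 )
echo ${sparseSubs[0]}                     # 1 - First defined subscript.
echo ${sparseSubs[${#sparseSubs[@]}-1]}   # 5 - Last defined subscript.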
870 871 echo 872 echo '- Longest prefix -' 873 echo ${stringZ##1*3} # Unchanged (not a prefix) 874 echo ${stringZ##a*C} # abc 875 echo ${arrayZ[@]##a*c} # ABCABC 123123 ABCABC 876 877 # echo ${sparseZ[@]##a*c} # Version-2.05b core dumps. 878 # Has since been fixed by Chet Ramey. 879 880 echo 881 echo '- - Suffix sub-element removal - -' 882 echo '- - Glob-Pattern match must include the last character. - -' 883 echo '- - Glob-Pattern may be a literal or a function result. - -' 884 echo 885 echo '- Shortest suffix -' 886 echo ${stringZ%1*3} # Unchanged (not a suffix). 887 echo ${stringZ%$(_abc)} # abcABC123ABC 888 echo ${arrayZ[@]%abc} # Applied to each element. 889 890 # echo ${sparseZ[@]%abc} # Version-2.05b core dumps. 891 # Has since been fixed by Chet Ramey. 892 893 # The -it would be nice- Last-Subscript-Of 894 # echo ${#sparseZ[@]%*} # This is NOT valid Bash. 895 896 echo 897 echo '- Longest suffix -' 898 echo ${stringZ%%1*3} # Unchanged (not a suffix) 899 echo ${stringZ%%b*c} # a 900 echo ${arrayZ[@]%%b*c} # a ABCABC 123123 ABCABC a 901 902 # echo ${sparseZ[@]%%b*c} # Version-2.05b core dumps. 903 # Has since been fixed by Chet Ramey. 904 905 echo 906 echo '- - Sub-element replacement - -' 907 echo '- - Sub-element at any location in string. - -' 908 echo '- - First specification is a Glob-Pattern - -' 909 echo '- - Glob-Pattern may be a literal or Glob-Pattern function result. - -' 910 echo '- - Second specification may be a literal or function result. - -' 911 echo '- - Second specification may be unspecified. Pronounce that' 912 echo ' as: Replace-With-Nothing (Delete) - -' 913 echo 914 915 916 917 # Function returning a simple, Literal, Glob-Pattern 918 _123() { 919 echo -n '123' 920 } 921 922 echo '- Replace first occurrence -' 923 echo ${stringZ/$(_123)/999} # Changed (123 is a component). 924 echo ${stringZ/ABC/xyz} # abcxyz123ABCabc (matching is case-sensitive). 925 echo ${arrayZ[@]/ABC/xyz} # Applied to each element. 926 echo ${sparseZ[@]/ABC/xyz} # Works as expected. 927 928 echo 929 echo '- Delete first occurrence -' 930 echo ${stringZ/$(_123)/} 931 echo ${stringZ/ABC/} 932 echo ${arrayZ[@]/ABC/} 933 echo ${sparseZ[@]/ABC/} 934 935 # The replacement need not be a literal, 936 #+ since the result of a function invocation is allowed. 937 # This is general to all forms of replacement. 938 echo 939 echo '- Replace first occurrence with Result-Of -' 940 echo ${stringZ/$(_123)/$(_simple)} # Works as expected. 941 echo ${arrayZ[@]/ca/$(_simple)} # Applied to each element. 942 echo ${sparseZ[@]/ca/$(_simple)} # Works as expected. 943 944 echo 945 echo '- Replace all occurrences -' 946 echo ${stringZ//[b2]/X} # X-out b's and 2's 947 echo ${stringZ//abc/xyz} # xyzABC123ABCxyz 948 echo ${arrayZ[@]//abc/xyz} # Applied to each element. 949 echo ${sparseZ[@]//abc/xyz} # Works as expected. 950 951 echo 952 echo '- Delete all occurrences -' 953 echo ${stringZ//[b2]/} 954 echo ${stringZ//abc/} 955 echo ${arrayZ[@]//abc/} 956 echo ${sparseZ[@]//abc/} 957 958 echo 959 echo '- - Prefix sub-element replacement - -' 960 echo '- - Match must include the first character. - -' 961 echo 962 963 echo '- Replace prefix occurrences -' 964 echo ${stringZ/#[b2]/X} # Unchanged (neither is a prefix). 965 echo ${stringZ/#$(_abc)/XYZ} # XYZABC123ABCabc 966 echo ${arrayZ[@]/#abc/XYZ} # Applied to each element. 967 echo ${sparseZ[@]/#abc/XYZ} # Works as expected.
968 969 echo 970 echo '- Delete prefix occurrences -' 971 echo ${stringZ/#[b2]/} 972 echo ${stringZ/#$(_abc)/} 973 echo ${arrayZ[@]/#abc/} 974 echo ${sparseZ[@]/#abc/} 975 976 echo 977 echo '- - Suffix sub-element replacement - -' 978 echo '- - Match must include the last character. - -' 979 echo 980 981 echo '- Replace suffix occurrences -' 982 echo ${stringZ/%[b2]/X} # Unchanged (neither is a suffix). 983 echo ${stringZ/%$(_abc)/XYZ} # abcABC123ABCXYZ 984 echo ${arrayZ[@]/%abc/XYZ} # Applied to each element. 985 echo ${sparseZ[@]/%abc/XYZ} # Works as expected. 986 987 echo 988 echo '- Delete suffix occurrences -' 989 echo ${stringZ/%[b2]/} 990 echo ${stringZ/%$(_abc)/} 991 echo ${arrayZ[@]/%abc/} 992 echo ${sparseZ[@]/%abc/} 993 994 echo 995 echo '- - Special cases of null Glob-Pattern - -' 996 echo 997 998 echo '- Prefix all -' 999 # null substring pattern means 'prefix' 1000 echo ${stringZ/#/NEW} # NEWabcABC123ABCabc 1001 echo ${arrayZ[@]/#/NEW} # Applied to each element. 1002 echo ${sparseZ[@]/#/NEW} # Applied to null-content also. 1003 # That seems reasonable. 1004 1005 echo 1006 echo '- Suffix all -' 1007 # null substring pattern means 'suffix' 1008 echo ${stringZ/%/NEW} # abcABC123ABCabcNEW 1009 echo ${arrayZ[@]/%/NEW} # Applied to each element. 1010 echo ${sparseZ[@]/%/NEW} # Applied to null-content also. 1011 # That seems reasonable. 1012 1013 echo 1014 echo '- - Special case For-Each Glob-Pattern - -' 1015 echo '- - - - This is a nice-to-have dream - - - -' 1016 echo 1017 1018 _GenFunc() { 1019 echo -n ${0} # Illustration only. 1020 # Actually, that would be an arbitrary computation. 1021 } 1022 1023 # All occurrences, matching the AnyThing pattern. 1024 # Currently //*/ does not match null-content nor null-reference. 1025 # /#/ and /%/ do match null-content but not null-reference. 1026 echo ${sparseZ[@]//*/$(_GenFunc)} 1027 1028 1029 # A possible syntax would be to make 1030 #+ the parameter notation used within this construct mean: 1031 # ${1} - The full element 1032 # ${2} - The prefix, if any, to the matched sub-element 1033 # ${3} - The matched sub-element 1034 # ${4} - The suffix, if any, to the matched sub-element 1035 # 1036 # echo ${sparseZ[@]//*/$(_GenFunc ${3})} # Same as ${1} here. 1037 # Perhaps it will be implemented in a future version of Bash. 1038 1039 1040 exit 0 |
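The function-variable and delayed-replacement techniques demonstrated above boil down to a few lines. Here is a minimal, self-contained sketch; the names _greet, cmd, and WHO are invented for illustration and are not part of the listing.

#!/bin/bash
# A variable holds a command string whose embedded variable reference
#+ is expanded only when the string is eval'ed.

_greet() {                 # Hypothetical function.
    echo "Hello, $1"
}

WHO='world'
cmd='_greet $WHO'          # Single quotes: $WHO is NOT expanded here.

eval $cmd                  # Hello, world    ($WHO expanded now.)

WHO='again'
eval $cmd                  # Hello, again    (Picks up the new value.)

exit 0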
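The sparse-array listing fragment, and its "clean it up" exercise, become much shorter once ${!array[@]} is available (Bash 3.0 and later). One possible cleanup, as a self-contained sketch, which also distinguishes null content from null references (the latter simply never appear):

#!/bin/bash
# List a sparse array as [subscript]=value pairs, preserving subscripts,
#+ by iterating over the defined subscripts only.

declare -a ArraySparse
ArraySparse=( [1]=one [2]='' [4]='four' )

for i in "${!ArraySparse[@]}"      # Defined subscripts: 1 2 4.
do
    printf '[%d]=%q\n' "$i" "${ArraySparse[$i]}"
done
# [1]=one
# [2]=''
# [4]=four

exit 0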
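The ${name?err_msg} and ${name:?err_msg} forms mentioned after the test sections are easy to probe in isolation. In a non-interactive shell a failed test aborts the shell, so the sketch below runs each probe in a subshell and only the subshell exits; VarNull and VarSet are illustrative names.

#!/bin/bash
# Probing ${name?} / ${name:?}.

unset VarNull

( : ${VarNull:?'is required but not set'} )   # Message goes to stderr,
echo "Subshell exit status: $?"               #+ subshell exits non-zero.

( : ${VarNull?} ) 2>/dev/null                 # Bare form: default message,
echo "Bare form exit status: $?"              #+ still exits non-zero.

VarSet='present'
: ${VarSet:?'never triggers'}                 # Set and non-null: no effect.
echo "Still running."

exit 0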
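Finally, the positional-parameter selections are easiest to see with a fixed parameter list. A self-contained sketch with arbitrary values:

#!/bin/bash
# Trailing sub-element selection on positional parameters.

set -- alpha beta gamma delta    # Fake positional parameters.

echo "${@:1}"      # alpha beta gamma delta   (all parameters)
echo "${@:2}"      # beta gamma delta         (everything after $1)
echo "${@:2:2}"    # beta gamma               (two parameters, starting at $2)

exit 0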