Merge pull request #98 from uberbaud/posix_sh

Use POSIX `sh` for systems without `bash`
Author: Al Barrentine
Date: 2016-07-27 18:44:11 -04:00
Committed by: GitHub
2 changed files with 14 additions and 21 deletions
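
The changes below replace the scripts' bashisms with portable POSIX equivalents: `#!/usr/bin/env bash` becomes `#!/bin/sh`, the `function` keyword gives way to plain `name() { ... }` definitions, the `BASIC_MODULE_DIRS` array becomes a whitespace-separated string, `trap`'s `SIGINT` loses its `SIG` prefix, and the bash-only `export -f` is replaced by packing the worker command into a string that child shells run via `sh -c`. A minimal sketch of the same substitutions, using illustrative names that are not part of the diff:

    #!/bin/sh
    # bash-only: function greet { ...; }; NAMES=(a b c); export -f greet
    greet() { echo "hello $1"; }          # POSIX function definition
    NAMES="alice bob carol"               # string instead of an array
    for n in $NAMES; do greet "$n"; done  # unquoted: split on whitespace
    trap 'echo interrupted; exit 1' INT   # signal name without SIG prefix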

@@ -1,2 +1,2 @@
-#!/usr/bin/env bash
+#!/bin/sh
 autoreconf -fi --warning=no-portability

@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/sh
 set -e
@@ -26,7 +26,7 @@ LIBPOSTAL_GEO_UPDATED_PATH=$LIBPOSTAL_DATA_DIR/last_updated_geo
 LIBPOSTAL_PARSER_UPDATED_PATH=$LIBPOSTAL_DATA_DIR/last_updated_parser
 LIBPOSTAL_LANG_CLASS_UPDATED_PATH=$LIBPOSTAL_DATA_DIR/last_updated_language_classifier
-BASIC_MODULE_DIRS=(address_expansions numex transliteration)
+BASIC_MODULE_DIRS="address_expansions numex transliteration"
 GEODB_MODULE_DIR=geodb
 PARSER_MODULE_DIR=address_parser
 LANGUAGE_CLASSIFIER_MODULE_DIR=language_classifier
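
POSIX `sh` has no arrays, so the module list is stored as a single whitespace-separated string; consumers then depend on field splitting of the unquoted expansion. A sketch of how such a string is walked (hypothetical loop, not in the diff):

    BASIC_MODULE_DIRS="address_expansions numex transliteration"
    for dir in $BASIC_MODULE_DIRS; do    # deliberately unquoted
        echo "module dir: $dir"
    done
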
@@ -43,29 +43,22 @@ LARGE_FILE_SIZE=$((CHUNK_SIZE*2))
 NUM_WORKERS=10
-function kill_background_processes {
+kill_background_processes() {
     jobs -p | xargs kill;
     exit
 }
-trap kill_background_processes SIGINT
+trap kill_background_processes INT
-function download_part() {
-    i=$1
-    offset=$2
-    max=$3
-    url=$4
-    part_filename=$5
-    echo "Downloading part $i: filename=$part_filename, offset=$offset, max=$max"
-    curl $url --silent -H"Range:bytes=$offset-$max" -o $part_filename
-}
-export -f download_part
+PART_MSG='echo "Downloading part $1: filename=$5, offset=$2, max=$3"'
+PART_CURL='curl $4 --silent -H"Range:bytes=$2-$3" -o $5'
+DOWNLOAD_PART="$PART_MSG;$PART_CURL"
-function download_multipart() {
+download_multipart() {
     url=$1
     filename=$2
     size=$3
-    num_workers=$4
     num_chunks=$((size/CHUNK_SIZE))
     echo "Downloading multipart: $url, size=$size, num_chunks=$num_chunks"
@@ -81,7 +74,7 @@ function download_multipart() {
         fi;
         printf "%s\0%s\0%s\0%s\0%s\0" "$i" "$offset" "$max" "$url" "$part_filename"
         offset=$((offset+CHUNK_SIZE))
-    done | xargs -0 -n 5 -P $NUM_WORKERS bash -c 'download_part "$@"' --
+    done | xargs -0 -n 5 -P $NUM_WORKERS sh -c "$DOWNLOAD_PART" --
     > $local_path
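
With the function gone, the fan-out becomes `sh -c "$DOWNLOAD_PART" --`: `xargs -0 -n 5` slices the NUL-delimited `printf` stream into groups of five fields, `-P $NUM_WORKERS` runs the children in parallel, and in each child the `--` fills `$0` while the five fields land in `$1`..`$5`, the positions the command string references. A toy version of the plumbing (illustrative values only):

    printf '%s\0%s\0%s\0%s\0%s\0' 0 0 1023 http://example.com/f part.0 |
        xargs -0 -n 5 -P 4 sh -c 'echo "part $1: bytes $2-$3 of $4 -> $5"' --
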
@@ -96,7 +89,7 @@ function download_multipart() {
 }
-function download_file() {
+download_file() {
     updated_path=$1
     data_dir=$2
     filename=$3
@@ -117,7 +110,7 @@ function download_file() {
     content_length=$(curl -I $url 2> /dev/null | awk '/^Content-Length:/ { print $2 }' | tr -d '[[:space:]]')
     if [ $content_length -ge $LARGE_FILE_SIZE ]; then
-        download_multipart $url $local_path $content_length $NUM_WORKERS
+        download_multipart $url $local_path $content_length
     else
         curl $url -o $local_path
     fi
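
`download_multipart` now reads the worker count from the global `NUM_WORKERS` (the unused `num_workers=$4` parameter was dropped above), so the call site passes only the URL, destination path, and content length.
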
@@ -153,7 +146,7 @@ if [ $COMMAND = "download" ]; then
 elif [ $COMMAND = "upload" ]; then
     if [ $FILE = "base" ] || [ $FILE = "all" ]; then
-        tar -C $LIBPOSTAL_DATA_DIR -cvzf $LIBPOSTAL_DATA_DIR/$LIBPOSTAL_DATA_FILE ${BASIC_MODULE_DIRS[*]}
+        tar -C $LIBPOSTAL_DATA_DIR -cvzf $LIBPOSTAL_DATA_DIR/$LIBPOSTAL_DATA_FILE $BASIC_MODULE_DIRS
         aws s3 cp --acl=public-read $LIBPOSTAL_DATA_DIR/$LIBPOSTAL_DATA_FILE $LIBPOSTAL_S3_KEY
     fi
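
`${BASIC_MODULE_DIRS[*]}` is bash array expansion; with the variable now a plain string, the unquoted `$BASIC_MODULE_DIRS` word-splits into the same three directory arguments for `tar`. A quick way to see the splitting (demo only):

    BASIC_MODULE_DIRS="address_expansions numex transliteration"
    set -- $BASIC_MODULE_DIRS   # reuse positional params to count words
    echo "$# module dirs"       # prints: 3 module dirs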