#!/usr/bin/env bash
# Split the continuously growing LoggerNet files into daily files.
# LoggerNet writes data to two growing files; this script should be run once a
# day, at midnight, to roll them over into per-day files.
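#
# A minimal crontab entry for the nightly run might look like the following
# (the install path and log file shown here are assumptions, not part of this
# repository):
#
#   0 0 * * * /usr/local/bin/loggernet_rotate.sh >> /var/log/loggernet_rotate.log 2>&1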

# Echo to stderr. Defined as a function rather than an alias because aliases
# are not expanded in non-interactive bash scripts by default.
errcho() { >&2 echo "$@"; }

LOGGERNET_PATH=/opt/CampbellSci/LoggerNet
LOGGERNET_DATA=/data1/incoming/aoss/tower/
#LOGGERNET_DATA=/tmp
HEADER_SIZE=0
INPUT_PREFIX="rig_tower"
OUTPUT_PREFIX="rig_tower"
#FILE_SUFFIXES="engdata metdata gpsdata"
FILE_SUFFIXES="_"
INPUT_EXT="dat"
OUTPUT_EXT="ascii"
DAYS=6

assert_python() {
    # Succeed (exit 0) only when the default 'python' interpreter is version 3.
    python -c "import sys; sys.exit(0 if sys.version_info[0] >= 3 else 1)"
    if [ $? -ne 0 ]; then
        errcho "'python' must be version 3"
        exit 1
    fi
}

pause_loggernet() {
    # LgrNet setting 1 is the scheduled-collection flag; setting it to 0
    # suspends scheduled data collection.
    $LOGGERNET_PATH/cora_cmd <<EOF
connect localhost;
set-lgrnet-setting 1 0;
exit;
EOF
}

unpause_loggernet() {
    # Set the scheduled-collection flag back to 1 to resume data collection.
    $LOGGERNET_PATH/cora_cmd <<EOF
connect localhost;
set-lgrnet-setting 1 1;
exit;
EOF
}
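
# To check the pause/resume state by hand, the corresponding read command in
# cora_cmd (assuming this cora_cmd build provides get-lgrnet-setting) would be:
#
#   $LOGGERNET_PATH/cora_cmd <<EOF
#   connect localhost;
#   get-lgrnet-setting 1;
#   exit;
#   EOF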

#yyyymmdd_dates_in_csv() {
#    # parse out the unique dates from the LoggerNet timestamped CSV data file
#    # Returns epoch seconds
#    awk -F"," -v "header_size=$HEADER_SIZE" 'NR>header_size { print $1 }' <&0 | \
#    awk -F" " '{ time_spec=sprintf("%04d %02d %02d 00 00 00", substr($1, 2, 4), substr($1, 7, 2), substr($1, 10, 2)); print mktime(time_spec) } ' | \
#    sort | \
#    uniq
#}

# Split one input file into per-day output files; this does most of the work
# for the loop below.
# TODO: if an output file doesn't exist yet, write the header to it first
# TODO: handle an output file suffix
yyyy_jjj_tower_split() {
    # Fields 2 and 3 of each comma-delimited record are taken as the
    # four-digit year and the day of year; every record is appended to the
    # output file for its calendar day.
    awk_command='
NR>header_size {
time_spec = sprintf("%04d 01 %03d 00 00 00", $2, $3);
epoch_seconds = mktime(time_spec);
file_date = strftime("%Y-%m-%d", epoch_seconds);
jday = strftime("%j", epoch_seconds);
y = strftime("%Y", epoch_seconds);
output_file = sprintf("%s.%s.%s", file_prefix, file_date, file_ext);
print $0 >> output_file;
# emit each output filename so the caller can see which days were written
print output_file
}
'
    awk -F"," -v "header_size=$HEADER_SIZE" -v "file_prefix=$OUTPUT_PREFIX" -v "file_ext=$OUTPUT_EXT" "$awk_command" "$1" | uniq;
}
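
# Worked example (the full record layout is an assumption; only the
# year/day-of-year positions used above matter): with OUTPUT_PREFIX=rig_tower
# and OUTPUT_EXT=ascii, a record such as
#   123,2024,75,0,13.2
# is appended to rig_tower.2024-03-15.ascii (day 75 of 2024 is 15 March).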

oops() {
    errcho "$@"
    exit 1
}

assert_python

if [ $# -eq 1 ]; then
    ARCHIVE_PATH=$1
else
    ARCHIVE_PATH=$LOGGERNET_DATA
fi

if [ ! -d "$ARCHIVE_PATH" ]; then
    errcho "Archive destination location does not exist: $ARCHIVE_PATH"
    exit 1
fi

# Pause LoggerNet so it doesn't ingest new data or otherwise touch the files
# we'll be working with
pause_loggernet || oops "Could not pause LoggerNet"

cd "$LOGGERNET_DATA" || oops "Could not change to $LOGGERNET_DATA"
for file_suffix in $FILE_SUFFIXES; do
    if [ $file_suffix == '_' ]; then
        # we don't have a suffix
        file_suffix=""
    fi

    input_file="${INPUT_PREFIX}${file_suffix}.${INPUT_EXT}"
    if [ ! -s "$input_file" ]; then
        errcho "Input file ${input_file} does not exist or is empty"
        continue
    fi

    # Create a temporary file with just the header information in it
    header_fn="header${file_suffix}.tmp"
    head -n ${HEADER_SIZE} ${input_file} > $header_fn

    # Split main file in to dated files
    yyyy_jjj_tower_split $input_file

    # Replace the original file with just the header, truncating the data that
    # has now been split out into daily files
    cp $header_fn $input_file

    # remove the temporary header file
    rm ${header_fn}

    # TODO: remove the input file if only the header is left (in case LoggerNet
    # wants to write a fresh header)
done

# Have LoggerNet continue data collection
unpause_loggernet