diff --git a/aosstower/level_b1/nc.py b/aosstower/level_b1/nc.py
index f4e472c915b22026a9cb8c875bbc8ed4d1ec7d34..2f9e2703d2fe98e39b4ae72a096851c483d463ad 100644
--- a/aosstower/level_b1/nc.py
+++ b/aosstower/level_b1/nc.py
@@ -2,13 +2,12 @@ import os
 import sys
 import logging
 import pandas as pd
-from datetime import datetime as dt
+from datetime import datetime
 from netCDF4 import Dataset
 import numpy as np
 import platform
 from aosstower import station, schema
 from aosstower.level_00 import parser
-from datetime import timedelta as delta
 from aosstower.level_b1 import calc
 
 LOG = logging.getLogger(__name__)
@@ -437,7 +436,7 @@ def create_giant_netcdf(input_files, output_fn, zlib, chunk_size,
     else:
         chunk_sizes = [frame.shape[0]]
 
-    first_stamp = dt.strptime(str(frame.index[0]), '%Y-%m-%d %H:%M:%S')
+    first_stamp = datetime.strptime(str(frame.index[0]), '%Y-%m-%d %H:%M:%S')
     # NETCDF4_CLASSIC was chosen so that MFDataset reading would work. See:
     # http://unidata.github.io/netcdf4-python/#netCDF4.MFDataset
     nc_file = Dataset(output_fn, 'w', format='NETCDF4_CLASSIC')
@@ -455,9 +454,12 @@ def create_giant_netcdf(input_files, output_fn, zlib, chunk_size,
 def _dt_convert(datetime_str):
     """Parse datetime string, return datetime object"""
     try:
-        return dt.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
+        return datetime.strptime(datetime_str, '%Y%m%d')
     except ValueError:
-        return dt.strptime(datetime_str, '%Y-%m-%d')
+        try:
+            return datetime.strptime(datetime_str, '%Y-%m-%d')
+        except ValueError:
+            return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
 
 
 def main():
@@ -477,9 +479,7 @@ def main():
                                 "\'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\'")
     parser.add_argument('-n', '--interval', default='1T',
                         help="""Width of the interval to average input data
-over in Pandas offset format. If not specified, 1 minute averages are used. If
-specified then '_high', '_mean', and '_low' versions of the data fields are
-written to the output NetCDF.
+over in Pandas offset format. If not specified, 1 minute averages are used.
 Use '1D' for daily or '5T' for 5 minute averages.
 See this page for more details:
 http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases""")
@@ -492,7 +492,7 @@ http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases""")
     parser.add_argument('--data-stream', help="'datastream' global attribute to put in output file")
 
     parser.add_argument('-i', '--input', dest='input_files', required=True, nargs="+",
-                        help="aoss_tower level_00 paths. Use @filename to red a list of paths from that file.")
+                        help="aoss_tower level_00 paths. Use @filename to read a list of paths from that filename.")
 
     parser.add_argument('-o', '--output', dest='output_files', required=True, nargs="+",
                         help="""NetCDF filename(s) to create from input. If one
diff --git a/aosstower/level_b1/quicklook.py b/aosstower/level_b1/quicklook.py
index 2d9ce7f593dafdd26202e8db2fa0da41b4c29855..cb1e6e7b844d91afe109ec433312f2c5c0579c24 100644
--- a/aosstower/level_b1/quicklook.py
+++ b/aosstower/level_b1/quicklook.py
@@ -446,10 +446,14 @@ def create_plot(plot_names, frame, output,
 
 
 def _dt_convert(datetime_str):
+    """Parse datetime string, return datetime object"""
     try:
-        return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
+        return datetime.strptime(datetime_str, '%Y%m%d')
     except ValueError:
-        return datetime.strptime(datetime_str, '%Y-%m-%d')
+        try:
+            return datetime.strptime(datetime_str, '%Y-%m-%d')
+        except ValueError:
+            return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%S')
 
 
 def main():
@@ -465,7 +469,7 @@ def main():
         "only that day is created. Formats allowed: \'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\'")
     parser.add_argument('-e', '--end-time', type=_dt_convert,
     help="End time of plot. If only -e is given, a plot of only that day is " +
-          "created. Formats allowed: \'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\'")
+          "created. Formats allowed: \'YYYY-MM-DDTHH:MM:SS\', \'YYYY-MM-DD\', \'YYYYMMDD\'")
     parser.add_argument('--met-plots', nargs='+',
                         help="Override plots to use in the combined meteorogram plot")
     parser.add_argument("input_files", nargs="+", help="aoss_tower_level_b1 files")
diff --git a/scripts/archive_tower.sh b/scripts/archive_tower.sh
index f13212558d51fe096ccddeb68ab39e0ab23779c9..859c8cc2857ce3660e067bb41893f9f15c405dd1 100755
--- a/scripts/archive_tower.sh
+++ b/scripts/archive_tower.sh
@@ -29,7 +29,6 @@ fi
     fi
 
     log_info "$(date +%Y-%m-%dT%H:%M:%S): Running archive jobs for ${DATE}" >> $logfile
-    #log_info "$(date +%Y-%m-%dT%H:%M:%S): Running archive jobs for the past 3 days" >> $logfile
     $ENV/bin/python -m metobscommon.archive.incoming -vv -l $logfile --date=${DATE} aoss.tower
 
     log_info "Done"
diff --git a/scripts/metobs_config.sh b/scripts/metobs_config.sh
index 502a660590154aa1ce48ac16e246fe37362d3cb7..287839cf167fbbff08d4037c5171e52e0eca5d9c 100644
--- a/scripts/metobs_config.sh
+++ b/scripts/metobs_config.sh
@@ -11,6 +11,8 @@ export TOWER_CACHE_DIR=/mnt/inst-data/cache/aoss/tower
 export ENV=/data1/software/aoss-tower3
 # Directory where logs will be stored
 export LOGDIR=$ENV/logs
+# Directory where work files can be placed (inside a separate temp directory)
+export WORKDIR=/mnt/inst-data/tmp
 
 log_info() {
     echo "INFO: $*" &>> $logfile
@@ -21,3 +23,26 @@ oops() {
     exit 1
 }
 
+day_before() {
+    date +%Y%m%d --date "$1 -1 day"  # print yesterday of $1 (YYYYMMDD); '+' required before format
+}
+
+work_dir() {
+    mktemp -d --tmpdir="$WORKDIR" "work_$1_"
+}
+
+cache_level_00_file() {
+    d=$1
+    year=${d:0:4}
+    month=${d:4:2}
+    day=${d:6:2}
+    echo "${TOWER_CACHE_DIR}/aoss/tower/level_00/version_00/${year}/${month}/${day}/aoss_tower.${year}-${month}-${day}.ascii"
+}
+
+cache_level_b1_file() {
+    d=$1
+    year=${d:0:4}
+    month=${d:4:2}
+    day=${d:6:2}
+    echo "${TOWER_CACHE_DIR}/aoss/tower/level_b1/version_00/${year}/${month}/${day}/aoss_tower.${year}-${month}-${day}.nc"
+}
\ No newline at end of file
diff --git a/scripts/run_tower_level_b1.sh b/scripts/run_tower_level_b1.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f9328b17513a53e635c60a3bae68f26bc7db0677
--- /dev/null
+++ b/scripts/run_tower_level_b1.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+
+# Description: Create Level b1 netcdf4 files and the corresponding quicklooks
+SCRIPT_HOME="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+SCRIPT_NAME=$(basename $0)
+SCRIPT_NAME=${SCRIPT_NAME/.sh/}
+
+# Get environment variables and common functions
+source $SCRIPT_HOME/metobs_config.sh
+
+DATE=$1
+if [ -z "$DATE" ]; then
+   DATE=`date +%Y%m%d`
+fi
+LOCK="${ENV}/locks/${SCRIPT_NAME}.lock"
+logfile="${LOGDIR}/${SCRIPT_NAME}.log"
+
+if [ ! -d $LOGDIR ]; then
+    oops "Log directory doesn't exist: $LOGDIR"
+    exit 1
+fi
+
+(
+    flock -x -n 200 || { log_info "Script is already running, will not run again."; exit 1; }
+
+    if [ ! -d $TOWER_CACHE_DIR ]; then
+        oops "Tower cache directory doesn't exist: $TOWER_CACHE_DIR"
+    fi
+
+    log_info "$(date +%Y-%m-%dT%H:%M:%S): Running level b1 jobs for ${DATE}" >> $logfile
+
+    tmp_dir=`work_dir "$DATE"`
+
+    ### NetCDF Generation ###
+    # properly generating a netcdf file for day X at least requires the data for day X-1 and X
+    previous_date=`day_before "$DATE"`
+    prev_file=`cache_level_00_file "$previous_date"`
+    curr_file=`cache_level_00_file "$DATE"`
+    out_file=`cache_level_b1_file "$DATE"`
+    out_fn=`basename "$out_file"`
+    tmp_out="$tmp_dir/$out_fn"
+    $ENV/bin/python -m aosstower.level_b1.nc -vv -z -i "$prev_file" "$curr_file" --date="${DATE}" -o "$tmp_out" >> $logfile
+    nc_status=$?
+    if [ $nc_status -ne 0 ]; then
+        oops "NetCDF generation failed for $DATE"
+    fi
+
+    echo "Moving NetCDF file from temp directory to cache" >> $logfile
+    $ENV/bin/python -m metobscommon.archive.incoming -vv -l $logfile --dates=${DATE} b1 aoss.tower "$tmp_out"
+
+    ### Quicklook Generation ###
+    # assumes that out_file is what the archive script wrote the file as
+    $ENV/bin/python -m aosstower.level_b1.quicklook -vv --thumbnail -s "$DATE" -i "$out_file" -o "$tmp_dir/aoss_tower.{plot_name}.{start_time:%Y-%m-%d}.png" -p meteorogram td pressure wind_speed wind_dir accum_precip solar_flux
+    quicklook_status=$?
+    if [ $quicklook_status -ne 0 ]; then
+        oops "Quicklook generation failed for $DATE"
+    fi
+
+    echo "Moving Level B1 quicklooks from temp directory to cache" >> $logfile
+    $ENV/bin/python -m metobscommon.archive.incoming -vv -l $logfile --dates=${DATE} b1 aoss.tower "$tmp_dir/aoss_tower.*.png"
+
+    log_info "Done"
+
+) 200>$LOCK