From 2e3f3fd8b0448f1ab5c17644dc94a339c19edce9 Mon Sep 17 00:00:00 2001
From: ygao <ygao355@wisc.edu>
Date: Fri, 7 Mar 2025 17:26:45 +0000
Subject: [PATCH] A stable version

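Move the Python CGI endpoints from assets/python/ to cgi-bin/, point the
AJAX calls in scripts.js at the new /cgi-bin/ URLs, and drop the
assets/python/.htaccess CGI handler. Normalize satellite IDs to canonical
forms (e.g. g16 -> G16, n20 -> NOAA-20) and expand database queries to
cover all known variants of a requested satellite.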
---
 assets/js/scripts.js                          |   8 +-
 assets/python/.htaccess                       |  27 -
 assets/python/data.py                         |  15 +-
 assets/python/metadata.py                     | 164 +++--
 assets/python/sat_db_functions.py             |   0
 assets/python/satellites.py                   | 162 ++---
 .../sat_db_functions.cpython-36.pyc           | Bin 0 -> 5687 bytes
 cgi-bin/data.py                               | 142 ++++
 cgi-bin/metadata.py                           | 124 ++++
 cgi-bin/sat_db_functions.py                   | 262 +++++++
 cgi-bin/satellite_relationships.json          | 682 ++++++++++++++++++
 cgi-bin/satellites.py                         |  91 +++
 index.html                                    |   3 +-
 latency_viewer.log                            | 184 +++++
 logs/debug.log                                |   0
 15 files changed, 1694 insertions(+), 170 deletions(-)
 delete mode 100644 assets/python/.htaccess
 mode change 100644 => 100755 assets/python/data.py
 mode change 100644 => 100755 assets/python/metadata.py
 mode change 100644 => 100755 assets/python/sat_db_functions.py
 mode change 100644 => 100755 assets/python/satellites.py
 create mode 100644 cgi-bin/__pycache__/sat_db_functions.cpython-36.pyc
 create mode 100755 cgi-bin/data.py
 create mode 100755 cgi-bin/metadata.py
 create mode 100755 cgi-bin/sat_db_functions.py
 create mode 100644 cgi-bin/satellite_relationships.json
 create mode 100755 cgi-bin/satellites.py
 create mode 100644 latency_viewer.log
 create mode 100644 logs/debug.log

diff --git a/assets/js/scripts.js b/assets/js/scripts.js
index 2965880..40639cf 100644
--- a/assets/js/scripts.js
+++ b/assets/js/scripts.js
@@ -163,7 +163,7 @@ function fetchMetadata(dateObj) {
     showLoading();
     
     $.ajax({
-        url: 'assets/python/metadata.py',
+        url: '/cgi-bin/metadata.py',
         method: 'GET',
         dataType: 'json',
         success: function(response) {
@@ -406,7 +406,7 @@ function fetchDataForDay(dateObj){
     
     // Use low-level XHR for better memory control
     window.currentAjaxRequest = $.ajax({
-        url: '/api/data?' + query,
+        url: '/cgi-bin/data.py?' + query,
         method: 'GET',
         dataType: 'json',
         success: function(response) {
@@ -559,7 +559,7 @@ function fetchSatellitesForDay(dateObj) {
     let dateStr = `${year}-${month}-${day}`;
     
     $.ajax({
-        url: 'assets/python/satellites.py?date=' + dateStr,
+        url: '/cgi-bin/satellites.py?date=' + dateStr,
         method: 'GET',
         dataType: 'json',
         success: function(response) {
@@ -781,7 +781,7 @@ function fetchDataForDay(dateObj){
     
     // Use low-level XHR for better memory control
     window.currentAjaxRequest = $.ajax({
-        url: 'assets/python/data.py?' + query,
+        url: '/cgi-bin/data.py?' + query,
         method: 'GET',
         dataType: 'json',
         success: function(response) {
diff --git a/assets/python/.htaccess b/assets/python/.htaccess
deleted file mode 100644
index ac5dce0..0000000
--- a/assets/python/.htaccess
+++ /dev/null
@@ -1,27 +0,0 @@
-# Enable execution of Python scripts
-AddHandler cgi-script .py
-Options +ExecCGI
-
-# Make sure all Python scripts are treated as executable CGI scripts
-<FilesMatch "\.py$">
-    SetHandler cgi-script
-    Options +ExecCGI
-</FilesMatch>
-
-# Set proper permissions for Python scripts
-<Files ~ "\.py$">
-    Require all granted
-</Files>
-
-# Set default character encoding
-AddDefaultCharset UTF-8
-
-# Disable directory browsing for security
-Options -Indexes
-
-# Enable CORS for API access if needed
-<IfModule mod_headers.c>
-    Header set Access-Control-Allow-Origin "*"
-    Header set Access-Control-Allow-Methods "GET, POST, OPTIONS"
-    Header set Access-Control-Allow-Headers "Content-Type"
-</IfModule>
\ No newline at end of file
diff --git a/assets/python/data.py b/assets/python/data.py
old mode 100644
new mode 100755
index 62af622..eb0a55f
--- a/assets/python/data.py
+++ b/assets/python/data.py
@@ -23,7 +23,7 @@ logger = logging.getLogger()
 # Import functions from our shared module
 # Adjust the path as needed to find the module
 sys.path.append(os.path.dirname(os.path.abspath(__file__)))
-from sat_db_functions import run_sat_latency_query, get_canonical_id
+from sat_db_functions import run_sat_latency_query, get_canonical_id, get_all_variants
 
 def data_endpoint():
     """
@@ -49,9 +49,20 @@ def data_endpoint():
         # Prepare filters
         filters = {}
         if satellite_id:
-            filters["satellite-id"] = satellite_id
+            # Get the canonical form
+            canonical_id = get_canonical_id(satellite_id)
+            
+            # Get all variants of this canonical ID
+            all_variants = get_all_variants(canonical_id)
+            
+            # Use all variants in the filter
+            filters["satellite-id"] = all_variants
+            
+            logger.info(f"Expanded satellite ID {satellite_id} to variants: {all_variants}")
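+            # e.g. a request for "n20" maps to canonical "NOAA-20" and queries both "NOAA-20" and "n20"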
+        
         if coverage:
             filters["coverage"] = coverage
+        
         if instrument:
             filters["instrument"] = instrument
         
diff --git a/assets/python/metadata.py b/assets/python/metadata.py
old mode 100644
new mode 100755
index cd2d12b..eb20928
--- a/assets/python/metadata.py
+++ b/assets/python/metadata.py
@@ -1,62 +1,124 @@
 #!/usr/bin/env python3
 import cgi
-import cgitb
 import json
-import sys
 import os
-import logging
-
-# Enable detailed CGI error reporting
-cgitb.enable()
+import sys
 
-# Set up logging
-LOG_FILE = "latency_viewer.log"
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s - %(message)s',
-    filename=LOG_FILE,
-    filemode='a'
-)
-logger = logging.getLogger()
+# Print headers
+print("Content-Type: application/json")
+print()  # Empty line after headers
 
-# Import functions from our shared module
-# Adjust the path as needed to find the module
-sys.path.append(os.path.dirname(os.path.abspath(__file__)))
-from sat_db_functions import load_relationship_data
+# Get script directory
+script_dir = os.path.dirname(os.path.abspath(__file__))
 
-def metadata_endpoint():
-    """
-    API endpoint to get metadata about satellites, coverages, and instruments.
-    Returns consolidated data from the prebuilt relationship JSON.
-    """
-    try:
-        # Return the consolidated prebuilt relationships
-        relationships = load_relationship_data()
-        if relationships:
-            return relationships
-        else:
-            return {
-                "satellites": [],
-                "coverages": [],
-                "instruments": [],
-                "relationships": {}
-            }
-    except Exception as e:
-        logger.error(f"Error processing metadata request: {str(e)}", exc_info=True)
-        return {"error": f"Internal Server Error: {str(e)}"}, 500
+# Satellite ID mappings
+SATELLITE_ID_MAPPINGS = {
+    'G16': 'G16', 'g16': 'G16',
+    'G18': 'G18', 'g18': 'G18',
+    'G19': 'G19', 'g19': 'G19',
+    'DMSP-17': 'DMSP-17', 'dmsp17': 'DMSP-17',
+    'DMSP-18': 'DMSP-18', 'dmsp18': 'DMSP-18',
+    'DMSP-16': 'DMSP-16', 'dmsp16': 'DMSP-16',
+    'NOAA-19': 'NOAA-19', 'n19': 'NOAA-19',
+    'NOAA-20': 'NOAA-20', 'n20': 'NOAA-20',
+    'NOAA-21': 'NOAA-21', 'n21': 'NOAA-21'
+}
 
-# Main entry point for CGI
-if __name__ == "__main__":
-    # Set content-type header for JSON response
-    print("Content-Type: application/json")
-    print()  # Empty line after headers
+try:
+    # Define the path to the relationships file
+    relationships_file = os.path.join(script_dir, "satellite_relationships.json")
+    
+    # Check if file exists
+    if not os.path.exists(relationships_file):
+        print(json.dumps({
+            "error": f"Relationships file not found: {relationships_file}",
+            "satellites": [],
+            "coverages": [],
+            "instruments": [],
+            "relationships": {}
+        }))
+        sys.exit(0)
+    
+    # Load the relationships data
+    with open(relationships_file, 'r') as f:
+        raw_relationships = json.load(f)
     
-    # Get the result from our endpoint function
-    result, status_code = metadata_endpoint() if isinstance(metadata_endpoint(), tuple) else (metadata_endpoint(), 200)
+    # Create normalized data structure
+    normalized_data = {
+        "satellites": [],
+        "coverages": raw_relationships.get("coverages", []),
+        "instruments": raw_relationships.get("instruments", []),
+        "relationships": {},
+        "satellite_variants": {}
+    }
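+    # Illustrative output shape: {"satellites": ["G16", ...], "satellite_variants": {"G16": ["G16", "g16"]}, ...}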
     
-    # If there's an error code, log it (CGI can't easily send HTTP status codes)
-    if status_code != 200:
-        logger.warning(f"Returning error with status code {status_code}: {result}")
+    # Group satellites by canonical ID
+    satellite_groups = {}
+    for sat_id in raw_relationships.get("satellites", []):
+        canonical_id = SATELLITE_ID_MAPPINGS.get(sat_id, sat_id)
+        
+        if canonical_id not in satellite_groups:
+            satellite_groups[canonical_id] = []
+        
+        satellite_groups[canonical_id].append(sat_id)
     
-    # Print JSON response
-    print(json.dumps(result))
\ No newline at end of file
+    # Use canonical IDs as the satellite list
+    normalized_data["satellites"] = sorted(satellite_groups.keys())
+    
+    # Store variant mapping
+    normalized_data["satellite_variants"] = satellite_groups
+    
+    # Merge relationships for each canonical ID
+    for canonical_id, variants in satellite_groups.items():
+        normalized_data["relationships"][canonical_id] = {
+            "coverages": [],
+            "instruments": [],
+            "coverage_instruments": {}
+        }
+        
+        # Merge relationship data from all variants
+        for variant_id in variants:
+            if variant_id not in raw_relationships.get("relationships", {}):
+                continue
+                
+            original_relationship = raw_relationships["relationships"][variant_id]
+            
+            # Merge coverages
+            for coverage in original_relationship.get("coverages", []):
+                if coverage not in normalized_data["relationships"][canonical_id]["coverages"]:
+                    normalized_data["relationships"][canonical_id]["coverages"].append(coverage)
+            
+            # Merge instruments
+            for instrument in original_relationship.get("instruments", []):
+                if instrument not in normalized_data["relationships"][canonical_id]["instruments"]:
+                    normalized_data["relationships"][canonical_id]["instruments"].append(instrument)
+            
+            # Merge coverage_instruments
+            for coverage, instruments in original_relationship.get("coverage_instruments", {}).items():
+                if coverage not in normalized_data["relationships"][canonical_id]["coverage_instruments"]:
+                    normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage] = []
+                
+                for instrument in instruments:
+                    if instrument not in normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage]:
+                        normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage].append(instrument)
+        
+        # Sort arrays for consistent output
+        normalized_data["relationships"][canonical_id]["coverages"].sort()
+        normalized_data["relationships"][canonical_id]["instruments"].sort()
+        
+        for coverage in normalized_data["relationships"][canonical_id]["coverage_instruments"]:
+            normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage].sort()
+    
+    # Return the normalized data
+    print(json.dumps(normalized_data))
+
+except Exception as e:
+    import traceback
+    print(json.dumps({
+        "error": str(e),
+        "traceback": traceback.format_exc(),
+        "satellites": [],
+        "coverages": [],
+        "instruments": [],
+        "relationships": {}
+    }))
\ No newline at end of file
diff --git a/assets/python/sat_db_functions.py b/assets/python/sat_db_functions.py
old mode 100644
new mode 100755
diff --git a/assets/python/satellites.py b/assets/python/satellites.py
old mode 100644
new mode 100755
index 8dcd276..f5a38a4
--- a/assets/python/satellites.py
+++ b/assets/python/satellites.py
@@ -1,99 +1,91 @@
 #!/usr/bin/env python3
 import cgi
-import cgitb
 import json
 import sys
 import os
-import logging
 
-# Enable detailed CGI error reporting
-cgitb.enable()
+# Print headers
+print("Content-Type: application/json")
+print()  # Empty line after headers
 
-# Set up logging
-LOG_FILE = "latency_viewer.log"
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s - %(message)s',
-    filename=LOG_FILE,
-    filemode='a'
-)
-logger = logging.getLogger()
+# Get script directory
+script_dir = os.path.dirname(os.path.abspath(__file__))
 
-# Import functions from our shared module
-# Adjust the path as needed to find the module
-sys.path.append(os.path.dirname(os.path.abspath(__file__)))
-from sat_db_functions import load_relationship_data
+# Satellite ID mappings
+SATELLITE_ID_MAPPINGS = {
+    'G16': 'G16', 'g16': 'G16',
+    'G18': 'G18', 'g18': 'G18',
+    'G19': 'G19', 'g19': 'G19',
+    'DMSP-17': 'DMSP-17', 'dmsp17': 'DMSP-17',
+    'DMSP-18': 'DMSP-18', 'dmsp18': 'DMSP-18',
+    'DMSP-16': 'DMSP-16', 'dmsp16': 'DMSP-16',
+    'NOAA-19': 'NOAA-19', 'n19': 'NOAA-19',
+    'NOAA-20': 'NOAA-20', 'n20': 'NOAA-20',
+    'NOAA-21': 'NOAA-21', 'n21': 'NOAA-21'
+}
 
-# Path to satellite data
-SATELLITE_DATA_DIR = "/data/sat_latency"
-
-def satellites_endpoint():
-    """
-    API endpoint to get available satellites for a specific date.
-    Uses the consolidated prebuilt relationships.
-    """
-    try:
-        # Get query parameters
-        form = cgi.FieldStorage()
-        date_str = form.getvalue("date")
-        
-        if not date_str:
-            return {"error": "Missing date parameter (YYYY-MM-DD)"}, 400
-        
-        # Load consolidated relationships
-        relationships = load_relationship_data()
-        if not relationships:
-            return {
-                "satellites": [],
-                "baseDir": SATELLITE_DATA_DIR
-            }
-        
-        # Convert satellite list to the format expected by the frontend
-        satellites = []
-        
-        # Include variant information for display
-        for sat_id in relationships.get("satellites", []):
-            # Get variant information for display
-            variants = relationships.get("satellite_variants", {}).get(sat_id, [])
-            display_name = sat_id
-            
-            # If there are multiple variants, include them in the display name
-            if len(variants) > 1:
-                variant_str = ", ".join([v for v in variants if v != sat_id])
-                if variant_str:
-                    display_name = f"{sat_id} ({variant_str})"
-            
-            satellites.append({
-                "id": sat_id,
-                "displayName": display_name,
-                "fileExists": True,  # Assume file exists since it's in the relationships
-                "filePath": None     # We don't have file paths in this approach
-            })
+try:
+    # Get query parameters
+    form = cgi.FieldStorage()
+    date_str = form.getvalue("date")
+    
+    # Define the path to the relationships file
+    relationships_file = os.path.join(script_dir, "satellite_relationships.json")
+    
+    # Check if file exists
+    if not os.path.exists(relationships_file):
+        print(json.dumps({
+            "error": f"Relationships file not found: {relationships_file}",
+            "satellites": [],
+            "baseDir": "/data/sat_latency"
+        }))
+        sys.exit(0)
+    
+    # Load the relationships data
+    with open(relationships_file, 'r') as f:
+        raw_relationships = json.load(f)
+    
+    # Group satellites by canonical ID
+    satellite_groups = {}
+    for sat_id in raw_relationships.get("satellites", []):
+        canonical_id = SATELLITE_ID_MAPPINGS.get(sat_id, sat_id)
         
-        # Sort by ID for consistent display
-        satellites.sort(key=lambda x: x["id"])
+        if canonical_id not in satellite_groups:
+            satellite_groups[canonical_id] = []
         
-        return {
-            "satellites": satellites,
-            "baseDir": SATELLITE_DATA_DIR
-        }
-    
-    except Exception as e:
-        logger.error(f"Error getting satellites: {str(e)}", exc_info=True)
-        return {"error": f"Internal Server Error: {str(e)}"}, 500
-
-# Main entry point for CGI
-if __name__ == "__main__":
-    # Set content-type header for JSON response
-    print("Content-Type: application/json")
-    print()  # Empty line after headers
+        satellite_groups[canonical_id].append(sat_id)
     
-    # Get the result from our endpoint function
-    result, status_code = satellites_endpoint() if isinstance(satellites_endpoint(), tuple) else (satellites_endpoint(), 200)
+    # Create the normalized list of satellites
+    satellites = []
+    for canonical_id, variants in satellite_groups.items():
+        # Create display name with variants
+        display_name = canonical_id
+        if len(variants) > 1:
+            variant_str = ", ".join([v for v in variants if v != canonical_id])
+            if variant_str:
+                display_name = f"{canonical_id} ({variant_str})"
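+                # e.g. "G16 (g16)" when both spellings appear in the relationships file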
+        
+        satellites.append({
+            "id": canonical_id,
+            "displayName": display_name,
+            "fileExists": True
+        })
     
-    # If there's an error code, log it (CGI can't easily send HTTP status codes)
-    if status_code != 200:
-        logger.warning(f"Returning error with status code {status_code}: {result}")
+    # Sort the satellites by ID
+    satellites.sort(key=lambda x: x["id"])
     
-    # Print JSON response
-    print(json.dumps(result))
\ No newline at end of file
+    # Return the response
+    print(json.dumps({
+        "satellites": satellites,
+        "baseDir": "/data/sat_latency",
+        "normalized": True  # Flag to indicate normalization was performed
+    }))
+
+except Exception as e:
+    import traceback
+    print(json.dumps({
+        "error": str(e),
+        "traceback": traceback.format_exc(),
+        "satellites": [],
+        "baseDir": "/data/sat_latency"
+    }))
\ No newline at end of file
diff --git a/cgi-bin/__pycache__/sat_db_functions.cpython-36.pyc b/cgi-bin/__pycache__/sat_db_functions.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5a3861f8008388044281d3f25a9beb84ea428fee
GIT binary patch
literal 5687
zcmb7I%WvGq8RwAP_pTnRwJa&NOgoP4O)SZFQpXVlqpGFEYHeBd3bso)U95Mg71z7u
zYRI)Eg6l(-0zIUyTNJq@?kSfZddaEiv4<dN`WNt2ph18X=&k6%=%K%F_Mw$pv?vJ<
zhr@5a7r!Up%;nPmJn?1iSLYSwFUrs{f%&&_d4Iyd70w!p*}XH9$!FD6<y|v1dDqRD
zrPbs6ikaZ5ndF+8;<}mUF`RLn37kosDV%AX8Jt<1Ih-S(<jst&nAzPcR;HfYS5}nw
z&plMl5p&cUwX%;B;=`I^Wv$UAIZi9}F>7R>F=fZUukcYm_JLxK^KqWXJ<lh20rv@h
zj8EcT;M4p#pZb8A$2cn~#hGs)k+L4lENr_?Yr$<<Vqw0?g|*wTM7ioZyb_Er@QPnq
z@G5?}QSmLOx)+?lgVku*zEu_$7H!w@w(XWTU-w)m(%!p#m4<6Hyb~l=ZkIOZF2CKM
zT<cG+28s1M%gb{FBPUlb^(U8$d7*+cQ>$IRDzx70n$UZ*1FUA8H@(&n_TIt)mEOVu
zHB)njNaAUTE<IhYGLWHz?nL|sE-#D0SL!TOLS<it@>uZ3dzNohD~{{f)k?!yTQRm=
zVN{HPdT8lMMc5U`FRIb8((>l&ty^oGtL3$o^6lk~jkWdnN|9Ex{3468gI#5t|3(o>
z(0h5g`(gHK6?dVh{c>w>p<1)&9@&okDEv`*tL;?fN0@KzMPoo$9xw%53&cHXQ&B}4
z<9`-!1aJD{)mTr!ITEnaX!M2fdIAp69}<;Bys^A~XMOF)@~!gbo$|fqyKBqqn<Wtk
zauRW{4{S*RyF&`EyL^LIxu-DiO|16$&S5^_JA;P=F8NtDAX%t*NwVh}W_5+DHGCjB
z(`aTZw!-yKVjX5H9d&n6zKPKrc-HG3&1a#u&pLXjhbp}59<ZpPz9dI@mr8HpU5ob>
zUklZ`zOU@Fknxy=U_8F0JZHOK;7yEt^*MY0_Yc*M-T{RgQBHiK0XjLY5Kcf|4`UMQ
ztxi0QOJ0&Tcmd@<?tB(15;8umbP|9}^rZo(^POav>|>gSl+z0Ke06vQ+$JTrU)@)B
zza#fc9bo^JPAW_dL2I=CITL3OuK`1fFg#~FXMy8Gi6ccEcIA727t$p==|jAvdboZK
z-SuPzhgc1hOT<SS^cRoNmv4LfK0Ufi>b=j$r}v4Q#(ibyv-`vNp#h4qY<LY)b=P*U
z9L5O_W}(3&-1VWKt2^g_|3ktL*G>n^X#>X%1U9!}BLG@MmQL}|XnVF(GyHAK*scgZ
zS9Q6?jb^3Q!m`}E9;K@86H8QTmKSAg$MZ$IX*s@Ej78~zuX}<VPmE%Sa)*Lxlpg>M
zTtbWya2`WZ8`@at6Dkjp5#t0)xvQ9v4k(>Jii1VV;ZfXkg>Ugle{Ah}Q4F!X=|ws%
z7Bgnf6?V;bDvdJ9YK}UtXjU3_VDaw4*a2%b;kH{I&>=s-sVPdD$-WDl>D~;l%wq$j
z!*ZI*KJ#Y!fP1NsqY+{T390`%$_E?}i*JLhw}C;?b*5`7OXE~oQrFcyOR7m+d2LKf
zYLlvp^)Y>fjWJctYI#;*GZ-h?32g>rK~1w6_PSQUY?7T(#SCy(2{9d%e@Oc>^85~y
zy=N3N>#C0!A+~+p*EnM5&&N87AFIQ=5my+pvig1HuJS<f6CuNj8mi)fpRA|2=BGoI
z>(hYySmCi3GSY-dE_j>aaq>g5N2eg46TbGb@<OFm4w(Cj^Ommo*~e+|8B;nMQ-UeL
z<#^H`p_nxc%hitdOnaa>>I1pAm<oO<t@M^#;YLeXkJ@&_AF}b`Ph?1g-x6-q_-^UW
zy0K+9ETiIZW9WMryxnRb3;LEfFA+yftOrws9)58Im>bfNgV&6<WAC(wd<zMN$P82W
zf(!3fK#?21dx+v8AjxAedv~}T=`!lLKC)KZ;fqFadQ}Ky=mxkV?;XY<5iA<T(MWaS
z2(6009mTC(+w;9Be|PoP^5)u|_0r9?jZ*pDwOgx^j$+A)bXoD}giu;a6mPh-nk6FL
zcDCF|YXBzkv?3gki&Cq*RjWmbI*M7uNTeYs6!nO0NoO^)Lki^)vyM8zB+)rka!Ggm
z5e(6r#h~aa%i=6(NmYj#>oDfHI>wSP_5u?Z0a4Wm4#zQEXK_hW{+g1gDn8;2DKIWR
zf>AQ0Ak9}oq{n?#>~f?*yxHX%*FV5&UF9&u6*6$WrgUPvXF?qzcBT_Yl7!)Q5@D>a
z`>`;w50gjwTtWJb!vSLS+=+!TqOqhnvjZ9*VJFPClj!d1zUMJYdvK1pPh%ofFd~d&
zOua}VrKm_@=XSS|ep9kkeu(sx?qtG@`xP)}{P@1=eJy7s$LE9DKf+9nbuv6%r}8@s
znKC2|=x4*M_&iMUT$l!xbeM%B>2E8Y)XwLSAyd!tkucSFuIDWH*>kqH!ACneKNsfs
z7~FrhGZK!}NBFpOV9bCme;n8uoR&}Yg28Bg4Df}074D4koC@<WSBrH<C5>s~EF3*>
zV}HD!4@WUh(8Xu?D?imcW8ql6u&+So@lGD|WBkNp4I23?a7^-(v;rAs!#v4A^lM5<
z3CEwSA}gt!3dhkR7(YfT1>EUw0CLiJcbuOg_`S3KRDC*xB%KMl<5hnqoRHz_Y%g3*
zln@sI^~&Qk@csm$sDMy3FFANEEc7)cpU+8p=MU2(p7=zk@T|}cPzY>Sf$@aI_lF+7
zDY^dwinYRlPX2u-pYTWPC+o9eZl8ghKjY~X%^AWk2dBki<(DYyWr$j_g;n(%dj>*~
z*ps<@kX4LcwJpQKql#zs5<aE5ej#o<z9qIQRZC)%!?LJ(i*jb*@cfGK%f8*Tj0@!H
zi$;l747p-E##-qP71Yg&Z(R5Rj=9^n=T=rWZ{A$IeS5J~Dh?1KVIM-Xis1WyBSF{(
zzn*b{+f^UD+>xOOeM|oZH7^4NAY)%dxa}4&7mb6|e9^G{YB$D{jFQK@mfsf6kmL=-
z%|+uDM#J42=o0F71^0}KXAlnvgy@YdPofA8`ky(QzhoB&<vz786tzmYyaEO)^WV>Q
zP=wc&edQPGj$Z1vOK34-(6ik_C!)?@ikNpKg1$^I5ocAm*}DE)LPxmcN>CJShboG}
zr<E2lztnKi*Ll}5>F)|2HeKFE{-aQAG;MT#)Fm&xIe&Fdn#s9yhUd0L6&Vr|KB3C-
z+0mZbAfO>?149aQV~!g^%9xvzdGLIYIgfeU@$H6jJ}A(G+xA;+f6m`)Sq5dFm&`b%
zDymUDx+o(+QIn)7f&UXSXeuwXd#IRq5Pabb>PvQJUFxW)3hEOh_T;F_QcE?<v36VH
z`j>>2G^Q)Th5x0o{-!_!&%yZJwnI4(7S(K`8bB^SzTh`o3rEn*d)pCv8f0HRy+Hjf
zG`gLRSbE+58=ThEy*76Pb*^nv_7#}fL1uN=s<wULUNkm~8VXULLq+nABpc;XV!((!
z)UuBCv|&4zC#bIvW^Z&^F}AwZ#Z%kgHjwbq(W90(0u?-->BDrDZd};11x)(NrAtN?
zWxoe6`sNElrp?dK2PgZ`Lb?Mg7OUCv_l)l5;8dwyty-S9)ovi>w6GlzvY?{+3e-kW
zkaQhH)+@ougStmTcY(Xu06u%S^0bG$&)Bzy7_<KlF@x!D#bfoAbnpR+MwGVE#2`pH
zRZA2h2D}GFoZo8{tX=%=;euN4NGAtCxnlWI{@^dh%JSxNd1dXcI4)u8uI-3dF%?u;
z2!j@5PX$^FaYD|k+i)H<KkY{?;gTw%*haUE+F09IjWT2$_*_&)M(M5+>05{kgzg*-
zrzIZpJx{zw^HEaZ3e?6eDXGBEJ4KwNf$Tqu3#*ABRy<~=52CS|><82+(F=Zpekho^
z;qd2~S(rVH6<v=Fdeelo+ux9<m#a-4X>vdd6~&)a8g0unb6tdJ+2mu!t0ErxWvVB|
z>qH|-Ml3fcdRh?I2=hA_BArY@yg|?O|ED|1-y_G4cuHA@sE+~U2@Fau%aXF2LD8pT
zre#?g<zJFzHC;QQ#aW(Bp(l!?5Y*X}I;K*I2pBe{WwjA?Qq$2Fp?5;hl*QgDbWw43
zLQTtFY7*-O6p=INil}2MXd_HeJT9{J;-vh#^3#Y_tVIc`f#Hy5oU~W*BDKY%6n;zH
zlC`lk5p(1QzT0wnb^Ts>V|nwYSjJ9qiiQ{s^p8iHm6_<ZkZ7m-f7%3P0coYuI%VZ8
zt6Ry_kmjTgrTUUf-MD?7;+J<51A4;liLQ|3*5lL}{`V98B=ku8@@i7Y`Mu=yzX1KA
B;d1~0

literal 0
HcmV?d00001

diff --git a/cgi-bin/data.py b/cgi-bin/data.py
new file mode 100755
index 0000000..eb0a55f
--- /dev/null
+++ b/cgi-bin/data.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python3
+import cgi
+import cgitb
+import json
+import sys
+import os
+import logging
+import pandas as pd
+
+# Enable detailed CGI error reporting
+cgitb.enable()
+
+# Set up logging
+LOG_FILE = "latency_viewer.log"
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s - %(levelname)s - %(message)s',
+    filename=LOG_FILE,
+    filemode='a'
+)
+logger = logging.getLogger()
+
+# Import functions from our shared module
+# Adjust the path as needed to find the module
+sys.path.append(os.path.dirname(os.path.abspath(__file__)))
+from sat_db_functions import run_sat_latency_query, get_canonical_id, get_all_variants
+
+def data_endpoint():
+    """
+    API endpoint to query satellite latency data directly from the database
+    """
+    try:
+        # Get query parameters
+        form = cgi.FieldStorage()
+        start_date = form.getvalue("start_date")
+        end_date = form.getvalue("end_date")
+        start_hour = form.getvalue("start_hour", "00:00")
+        end_hour = form.getvalue("end_hour", "23:59")
+        
+        # Convert date and time to ISO format
+        start_datetime = f"{start_date}T{start_hour}:00"
+        end_datetime = f"{end_date}T{end_hour}:59"
+        
+        # Get filter parameters
+        satellite_id = form.getvalue("satellite_id")
+        coverage = form.getvalue("coverage")
+        instrument = form.getvalue("instrument")
+        
+        # Prepare filters
+        filters = {}
+        if satellite_id:
+            # Get the canonical form
+            canonical_id = get_canonical_id(satellite_id)
+            
+            # Get all variants of this canonical ID
+            all_variants = get_all_variants(canonical_id)
+            
+            # Use all variants in the filter
+            filters["satellite-id"] = all_variants
+            
+            logger.info(f"Expanded satellite ID {satellite_id} to variants: {all_variants}")
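+            # e.g. a request for "n20" maps to canonical "NOAA-20" and queries both "NOAA-20" and "n20"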
+        
+        if coverage:
+            filters["coverage"] = coverage
+        
+        if instrument:
+            filters["instrument"] = instrument
+        
+        logger.info(f"Data request - Period: {start_datetime} to {end_datetime}, Filters: {filters}")
+        
+        # Query the database
+        data = run_sat_latency_query(start_datetime, end_datetime, filters)
+        
+        if not data:
+            return {"message": "No data available for the selected period.", "data": []}
+        
+        # Convert to DataFrame for easier processing
+        df = pd.DataFrame(data)
+        
+        # Clean and process data
+        try:
+            # Normalize column names (case-insensitive matching)
+            df.columns = [col.lower() for col in df.columns]
+            
+            # Clean latency data
+            df['latency'] = pd.to_numeric(df['latency'], errors='coerce')
+            df = df.dropna(subset=['latency'])
+            
+            # Add missing columns with 'Not Available' default
+            default_columns = ['ingest_source', 'coverage', 'instrument', 'band', 'section', 'satellite_id']
+            for col in default_columns:
+                if col not in df.columns:
+                    logger.warning(f"Column '{col}' not found. Adding with default value.")
+                    df[col] = 'Not Available'
+            
+            # Fill NaN values with "Not Available"
+            for col in default_columns:
+                df[col] = df[col].fillna('Not Available')
+            
+            # Add canonical_satellite_id column
+            if 'satellite_id' in df.columns:
+                df['canonical_satellite_id'] = df['satellite_id'].apply(get_canonical_id)
+            
+            # Convert timestamps to string for JSON serialization
+            if 'start_time' in df.columns:
+                df['start_time'] = pd.to_datetime(df['start_time']).astype(str)
+            
+            # Convert to records and handle NaN values
+            result = df.replace({pd.NA: "Not Available", pd.NaT: "Not Available"}).to_dict(orient="records")
+            
+            return {
+                "data": result,
+                "metadata": {
+                    "instruments": df['instrument'].unique().tolist(),
+                    "coverages": df['coverage'].unique().tolist(),
+                    "total_records": len(result)
+                }
+            }
+            
+        except Exception as e:
+            logger.error(f"Error during data processing: {str(e)}", exc_info=True)
+            return {"message": f"Data processing error: {str(e)}"}, 500
+            
+    except Exception as e:
+        logger.error(f"Error processing data request: {str(e)}", exc_info=True)
+        return {"message": f"Internal Server Error: {str(e)}"}, 500
+
+# Main entry point for CGI
+if __name__ == "__main__":
+    # Set content-type header for JSON response
+    print("Content-Type: application/json")
+    print()  # Empty line after headers
+    
+    # Get the result from our endpoint function
+    response = data_endpoint()
+    result, status_code = response if isinstance(response, tuple) else (response, 200)
+    
+    # If there's an error code, log it (CGI can't easily send HTTP status codes)
+    if status_code != 200:
+        logger.warning(f"Returning error with status code {status_code}: {result}")
+    
+    # Print JSON response
+    print(json.dumps(result))
\ No newline at end of file
diff --git a/cgi-bin/metadata.py b/cgi-bin/metadata.py
new file mode 100755
index 0000000..eb20928
--- /dev/null
+++ b/cgi-bin/metadata.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+import cgi
+import json
+import os
+import sys
+
+# Print headers
+print("Content-Type: application/json")
+print()  # Empty line after headers
+
+# Get script directory
+script_dir = os.path.dirname(os.path.abspath(__file__))
+
+# Satellite ID mappings
+SATELLITE_ID_MAPPINGS = {
+    'G16': 'G16', 'g16': 'G16',
+    'G18': 'G18', 'g18': 'G18',
+    'G19': 'G19', 'g19': 'G19',
+    'DMSP-17': 'DMSP-17', 'dmsp17': 'DMSP-17',
+    'DMSP-18': 'DMSP-18', 'dmsp18': 'DMSP-18',
+    'DMSP-16': 'DMSP-16', 'dmsp16': 'DMSP-16',
+    'NOAA-19': 'NOAA-19', 'n19': 'NOAA-19',
+    'NOAA-20': 'NOAA-20', 'n20': 'NOAA-20',
+    'NOAA-21': 'NOAA-21', 'n21': 'NOAA-21'
+}
+
+try:
+    # Define the path to the relationships file
+    relationships_file = os.path.join(script_dir, "satellite_relationships.json")
+    
+    # Check if file exists
+    if not os.path.exists(relationships_file):
+        print(json.dumps({
+            "error": f"Relationships file not found: {relationships_file}",
+            "satellites": [],
+            "coverages": [],
+            "instruments": [],
+            "relationships": {}
+        }))
+        sys.exit(0)
+    
+    # Load the relationships data
+    with open(relationships_file, 'r') as f:
+        raw_relationships = json.load(f)
+    
+    # Create normalized data structure
+    normalized_data = {
+        "satellites": [],
+        "coverages": raw_relationships.get("coverages", []),
+        "instruments": raw_relationships.get("instruments", []),
+        "relationships": {},
+        "satellite_variants": {}
+    }
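+    # Illustrative output shape: {"satellites": ["G16", ...], "satellite_variants": {"G16": ["G16", "g16"]}, ...}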
+    
+    # Group satellites by canonical ID
+    satellite_groups = {}
+    for sat_id in raw_relationships.get("satellites", []):
+        canonical_id = SATELLITE_ID_MAPPINGS.get(sat_id, sat_id)
+        
+        if canonical_id not in satellite_groups:
+            satellite_groups[canonical_id] = []
+        
+        satellite_groups[canonical_id].append(sat_id)
+    
+    # Use canonical IDs as the satellite list
+    normalized_data["satellites"] = sorted(satellite_groups.keys())
+    
+    # Store variant mapping
+    normalized_data["satellite_variants"] = satellite_groups
+    
+    # Merge relationships for each canonical ID
+    for canonical_id, variants in satellite_groups.items():
+        normalized_data["relationships"][canonical_id] = {
+            "coverages": [],
+            "instruments": [],
+            "coverage_instruments": {}
+        }
+        
+        # Merge relationship data from all variants
+        for variant_id in variants:
+            if variant_id not in raw_relationships.get("relationships", {}):
+                continue
+                
+            original_relationship = raw_relationships["relationships"][variant_id]
+            
+            # Merge coverages
+            for coverage in original_relationship.get("coverages", []):
+                if coverage not in normalized_data["relationships"][canonical_id]["coverages"]:
+                    normalized_data["relationships"][canonical_id]["coverages"].append(coverage)
+            
+            # Merge instruments
+            for instrument in original_relationship.get("instruments", []):
+                if instrument not in normalized_data["relationships"][canonical_id]["instruments"]:
+                    normalized_data["relationships"][canonical_id]["instruments"].append(instrument)
+            
+            # Merge coverage_instruments
+            for coverage, instruments in original_relationship.get("coverage_instruments", {}).items():
+                if coverage not in normalized_data["relationships"][canonical_id]["coverage_instruments"]:
+                    normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage] = []
+                
+                for instrument in instruments:
+                    if instrument not in normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage]:
+                        normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage].append(instrument)
+        
+        # Sort arrays for consistent output
+        normalized_data["relationships"][canonical_id]["coverages"].sort()
+        normalized_data["relationships"][canonical_id]["instruments"].sort()
+        
+        for coverage in normalized_data["relationships"][canonical_id]["coverage_instruments"]:
+            normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage].sort()
+    
+    # Return the normalized data
+    print(json.dumps(normalized_data))
+
+except Exception as e:
+    import traceback
+    print(json.dumps({
+        "error": str(e),
+        "traceback": traceback.format_exc(),
+        "satellites": [],
+        "coverages": [],
+        "instruments": [],
+        "relationships": {}
+    }))
\ No newline at end of file
diff --git a/cgi-bin/sat_db_functions.py b/cgi-bin/sat_db_functions.py
new file mode 100755
index 0000000..c365b08
--- /dev/null
+++ b/cgi-bin/sat_db_functions.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python3
+import os
+import json
+import subprocess
+import logging
+import pandas as pd
+
+# Set up logging
+logger = logging.getLogger()
+
+# Path to conda environment and database
+CONDA_ENV_PATH = "/home/oper/.mdrexler_conda"
+SATELLITE_DATA_DIR = "/data/sat_latency"  # Path to your latency database
+RELATIONSHIPS_FILE = "satellite_relationships.json"  # Path to your prebuilt relationships file
+
+# Hard-coded mapping of satellite ID variations to canonical IDs
+# This makes it easy for future developers to add or modify mappings
+SATELLITE_ID_MAPPINGS = {
+    # Format: 'variant': 'canonical'
+    'G16': 'G16',
+    'g16': 'G16',
+    'G18': 'G18',
+    'g18': 'G18',
+    'G19': 'G19',
+    'g19': 'G19',
+    'DMSP-17': 'DMSP-17',
+    'dmsp17': 'DMSP-17',
+    'DMSP-18': 'DMSP-18',
+    'dmsp18': 'DMSP-18',
+    'DMSP-16': 'DMSP-16',
+    'dmsp16': 'DMSP-16',
+    'NOAA-19': 'NOAA-19',
+    'n19': 'NOAA-19',
+    'NOAA-20': 'NOAA-20',
+    'n20': 'NOAA-20',
+    'NOAA-21': 'NOAA-21',
+    'n21': 'NOAA-21'
+}
+
+# Create reverse mapping (canonical to variants)
+CANONICAL_TO_VARIANTS = {}
+for variant, canonical in SATELLITE_ID_MAPPINGS.items():
+    if canonical not in CANONICAL_TO_VARIANTS:
+        CANONICAL_TO_VARIANTS[canonical] = []
+    CANONICAL_TO_VARIANTS[canonical].append(variant)
+
+def get_canonical_id(satellite_id):
+    """Get canonical ID for a satellite ID variant"""
+    return SATELLITE_ID_MAPPINGS.get(satellite_id, satellite_id)
+
+def get_all_variants(canonical_id):
+    """Get all variants for a canonical satellite ID"""
+    return CANONICAL_TO_VARIANTS.get(canonical_id, [canonical_id])
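+# Example (from the mapping above): get_canonical_id("n20") -> "NOAA-20"; get_all_variants("NOAA-20") -> ["NOAA-20", "n20"]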
+
+def consolidate_satellite_data(original_data):
+    """
+    Consolidate satellite data using the hard-coded mapping
+    """
+    if not original_data:
+        return None
+        
+    normalized_data = {
+        "satellites": [],
+        "coverages": original_data.get("coverages", []),
+        "instruments": original_data.get("instruments", []),
+        "relationships": {},
+        "satellite_variants": {}  # Maps canonical IDs to their variants
+    }
+    
+    # Group satellites by canonical ID
+    satellite_groups = {}
+    
+    for sat_id in original_data.get("satellites", []):
+        canonical_id = get_canonical_id(sat_id)
+        
+        if canonical_id not in satellite_groups:
+            satellite_groups[canonical_id] = []
+        
+        satellite_groups[canonical_id].append(sat_id)
+    
+    # Use canonical IDs as the satellite list
+    normalized_data["satellites"] = sorted(satellite_groups.keys())
+    
+    # Store variant mapping
+    normalized_data["satellite_variants"] = satellite_groups
+    
+    # Merge relationships for each canonical ID
+    for canonical_id, variants in satellite_groups.items():
+        normalized_data["relationships"][canonical_id] = {
+            "coverages": [],
+            "instruments": [],
+            "coverage_instruments": {}
+        }
+        
+        # Merge relationship data from all variants
+        for variant_id in variants:
+            if variant_id not in original_data.get("relationships", {}):
+                continue
+                
+            original_relationship = original_data["relationships"][variant_id]
+            
+            # Merge coverages
+            for coverage in original_relationship.get("coverages", []):
+                if coverage not in normalized_data["relationships"][canonical_id]["coverages"]:
+                    normalized_data["relationships"][canonical_id]["coverages"].append(coverage)
+            
+            # Merge instruments
+            for instrument in original_relationship.get("instruments", []):
+                if instrument not in normalized_data["relationships"][canonical_id]["instruments"]:
+                    normalized_data["relationships"][canonical_id]["instruments"].append(instrument)
+            
+            # Merge coverage_instruments
+            for coverage, instruments in original_relationship.get("coverage_instruments", {}).items():
+                if coverage not in normalized_data["relationships"][canonical_id]["coverage_instruments"]:
+                    normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage] = []
+                
+                for instrument in instruments:
+                    if instrument not in normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage]:
+                        normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage].append(instrument)
+        
+        # Sort arrays for consistent output
+        normalized_data["relationships"][canonical_id]["coverages"].sort()
+        normalized_data["relationships"][canonical_id]["instruments"].sort()
+        
+        for coverage in normalized_data["relationships"][canonical_id]["coverage_instruments"]:
+            normalized_data["relationships"][canonical_id]["coverage_instruments"][coverage].sort()
+    
+    return normalized_data
+
+def load_relationship_data():
+    """
+    Load prebuilt satellite relationship data from JSON file and consolidate duplicates.
+    """
+    try:
+        if os.path.exists(RELATIONSHIPS_FILE):
+            with open(RELATIONSHIPS_FILE, 'r') as f:
+                relationships = json.load(f)
+                
+            # Consolidate satellite data to merge variants
+            consolidated = consolidate_satellite_data(relationships)
+            
+            if consolidated:
+                logger.info(f"Loaded and consolidated {len(consolidated['satellites'])} unique satellites from relationships")
+                return consolidated
+            else:
+                logger.warning("Failed to consolidate relationships data")
+                return relationships
+        else:
+            logger.warning(f"Relationships file not found: {RELATIONSHIPS_FILE}")
+            return None
+    except Exception as e:
+        logger.error(f"Error loading relationship data: {str(e)}")
+        return None
+
+def run_sat_latency_query(start_time, end_time, filters=None):
+    """
+    Directly query the satellite latency database using sat_latency_interface
+    
+    Args:
+        start_time (str): Start time in ISO format (YYYY-MM-DDTHH:MM:SS)
+        end_time (str): End time in ISO format (YYYY-MM-DDTHH:MM:SS)
+        filters (dict): Optional filters keyed by "satellite-id", "coverage", "instrument", etc.
+        
+    Returns:
+        list: List of latency records as dictionaries
+    """
+    # Expand satellite IDs to include all variants
+    if filters and "satellite-id" in filters:
+        satellite_id = filters["satellite-id"]
+        
+        # Handle comma-separated list of satellites
+        if isinstance(satellite_id, str) and ',' in satellite_id:
+            satellite_ids = [s.strip() for s in satellite_id.split(',')]
+            expanded_ids = []
+            
+            for sat_id in satellite_ids:
+                # Get the canonical form
+                canonical_id = get_canonical_id(sat_id)
+                
+                # Add all variants of this canonical ID
+                expanded_ids.extend(get_all_variants(canonical_id))
+            
+            # Remove duplicates
+            expanded_ids = list(set(expanded_ids))
+            filters["satellite-id"] = expanded_ids
+        # Handle single satellite ID
+        elif isinstance(satellite_id, str):
+            canonical_id = get_canonical_id(satellite_id)
+            filters["satellite-id"] = get_all_variants(canonical_id)
+    
+    # Build the command exactly as specified
+    base_cmd = "module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface"
+    
+    # Add database path and time parameters
+    cmd = f"{base_cmd} -d {SATELLITE_DATA_DIR} --from '{start_time}' --until '{end_time}' --output-type json"
+    
+    # Add filters if provided
+    if filters:
+        for key, values in filters.items():
+            if values and (isinstance(values, list) or isinstance(values, str)):
+                if isinstance(values, str):
+                    values = [values]
+                filter_values = " ".join(f'"{v}"' for v in values if v)
+                if filter_values:
+                    cmd += f" --{key} {filter_values}"
+    
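+    # Illustrative final command (after the module/conda prefix), assuming a single satellite filter:
+    #   ... sat_latency_interface -d /data/sat_latency --from '...' --until '...' --output-type json --satellite-id "G16" "g16"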
+    logger.info(f"Running command: {cmd}")
+    
+    try:
+        # Create a temporary shell script to run the command
+        script_path = "/tmp/run_sat_latency.sh"
+        with open(script_path, 'w') as f:
+            f.write("#!/bin/bash\n")
+            f.write(cmd + "\n")
+        
+        # Make the script executable
+        os.chmod(script_path, 0o755)
+        
+        # Run the script using sudo as the oper user
+        sudo_cmd = ["sudo", "-u", "oper", "-i", script_path]
+        
+        logger.info(f"Executing: {' '.join(sudo_cmd)}")
+        
+        # Use PIPE for stdout and stderr
+        process = subprocess.Popen(
+            sudo_cmd, 
+            stdout=subprocess.PIPE, 
+            stderr=subprocess.PIPE,
+            universal_newlines=True
+        )
+        
+        # Get the output and error
+        stdout, stderr = process.communicate()
+        
+        # Check if the command was successful
+        if process.returncode != 0:
+            logger.error(f"Command failed with exit code {process.returncode}: {stderr}")
+            return []
+        
+        # Log the first part of the output
+        if stdout:
+            logger.info(f"Command output (first 200 chars): {stdout[:200]}...")
+        else:
+            logger.warning("Command returned empty output")
+            
+        # Parse the JSON output
+        try:
+            data = json.loads(stdout)
+            logger.info(f"Successfully parsed JSON data: {len(data)} records found")
+            return data
+        except json.JSONDecodeError as e:
+            logger.error(f"Failed to parse JSON output: {e}")
+            logger.error(f"Raw output (first 500 chars): {stdout[:500]}...")
+            return []
+            
+    except Exception as e:
+        logger.error(f"Error executing command: {str(e)}")
+        return []
+    finally:
+        # Clean up temporary script
+        if os.path.exists(script_path):
+            os.remove(script_path)
\ No newline at end of file
diff --git a/cgi-bin/satellite_relationships.json b/cgi-bin/satellite_relationships.json
new file mode 100644
index 0000000..4c49bc9
--- /dev/null
+++ b/cgi-bin/satellite_relationships.json
@@ -0,0 +1,682 @@
+{
+    "satellites": [
+      "4B",
+      "DMSP-16",
+      "DMSP-17",
+      "DMSP-18",
+      "G16",
+      "G18",
+      "G19",
+      "GW1",
+      "H9",
+      "M10",
+      "M9",
+      "MetOp-1",
+      "MetOp-3",
+      "NOAA-15",
+      "NOAA-18",
+      "NOAA-19",
+      "S3A",
+      "S3B",
+      "composite",
+      "dmsp17",
+      "dmsp18",
+      "g16",
+      "g18",
+      "g2",
+      "j01",
+      "n19",
+      "n20",
+      "n21",
+      "snpp"
+    ],
+    "coverages": [
+      "CONUS",
+      "FLDK",
+      "Full Disk",
+      "JP01",
+      "JP02",
+      "JP03",
+      "JP04",
+      "Mesoscale-1",
+      "Mesoscale-2",
+      "Not Available",
+      "R301",
+      "R302",
+      "R303",
+      "R304",
+      "sun"
+    ],
+    "instruments": [
+      "ABI",
+      "AMSR2",
+      "ATMS",
+      "EXIS",
+      "GIIRS",
+      "GLM",
+      "MAG",
+      "Not Available",
+      "OMPS",
+      "SEIS",
+      "SLSTR",
+      "SUVI",
+      "VIIRS",
+      "ahi",
+      "amsu",
+      "argos",
+      "atms",
+      "avhrr",
+      "cris",
+      "eosdb",
+      "hirs",
+      "iasi",
+      "imager",
+      "mhs",
+      "mirs",
+      "misc",
+      "omps",
+      "scrif",
+      "seviri",
+      "ssmi",
+      "viirs"
+    ],
+    "relationships": {
+      "G18": {
+        "coverages": [
+          "CONUS",
+          "Full Disk",
+          "Mesoscale-1",
+          "Mesoscale-2",
+          "Not Available",
+          "sun"
+        ],
+        "instruments": [
+          "ABI",
+          "EXIS",
+          "GLM",
+          "MAG",
+          "SEIS",
+          "SUVI",
+          "misc"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "ABI",
+            "EXIS",
+            "GLM",
+            "MAG",
+            "SEIS",
+            "SUVI",
+            "misc"
+          ],
+          "Mesoscale-1": [
+            "ABI"
+          ],
+          "sun": [
+            "SUVI"
+          ],
+          "Mesoscale-2": [
+            "ABI"
+          ],
+          "CONUS": [
+            "ABI"
+          ],
+          "Full Disk": [
+            "ABI"
+          ]
+        }
+      },
+      "G16": {
+        "coverages": [
+          "CONUS",
+          "Full Disk",
+          "Mesoscale-1",
+          "Mesoscale-2",
+          "Not Available",
+          "sun"
+        ],
+        "instruments": [
+          "ABI",
+          "EXIS",
+          "GLM",
+          "MAG",
+          "SEIS",
+          "SUVI"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "EXIS",
+            "GLM",
+            "MAG",
+            "SEIS",
+            "SUVI"
+          ],
+          "Mesoscale-1": [
+            "ABI"
+          ],
+          "sun": [
+            "SUVI"
+          ],
+          "Mesoscale-2": [
+            "ABI"
+          ],
+          "CONUS": [
+            "ABI"
+          ],
+          "Full Disk": [
+            "ABI"
+          ]
+        }
+      },
+      "G19": {
+        "coverages": [
+          "CONUS",
+          "Full Disk",
+          "Mesoscale-1",
+          "Mesoscale-2",
+          "Not Available"
+        ],
+        "instruments": [
+          "ABI",
+          "EXIS",
+          "GLM",
+          "MAG",
+          "SEIS",
+          "SUVI"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "EXIS",
+            "GLM",
+            "MAG",
+            "SEIS",
+            "SUVI"
+          ],
+          "Mesoscale-1": [
+            "ABI"
+          ],
+          "Mesoscale-2": [
+            "ABI"
+          ],
+          "CONUS": [
+            "ABI"
+          ],
+          "Full Disk": [
+            "ABI"
+          ]
+        }
+      },
+      "M9": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "seviri"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "seviri"
+          ]
+        }
+      },
+      "M10": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "seviri"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "seviri"
+          ]
+        }
+      },
+      "g16": {
+        "coverages": [
+          "CONUS",
+          "Full Disk"
+        ],
+        "instruments": [
+          "ABI"
+        ],
+        "coverage_instruments": {
+          "CONUS": [
+            "ABI"
+          ],
+          "Full Disk": [
+            "ABI"
+          ]
+        }
+      },
+      "g18": {
+        "coverages": [
+          "CONUS",
+          "Full Disk"
+        ],
+        "instruments": [
+          "ABI"
+        ],
+        "coverage_instruments": {
+          "CONUS": [
+            "ABI"
+          ],
+          "Full Disk": [
+            "ABI"
+          ]
+        }
+      },
+      "4B": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "GIIRS"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "GIIRS"
+          ]
+        }
+      },
+      "H9": {
+        "coverages": [
+          "FLDK",
+          "JP01",
+          "JP02",
+          "JP03",
+          "JP04",
+          "R301",
+          "R302",
+          "R303",
+          "R304"
+        ],
+        "instruments": [
+          "ahi"
+        ],
+        "coverage_instruments": {
+          "FLDK": [
+            "ahi"
+          ],
+          "JP04": [
+            "ahi"
+          ],
+          "R301": [
+            "ahi"
+          ],
+          "R302": [
+            "ahi"
+          ],
+          "JP01": [
+            "ahi"
+          ],
+          "JP03": [
+            "ahi"
+          ],
+          "R303": [
+            "ahi"
+          ],
+          "R304": [
+            "ahi"
+          ],
+          "JP02": [
+            "ahi"
+          ]
+        }
+      },
+      "n20": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "Not Available",
+          "VIIRS",
+          "atms",
+          "cris",
+          "omps",
+          "scrif",
+          "viirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "Not Available",
+            "VIIRS",
+            "atms",
+            "cris",
+            "omps",
+            "scrif",
+            "viirs"
+          ]
+        }
+      },
+      "n21": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "Not Available",
+          "OMPS",
+          "VIIRS",
+          "atms",
+          "cris",
+          "omps",
+          "scrif",
+          "viirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "Not Available",
+            "OMPS",
+            "VIIRS",
+            "atms",
+            "cris",
+            "omps",
+            "scrif",
+            "viirs"
+          ]
+        }
+      },
+      "g2": {
+        "coverages": [
+          "Full Disk"
+        ],
+        "instruments": [
+          "imager"
+        ],
+        "coverage_instruments": {
+          "Full Disk": [
+            "imager"
+          ]
+        }
+      },
+      "composite": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "Not Available",
+          "VIIRS",
+          "mirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "Not Available",
+            "VIIRS",
+            "mirs"
+          ]
+        }
+      },
+      "j01": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "Not Available",
+          "OMPS",
+          "VIIRS"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "Not Available",
+            "OMPS",
+            "VIIRS"
+          ]
+        }
+      },
+      "MetOp-3": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "amsu",
+          "argos",
+          "avhrr",
+          "iasi",
+          "mhs",
+          "mirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "amsu",
+            "argos",
+            "avhrr",
+            "iasi",
+            "mhs",
+            "mirs"
+          ]
+        }
+      },
+      "MetOp-1": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "amsu",
+          "argos",
+          "avhrr",
+          "iasi",
+          "mhs",
+          "mirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "amsu",
+            "argos",
+            "avhrr",
+            "iasi",
+            "mhs",
+            "mirs"
+          ]
+        }
+      },
+      "snpp": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "ATMS",
+          "Not Available",
+          "OMPS",
+          "VIIRS",
+          "atms",
+          "cris",
+          "eosdb",
+          "omps",
+          "scrif",
+          "viirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "ATMS",
+            "Not Available",
+            "OMPS",
+            "VIIRS",
+            "atms",
+            "cris",
+            "eosdb",
+            "omps",
+            "scrif",
+            "viirs"
+          ]
+        }
+      },
+      "NOAA-15": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "amsu",
+          "argos",
+          "avhrr",
+          "hirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "amsu",
+            "argos",
+            "avhrr",
+            "hirs"
+          ]
+        }
+      },
+      "NOAA-18": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "amsu",
+          "argos",
+          "avhrr",
+          "hirs",
+          "mhs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "amsu",
+            "argos",
+            "avhrr",
+            "hirs",
+            "mhs"
+          ]
+        }
+      },
+      "S3A": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "SLSTR"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "SLSTR"
+          ]
+        }
+      },
+      "DMSP-18": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "ssmi"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "ssmi"
+          ]
+        }
+      },
+      "DMSP-17": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "ssmi"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "ssmi"
+          ]
+        }
+      },
+      "NOAA-19": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "amsu",
+          "argos",
+          "avhrr",
+          "hirs",
+          "mhs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "amsu",
+            "argos",
+            "avhrr",
+            "hirs",
+            "mhs"
+          ]
+        }
+      },
+      "S3B": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "SLSTR"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "SLSTR"
+          ]
+        }
+      },
+      "GW1": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "AMSR2"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "AMSR2"
+          ]
+        }
+      },
+      "dmsp17": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "mirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "mirs"
+          ]
+        }
+      },
+      "dmsp18": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "mirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "mirs"
+          ]
+        }
+      },
+      "DMSP-16": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "ssmi"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "ssmi"
+          ]
+        }
+      },
+      "n19": {
+        "coverages": [
+          "Not Available"
+        ],
+        "instruments": [
+          "mirs"
+        ],
+        "coverage_instruments": {
+          "Not Available": [
+            "mirs"
+          ]
+        }
+      }
+    }
+  }
\ No newline at end of file
diff --git a/cgi-bin/satellites.py b/cgi-bin/satellites.py
new file mode 100755
index 0000000..f5a38a4
--- /dev/null
+++ b/cgi-bin/satellites.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python3
+import cgi
+import json
+import sys
+import os
+
+# Print headers
+print("Content-Type: application/json")
+print()  # Empty line after headers
+
+# Get script directory
+script_dir = os.path.dirname(os.path.abspath(__file__))
+
+# Satellite ID mappings
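+# Variant spellings seen in the data (e.g. "n19", "dmsp17") are collapsed onto a
+# single canonical ID so the satellite list shows one entry per platform.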
+SATELLITE_ID_MAPPINGS = {
+    'G16': 'G16', 'g16': 'G16',
+    'G18': 'G18', 'g18': 'G18',
+    'G19': 'G19', 'g19': 'G19',
+    'DMSP-17': 'DMSP-17', 'dmsp17': 'DMSP-17',
+    'DMSP-18': 'DMSP-18', 'dmsp18': 'DMSP-18',
+    'DMSP-16': 'DMSP-16', 'dmsp16': 'DMSP-16',
+    'NOAA-19': 'NOAA-19', 'n19': 'NOAA-19',
+    'NOAA-20': 'NOAA-20', 'n20': 'NOAA-20',
+    'NOAA-21': 'NOAA-21', 'n21': 'NOAA-21'
+}
+
+try:
+    # Get query parameters
+    form = cgi.FieldStorage()
+    date_str = form.getvalue("date")
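+    # NOTE: the date parameter is accepted for interface consistency but is not
+    # currently used to filter the list of satellites returned below.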
+    
+    # Define the path to the relationships file
+    relationships_file = os.path.join(script_dir, "satellite_relationships.json")
+    
+    # Check if file exists
+    if not os.path.exists(relationships_file):
+        print(json.dumps({
+            "error": f"Relationships file not found: {relationships_file}",
+            "satellites": [],
+            "baseDir": "/data/sat_latency"
+        }))
+        sys.exit(0)
+    
+    # Load the relationships data
+    with open(relationships_file, 'r') as f:
+        raw_relationships = json.load(f)
+    
+    # Group satellites by canonical ID
+    satellite_groups = {}
+    for sat_id in raw_relationships.get("satellites", []):
+        canonical_id = SATELLITE_ID_MAPPINGS.get(sat_id, sat_id)
+        
+        if canonical_id not in satellite_groups:
+            satellite_groups[canonical_id] = []
+        
+        satellite_groups[canonical_id].append(sat_id)
+    
+    # Create the normalized list of satellites
+    satellites = []
+    for canonical_id, variants in satellite_groups.items():
+        # Create display name with variants
+        display_name = canonical_id
+        if len(variants) > 1:
+            variant_str = ", ".join([v for v in variants if v != canonical_id])
+            if variant_str:
+                display_name = f"{canonical_id} ({variant_str})"
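+                # e.g. "NOAA-19 (n19)" when a lowercase archive variant exists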
+        
+        satellites.append({
+            "id": canonical_id,
+            "displayName": display_name,
+            "fileExists": True
+        })
+    
+    # Sort the satellites by ID
+    satellites.sort(key=lambda x: x["id"])
+    
+    # Return the response
+    print(json.dumps({
+        "satellites": satellites,
+        "baseDir": "/data/sat_latency",
+        "normalized": True  # Flag to indicate normalization was performed
+    }))
+
+except Exception as e:
+    import traceback
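+    # The full traceback is included in the JSON error response so failures
+    # surface in the browser's network tab during development.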
+    print(json.dumps({
+        "error": str(e),
+        "traceback": traceback.format_exc(),
+        "satellites": [],
+        "baseDir": "/data/sat_latency"
+    }))
\ No newline at end of file
diff --git a/index.html b/index.html
index 26b2cf6..d51aa4a 100644
--- a/index.html
+++ b/index.html
@@ -135,7 +135,8 @@
     <script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
     <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
     <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
-    <script src="{{ url_for('static', filename='js/scripts.js') }}"></script>
+    <!-- Updated JS path -->
+    <script src="assets/js/scripts.js"></script>
     
     <!-- Additional script for debugging toggle -->
     <script>
diff --git a/latency_viewer.log b/latency_viewer.log
new file mode 100644
index 0000000..0949f3a
--- /dev/null
+++ b/latency_viewer.log
@@ -0,0 +1,184 @@
+2025-03-07 16:50:02,658 - WARNING - Relationships file not found: satellite_relationships.json
+2025-03-07 16:50:02,658 - WARNING - Relationships file not found: satellite_relationships.json
+2025-03-07 16:51:47,864 - WARNING - Relationships file not found: satellite_relationships.json
+2025-03-07 16:51:47,864 - WARNING - Relationships file not found: satellite_relationships.json
+2025-03-07 16:51:52,682 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': '4B'}
+2025-03-07 16:51:52,682 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "4B"
+2025-03-07 16:51:52,682 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 16:51:57,885 - INFO - Command output (first 200 chars): [{"satellite_ID": "4B", "band": null, "coverage": null, "ingest_source": null, "instrument": "GIIRS", "section": null, "start_time": "2025-03-07T00:03:28+00:00", "latency": 776.71}, {"satellite_ID": "...
+2025-03-07 16:51:57,885 - INFO - Successfully parsed JSON data: 77 records found
+2025-03-07 16:51:57,982 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': '4B'}
+2025-03-07 16:51:57,982 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "4B"
+2025-03-07 16:51:57,982 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 16:52:01,160 - INFO - Command output (first 200 chars): [{"satellite_ID": "4B", "band": null, "coverage": null, "ingest_source": null, "instrument": "GIIRS", "section": null, "start_time": "2025-03-07T00:03:28+00:00", "latency": 776.71}, {"satellite_ID": "...
+2025-03-07 16:52:01,161 - INFO - Successfully parsed JSON data: 77 records found
+2025-03-07 16:52:08,507 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': 'composite'}
+2025-03-07 16:52:08,507 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "composite"
+2025-03-07 16:52:08,507 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 16:52:11,763 - WARNING - Command returned empty output
+2025-03-07 16:52:11,763 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 16:52:11,763 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 16:52:11,763 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': 'composite'}
+2025-03-07 16:52:11,763 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "composite"
+2025-03-07 16:52:11,764 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 16:52:14,972 - WARNING - Command returned empty output
+2025-03-07 16:52:14,972 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 16:52:14,972 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 16:52:57,421 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': 'G16'}
+2025-03-07 16:52:57,422 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "G16" "g16"
+2025-03-07 16:52:57,422 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 16:53:00,726 - INFO - Command output (first 200 chars): [{"satellite_ID": "G16", "band": "Not Available", "coverage": null, "ingest_source": "inge GLM GRB-R v1.0.0 : grbdelta.ssec.wisc.edu", "instrument": "GLM", "section": null, "start_time": "2025-03-07T0...
+2025-03-07 16:53:00,745 - INFO - Successfully parsed JSON data: 8668 records found
+2025-03-07 16:53:00,918 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': 'G16'}
+2025-03-07 16:53:00,918 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "G16" "g16"
+2025-03-07 16:53:00,918 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 16:53:04,131 - INFO - Command output (first 200 chars): [{"satellite_ID": "G16", "band": "Not Available", "coverage": null, "ingest_source": "inge GLM GRB-R v1.0.0 : grbdelta.ssec.wisc.edu", "instrument": "GLM", "section": null, "start_time": "2025-03-07T0...
+2025-03-07 16:53:04,149 - INFO - Successfully parsed JSON data: 8668 records found
+2025-03-07 17:02:04,459 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': '4B', 'instrument': 'GIIRS'}
+2025-03-07 17:02:04,459 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "4B" --instrument "GIIRS"
+2025-03-07 17:02:04,459 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:02:07,843 - INFO - Command output (first 200 chars): [{"satellite_ID": "4B", "band": null, "coverage": null, "ingest_source": null, "instrument": "GIIRS", "section": null, "start_time": "2025-03-07T00:03:28+00:00", "latency": 776.71}, {"satellite_ID": "...
+2025-03-07 17:02:07,844 - INFO - Successfully parsed JSON data: 77 records found
+2025-03-07 17:02:07,863 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': '4B', 'instrument': 'GIIRS'}
+2025-03-07 17:02:07,863 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "4B" --instrument "GIIRS"
+2025-03-07 17:02:07,863 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:02:10,832 - INFO - Command output (first 200 chars): [{"satellite_ID": "4B", "band": null, "coverage": null, "ingest_source": null, "instrument": "GIIRS", "section": null, "start_time": "2025-03-07T00:03:28+00:00", "latency": 776.71}, {"satellite_ID": "...
+2025-03-07 17:02:10,832 - INFO - Successfully parsed JSON data: 77 records found
+2025-03-07 17:02:15,169 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': 'G16', 'instrument': 'ABI'}
+2025-03-07 17:02:15,169 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "G16" "g16" --instrument "ABI"
+2025-03-07 17:02:15,170 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:02:18,196 - INFO - Command output (first 200 chars): [{"satellite_ID": "G16", "band": "1", "coverage": "Mesoscale-1", "ingest_source": "inge ABI GRB-R v1.0.0 : grbdelta.ssec.wisc.edu", "instrument": "ABI", "section": null, "start_time": "2025-03-07T00:0...
+2025-03-07 17:02:18,211 - INFO - Successfully parsed JSON data: 7311 records found
+2025-03-07 17:02:18,369 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': 'G16', 'instrument': 'ABI'}
+2025-03-07 17:02:18,369 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "G16" "g16" --instrument "ABI"
+2025-03-07 17:02:18,369 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:02:21,397 - INFO - Command output (first 200 chars): [{"satellite_ID": "G16", "band": "1", "coverage": "Mesoscale-1", "ingest_source": "inge ABI GRB-R v1.0.0 : grbdelta.ssec.wisc.edu", "instrument": "ABI", "section": null, "start_time": "2025-03-07T00:0...
+2025-03-07 17:02:21,411 - INFO - Successfully parsed JSON data: 7311 records found
+2025-03-07 17:06:17,380 - INFO - Expanded satellite ID 4B to variants: ['4B']
+2025-03-07 17:06:17,380 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['4B'], 'instrument': 'GIIRS'}
+2025-03-07 17:06:17,380 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "4B" --instrument "GIIRS"
+2025-03-07 17:06:17,381 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:06:20,651 - INFO - Command output (first 200 chars): [{"satellite_ID": "4B", "band": null, "coverage": null, "ingest_source": null, "instrument": "GIIRS", "section": null, "start_time": "2025-03-07T00:03:28+00:00", "latency": 776.71}, {"satellite_ID": "...
+2025-03-07 17:06:20,651 - INFO - Successfully parsed JSON data: 77 records found
+2025-03-07 17:06:20,671 - INFO - Expanded satellite ID 4B to variants: ['4B']
+2025-03-07 17:06:20,671 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['4B'], 'instrument': 'GIIRS'}
+2025-03-07 17:06:20,671 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "4B" --instrument "GIIRS"
+2025-03-07 17:06:20,671 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:06:23,696 - INFO - Command output (first 200 chars): [{"satellite_ID": "4B", "band": null, "coverage": null, "ingest_source": null, "instrument": "GIIRS", "section": null, "start_time": "2025-03-07T00:03:28+00:00", "latency": 776.71}, {"satellite_ID": "...
+2025-03-07 17:06:23,697 - INFO - Successfully parsed JSON data: 77 records found
+2025-03-07 17:06:34,484 - INFO - Expanded satellite ID G16 to variants: ['G16', 'g16']
+2025-03-07 17:06:34,484 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['G16', 'g16'], 'instrument': 'ABI'}
+2025-03-07 17:06:34,484 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "G16" "g16" --instrument "ABI"
+2025-03-07 17:06:34,484 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:06:37,804 - INFO - Command output (first 200 chars): [{"satellite_ID": "G16", "band": "1", "coverage": "Mesoscale-1", "ingest_source": "inge ABI GRB-R v1.0.0 : grbdelta.ssec.wisc.edu", "instrument": "ABI", "section": null, "start_time": "2025-03-07T00:0...
+2025-03-07 17:06:37,819 - INFO - Successfully parsed JSON data: 7311 records found
+2025-03-07 17:06:37,962 - INFO - Expanded satellite ID G16 to variants: ['G16', 'g16']
+2025-03-07 17:06:37,962 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['G16', 'g16'], 'instrument': 'ABI'}
+2025-03-07 17:06:37,962 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "G16" "g16" --instrument "ABI"
+2025-03-07 17:06:37,962 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:06:41,024 - INFO - Command output (first 200 chars): [{"satellite_ID": "G16", "band": "1", "coverage": "Mesoscale-1", "ingest_source": "inge ABI GRB-R v1.0.0 : grbdelta.ssec.wisc.edu", "instrument": "ABI", "section": null, "start_time": "2025-03-07T00:0...
+2025-03-07 17:06:41,039 - INFO - Successfully parsed JSON data: 7311 records found
+2025-03-07 17:07:56,791 - INFO - Expanded satellite ID NOAA-20 to variants: ['NOAA-20', 'n20']
+2025-03-07 17:07:56,791 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-20', 'n20'], 'instrument': 'VIIRS'}
+2025-03-07 17:07:56,791 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-20" "n20" --instrument "VIIRS"
+2025-03-07 17:07:56,791 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:08:00,425 - WARNING - Command returned empty output
+2025-03-07 17:08:00,425 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:08:00,425 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:08:00,426 - INFO - Expanded satellite ID NOAA-20 to variants: ['NOAA-20', 'n20']
+2025-03-07 17:08:00,426 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-20', 'n20'], 'instrument': 'VIIRS'}
+2025-03-07 17:08:00,426 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-20" "n20" --instrument "VIIRS"
+2025-03-07 17:08:00,426 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:08:03,402 - WARNING - Command returned empty output
+2025-03-07 17:08:03,402 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:08:03,402 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:08:27,679 - INFO - Expanded satellite ID NOAA-19 to variants: ['NOAA-19', 'n19']
+2025-03-07 17:08:27,679 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-19', 'n19'], 'instrument': 'amsu'}
+2025-03-07 17:08:27,679 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-19" "n19" --instrument "amsu"
+2025-03-07 17:08:27,679 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:08:30,979 - WARNING - Command returned empty output
+2025-03-07 17:08:30,979 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:08:30,979 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:08:30,979 - INFO - Expanded satellite ID NOAA-19 to variants: ['NOAA-19', 'n19']
+2025-03-07 17:08:30,979 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-19', 'n19'], 'instrument': 'amsu'}
+2025-03-07 17:08:30,979 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-19" "n19" --instrument "amsu"
+2025-03-07 17:08:30,980 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:08:34,271 - WARNING - Command returned empty output
+2025-03-07 17:08:34,271 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:08:34,271 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:09:42,447 - INFO - Expanded satellite ID NOAA-18 to variants: ['NOAA-18']
+2025-03-07 17:09:42,447 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-18'], 'instrument': 'amsu'}
+2025-03-07 17:09:42,447 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-18" --instrument "amsu"
+2025-03-07 17:09:42,447 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:09:45,528 - WARNING - Command returned empty output
+2025-03-07 17:09:45,528 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:09:45,528 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:09:45,529 - INFO - Expanded satellite ID NOAA-18 to variants: ['NOAA-18']
+2025-03-07 17:09:45,529 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-18'], 'instrument': 'amsu'}
+2025-03-07 17:09:45,529 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-18" --instrument "amsu"
+2025-03-07 17:09:45,529 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:09:48,515 - WARNING - Command returned empty output
+2025-03-07 17:09:48,515 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:09:48,515 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:11:02,269 - INFO - Expanded satellite ID NOAA-15 to variants: ['NOAA-15']
+2025-03-07 17:11:02,269 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-15'], 'instrument': 'amsu'}
+2025-03-07 17:11:02,269 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-15" --instrument "amsu"
+2025-03-07 17:11:02,269 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:11:05,306 - WARNING - Command returned empty output
+2025-03-07 17:11:05,306 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:11:05,306 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:11:05,306 - INFO - Expanded satellite ID NOAA-15 to variants: ['NOAA-15']
+2025-03-07 17:11:05,307 - INFO - Data request - Period: 2025-03-07T00:00:00 to 2025-03-07T23:59:59, Filters: {'satellite-id': ['NOAA-15'], 'instrument': 'amsu'}
+2025-03-07 17:11:05,307 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-07T00:00:00' --until '2025-03-07T23:59:59' --output-type json --satellite-id "NOAA-15" --instrument "amsu"
+2025-03-07 17:11:05,307 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:11:08,218 - WARNING - Command returned empty output
+2025-03-07 17:11:08,218 - ERROR - Failed to parse JSON output: Expecting value: line 1 column 1 (char 0)
+2025-03-07 17:11:08,219 - ERROR - Raw output (first 500 chars): ...
+2025-03-07 17:11:37,274 - INFO - Expanded satellite ID NOAA-20 to variants: ['NOAA-20', 'n20']
+2025-03-07 17:11:37,274 - INFO - Data request - Period: 2025-03-06T00:00:00 to 2025-03-06T23:59:59, Filters: {'satellite-id': ['NOAA-20', 'n20'], 'instrument': 'VIIRS'}
+2025-03-07 17:11:37,274 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-06T00:00:00' --until '2025-03-06T23:59:59' --output-type json --satellite-id "NOAA-20" "n20" --instrument "VIIRS"
+2025-03-07 17:11:37,274 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:11:41,744 - INFO - Command output (first 200 chars): [{"satellite_ID": "n20", "band": null, "coverage": null, "ingest_source": null, "instrument": "VIIRS", "section": null, "start_time": "2025-03-06T01:28:05+00:00", "latency": 8949.698}, {"satellite_ID"...
+2025-03-07 17:11:41,744 - INFO - Successfully parsed JSON data: 22 records found
+2025-03-07 17:11:41,762 - INFO - Expanded satellite ID NOAA-20 to variants: ['NOAA-20', 'n20']
+2025-03-07 17:11:41,762 - INFO - Data request - Period: 2025-03-06T00:00:00 to 2025-03-06T23:59:59, Filters: {'satellite-id': ['NOAA-20', 'n20'], 'instrument': 'VIIRS'}
+2025-03-07 17:11:41,762 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-06T00:00:00' --until '2025-03-06T23:59:59' --output-type json --satellite-id "NOAA-20" "n20" --instrument "VIIRS"
+2025-03-07 17:11:41,762 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:11:45,033 - INFO - Command output (first 200 chars): [{"satellite_ID": "n20", "band": null, "coverage": null, "ingest_source": null, "instrument": "VIIRS", "section": null, "start_time": "2025-03-06T01:28:05+00:00", "latency": 8949.698}, {"satellite_ID"...
+2025-03-07 17:11:45,033 - INFO - Successfully parsed JSON data: 22 records found
+2025-03-07 17:21:25,901 - INFO - Expanded satellite ID composite to variants: ['composite']
+2025-03-07 17:21:25,901 - INFO - Data request - Period: 2025-03-08T00:00:00 to 2025-03-08T23:59:59, Filters: {'satellite-id': ['composite'], 'instrument': 'VIIRS'}
+2025-03-07 17:21:25,901 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-08T00:00:00' --until '2025-03-08T23:59:59' --output-type json --satellite-id "composite" --instrument "VIIRS"
+2025-03-07 17:21:25,902 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:21:28,954 - ERROR - Command failed with exit code 1: Traceback (most recent call last):
+  File "/home/oper/.mdrexler_conda/bin/sat_latency_interface", line 8, in <module>
+    sys.exit(main())
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/_utils.py", line 50, in wrapper
+    return function(*args, **kwargs)
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/interface.py", line 285, in main
+    df = satellite_data_from_filters(
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/interface.py", line 266, in satellite_data_from_filters
+    df = read_satellite_data(
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/pipeline/load.py", line 101, in read_satellite_data
+    tbl = pa.Table.from_batches(
+  File "pyarrow/table.pxi", line 4760, in pyarrow.lib.Table.from_batches
+ValueError: Must pass schema, or at least one RecordBatch
+
+2025-03-07 17:21:28,955 - INFO - Expanded satellite ID composite to variants: ['composite']
+2025-03-07 17:21:28,955 - INFO - Data request - Period: 2025-03-08T00:00:00 to 2025-03-08T23:59:59, Filters: {'satellite-id': ['composite'], 'instrument': 'VIIRS'}
+2025-03-07 17:21:28,955 - INFO - Running command: module load miniconda/3.6-base && source activate ~/.mdrexler_conda && sat_latency_interface -d /data/sat_latency --from '2025-03-08T00:00:00' --until '2025-03-08T23:59:59' --output-type json --satellite-id "composite" --instrument "VIIRS"
+2025-03-07 17:21:28,955 - INFO - Executing: sudo -u oper -i /tmp/run_sat_latency.sh
+2025-03-07 17:21:31,907 - ERROR - Command failed with exit code 1: Traceback (most recent call last):
+  File "/home/oper/.mdrexler_conda/bin/sat_latency_interface", line 8, in <module>
+    sys.exit(main())
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/_utils.py", line 50, in wrapper
+    return function(*args, **kwargs)
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/interface.py", line 285, in main
+    df = satellite_data_from_filters(
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/interface.py", line 266, in satellite_data_from_filters
+    df = read_satellite_data(
+  File "/home/oper/.mdrexler_conda/lib/python3.9/site-packages/sat_latency/pipeline/load.py", line 101, in read_satellite_data
+    tbl = pa.Table.from_batches(
+  File "pyarrow/table.pxi", line 4760, in pyarrow.lib.Table.from_batches
+ValueError: Must pass schema, or at least one RecordBatch
+
diff --git a/logs/debug.log b/logs/debug.log
new file mode 100644
index 0000000..e69de29
-- 
GitLab