diff --git a/frontend/Dockerfile b/frontend/Dockerfile
index ea8e5cd..b5f6445 100644
--- a/frontend/Dockerfile
+++ b/frontend/Dockerfile
@@ -2,7 +2,7 @@ FROM python:3.10-slim
 
 # Install Python dependencies
-RUN pip install Flask requests tabulate qrcode
+RUN pip install Flask requests tabulate pdf417 qrcode
 
 WORKDIR /app/static
 RUN mkdir fonts
diff --git a/frontend/main.py b/frontend/main.py
index 1ee5ff1..c4cf3fc 100644
--- a/frontend/main.py
+++ b/frontend/main.py
@@ -1,17 +1,9 @@
 import os
 import requests
 import subprocess
-import json
-import zlib
-import base64
-import qrcode
-from io import StringIO
-from flask import Flask, render_template_string, jsonify, request
+from flask import Flask, render_template_string, jsonify, request, send_from_directory
 from socket import gethostbyname_ex
 from datetime import datetime
-from tabulate import tabulate # For ASCII tables
-import time # For timestamp conversion
-import uuid # For report IDs
 
 app = Flask(__name__)
 
@@ -25,16 +17,12 @@ TRACKING_METRICS_ORDER = [
     "Last offset", "RMS offset", "Frequency", "Residual freq", "Skew",
     "Root delay", "Root dispersion", "Update interval", "Leap status"
 ]
-# Define max widths for tracking table columns (approximate)
-TRACKING_COL_WIDTHS = [18] + [24] * 3 # Metric Name + 3 Nodes
 
 # Sources table config
 SOURCES_COLUMNS_ORDER = [
     "DWS PEER", "ModeState", "Name/IP address", "Stratum", "Poll",
     "Reach", "LastRx", "Last sample", "Std Dev"
 ]
-# Define widths for sources table columns
-SOURCES_COL_WIDTHS = [24, 10, 32, 10, 7, 7, 8, 15, 10]
 
 # Metric Definitions
 TRACKING_METRICS_DEFS = {
@@ -67,142 +55,866 @@ SOURCES_METRICS_DEFS = {
 }
 
 #
-# HTML Template - Radically simplified for TUI output
+# HTML Template - DWS Design System Compliant
 #
 HTML_TEMPLATE = """
+<div>
-{{ report_header }}
+  <header>
+    <div>
+      CATHODE
+    </div>
+  </header>
+
-SECTION 1: CURRENT TIME SYNCHRONIZATION
-════════════════════════════════════════════════════════════════════════════════
-TIME: --:--:--
-DATE: ----------
-STATUS: Syncing...
-CLOCK OFFSET: ---
-════════════════════════════════════════════════════════════════════════════════
+  <div>
+    <h1>DWS LLC NTP STATUS</h1>
+    <div>
+      GENERATED: {{ gen_time_utc }}
+    </div>
+  </div>
+
+  <section>
+    <h2>Section 1: Current Time Synchronization</h2>
+    <div>
+      <div>
+        <span>TIME:</span>
+        <span>--:--:--</span>
+      </div>
+      <div>
+        <span>DATE:</span>
+        <span>----------</span>
+      </div>
+      <div>
+        <span>STATUS:</span>
+        <span>Syncing...</span>
+      </div>
+      <div>
+        <span>CLOCK OFFSET:</span>
+        <span>---</span>
+      </div>
+    </div>
+  </section>
+
-SECTION 2: NODE TRACKING STATUS METRICS
-════════════════════════════════════════════════════════════════════════════════
+  <section>
+    <h2>Section 2: Node Tracking Status Metrics</h2>
+    {{ tracking_table_html | safe }}
+  </section>
+
-{{ tracking_table_ascii }}
+  <section>
+    <h2>Section 3: Upstream NTP Sources</h2>
+    {{ sources_table_html | safe }}
+  </section>
+
-════════════════════════════════════════════════════════════════════════════════
+  <section>
+    <h2>Section 4: Metric Definitions & Developer Information</h2>
+
+    <h3>Tracking Metrics Glossary</h3>
+    <dl>
+      {{ tracking_glossary_html | safe }}
+    </dl>
+
+    <h3>Sources Metrics Glossary</h3>
+    <dl>
+      {{ sources_glossary_html | safe }}
+    </dl>
+
-SECTION 3: UPSTREAM NTP SOURCES
-════════════════════════════════════════════════════════════════════════════════
+    <h3>Usage Information</h3>
+    <p>Use DWS as your NTP pool by setting time.dws.rip as your NTP source.</p>
+
-{{ sources_table_ascii }}
+    <footer>
+      DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
+      DWS LLC // UNITED STATES OF AMERICA // 2025
+    </footer>
+  </section>
+
-════════════════════════════════════════════════════════════════════════════════
-
-
-SECTION 4: METRIC DEFINITIONS & DEVELOPER INFORMATION
-════════════════════════════════════════════════════════════════════════════════
-
-TRACKING METRICS GLOSSARY:
-{{ tracking_glossary }}
-
-SOURCES METRICS GLOSSARY:
-{{ sources_glossary }}
-
-USAGE INFORMATION:
-USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
-
-DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
-DWS LLC // UNITED STATES OF AMERICA // 2025
-
-REPORT SNAPSHOT:
-Scan the QR code below to access this exact report state:
-
-{{ report_qr_code }}
-
-════════════════════════════════════════════════════════════════════════════════
-REPORT GENERATION COMPLETE {{ gen_time_utc }}
-END OF REPORT
-════════════════════════════════════════════════════════════════════════════════
-
+
+</div>
@@ -309,23 +1003,6 @@ END OF REPORT
"""
-# --- US Graphics Company Style Helpers ---
-def form_feed_separator(width=80):
- """Generate a form feed separator line like vintage computer printouts."""
- return "═" * width
-
-def report_header(report_id, timestamp):
- """Generate a vintage-style report header."""
- header = []
- header.append(form_feed_separator(80))
- header.append(f"GENERATED: {timestamp}".center(80))
- header.append(form_feed_separator(80))
- return "\n".join(header)
-
-def section_separator(title, width=80):
- """Generate a section separator with title."""
- return f"\n{title}\n{form_feed_separator(width)}\n"
-
def get_reporter_ips(service_name):
try: _, _, ips = gethostbyname_ex(service_name); return ips
except Exception as e: print(f"Error resolving service IPs: {e}"); return []
@@ -333,13 +1010,11 @@ def get_reporter_ips(service_name):
# --- NEW: Helper to convert Ref time (UTC) ---
def format_ref_time(timestamp_str):
try:
- # Input is like "1761142077.558355643" (Unix timestamp with fractions)
ts = float(timestamp_str)
dt = datetime.utcfromtimestamp(ts)
- # Format like: Wed Oct 22 14:08:24 2025 (UTC)
return dt.strftime('%a %b %d %H:%M:%S %Y') + " (UTC)"
except:
- return timestamp_str # Return original if conversion fails
+ return timestamp_str
# --- NEW: Helper to format floats nicely ---
def format_float(value_str, precision=3):
@@ -347,82 +1022,29 @@ def format_float(value_str, precision=3):
f_val = float(value_str)
return f"{f_val:.{precision}f}"
except:
- return value_str # Return original if not a float
+ return value_str
-# --- Report ID Encoding/Decoding ---
-def encode_state(fragments):
- """Encode NTP state into a URL-safe base64 compressed report ID."""
- try:
- # Create minimal state representation
- state = {
- "timestamp": datetime.utcnow().isoformat() + "Z",
- "fragments": fragments
- }
- # Convert to JSON, compress with zlib, then base64 encode
- json_str = json.dumps(state, separators=(',', ':')) # Compact JSON
- compressed = zlib.compress(json_str.encode('utf-8'), level=9)
- encoded = base64.urlsafe_b64encode(compressed).decode('ascii')
- return encoded
- except Exception as e:
- print(f"Error encoding state: {e}")
- return None
-def decode_state(report_id):
- """Decode a report ID back into NTP state fragments."""
- try:
- # Base64 decode, decompress, parse JSON
- compressed = base64.urlsafe_b64decode(report_id.encode('ascii'))
- json_str = zlib.decompress(compressed).decode('utf-8')
- state = json.loads(json_str)
- return state.get("fragments", []), state.get("timestamp")
- except Exception as e:
- print(f"Error decoding state: {e}")
- return None, None
-def generate_qr_code_ascii(url):
- """Generate ASCII QR code for the given URL."""
- try:
- qr = qrcode.QRCode(
- version=1, # Small version, will auto-increase if needed
- error_correction=qrcode.constants.ERROR_CORRECT_L,
- box_size=1,
- border=2,
- )
- qr.add_data(url)
- qr.make(fit=True)
-
- # Get the QR code matrix
- matrix = qr.get_matrix()
-
- # Convert to ASCII art using block characters
- ascii_qr = []
- for row in matrix:
- line = ""
- for cell in row:
- # Use full block for black, space for white
- line += "██" if cell else " "
- ascii_qr.append(line)
-
- return "\n".join(ascii_qr)
- except Exception as e:
- print(f"Error generating QR code: {e}")
- return f"[QR Code generation failed: {e}]"
+# --- Flask Static Files Route ---
+@app.route('/static/fonts/<filename>')
+def serve_font(filename):
+    """Serve font files from the fonts directory."""
+    return send_from_directory('fonts', filename)
@app.route('/api/time')
def get_server_time():
return jsonify({"time_utc": datetime.utcnow().isoformat() + "Z"})
-# --- NEW: Endpoint to just return the raw fragments ---
-# The JavaScript will use this to get the latest offset data
@app.route('/api/fragments')
def get_fragments_json():
fragments = []
ips = get_reporter_ips(REPORTER_SERVICE)
for ip in ips:
try:
- res = requests.get(f"http://{ip}:9898/fragment.json", timeout=1) # Shorter timeout
+ res = requests.get(f"http://{ip}:9898/fragment.json", timeout=1)
if res.status_code == 200: fragments.append(res.json())
- except: pass # Ignore errors fetching fragments for this endpoint
+ except: pass
fragments.sort(key=lambda x: x.get("node_id", "z"))
return jsonify(fragments)
@@ -442,142 +1064,182 @@ def format_glossary(metrics_defs):
lines.append(f" {metric.ljust(20)} - {definition}")
return "\n".join(lines)
-def render_report(fragments, report_id, gen_time, is_historical=False):
- """Render NTP report from fragments data."""
- error_msg = "No errors."
- meta_offset_ms = "N/A"
- meta_leap_status = "Unknown"
+def format_glossary_html(metrics_defs):
+    """Format metric definitions as HTML definition list."""
+    html_parts = []
+    for metric, definition in metrics_defs.items():
+        html_parts.append(f"<dt>{metric}</dt>")
+        html_parts.append(f"<dd>{definition}</dd>")
+    return "\n".join(html_parts)
- nodes_list = [f.get("node_id", "unknown") for f in fragments]
+def generate_tracking_table_html(fragments, nodes_list):
+    """Generate HTML table for tracking metrics."""
+    if not fragments:
+        return '<p>ERROR: Could not fetch data from any reporter pods.</p>'
-    # Generate QR code for the report URL
-    report_url = f"{BASE_URL}/report/{report_id}"
-    report_qr_code = generate_qr_code_ascii(report_url)
+    html = ['<table>']
-    # Generate ASCII Tracking Table with tabulate
-    tracking_rows = []
+    # Table header
+    html.append('<thead><tr>')
+    html.append('<th>Metric</th>')
+    for node_id in nodes_list:
+        html.append(f'<th>{node_id}</th>')
+    html.append('</tr></thead>')
+
+    # Table body
+    html.append('<tbody>')
     for metric in TRACKING_METRICS_ORDER:
-        row = [metric]
+        html.append('<tr>')
+        html.append(f'<td>{metric}</td>')
+
         for node_id in nodes_list:
             node_data = next((f for f in fragments if f.get("node_id") == node_id), None)
             value = "N/A"
             if node_data and isinstance(node_data.get("tracking"), dict):
                 raw_value = node_data["tracking"].get(metric, "N/A")
-                # Format specific fields for better readability
-                if metric == "Ref time (UTC)": value = format_ref_time(raw_value)
-                elif metric in ["System time", "Last offset", "RMS offset", "Residual freq", "Skew", "Root delay", "Root dispersion"]: value = format_float(raw_value, 6) # Higher precision
-                elif metric == "Frequency": value = format_float(raw_value, 3)
-                elif metric == "Update interval": value = format_float(raw_value, 1)
-                else: value = format_value(raw_value) # Use generic formatter
-            row.append(value)
-        tracking_rows.append(row)
+                if metric == "Ref time (UTC)":
+                    value = format_ref_time(raw_value)
+                elif metric in ["System time", "Last offset", "RMS offset", "Residual freq", "Skew", "Root delay", "Root dispersion"]:
+                    value = format_float(raw_value, 6)
+                elif metric == "Frequency":
+                    value = format_float(raw_value, 3)
+                elif metric == "Update interval":
+                    value = format_float(raw_value, 1)
+                else:
+                    value = format_value(raw_value)
-    # Add summary row
-    tracking_rows.append([form_feed_separator(18)] + [form_feed_separator(24)] * len(nodes_list))
-    tracking_rows.append([f"TOTAL NODES: {len(nodes_list)}"] + [""] * len(nodes_list))
+            html.append(f'<td>{value}</td>')
+        html.append('</tr>')
-    tracking_table_ascii = tabulate(
-        tracking_rows,
-        headers=["Metric"] + nodes_list,
-        tablefmt="fancy_grid",
-        stralign="left",
-        numalign="right"
-    )
+    html.append('</tbody>')
+    # Table footer with summary
+    html.append('<tfoot>')
+    html.append('<tr>')
+    html.append(f'<td colspan="{len(nodes_list) + 1}">')
+    html.append(f'TOTAL NODES: {len(nodes_list)}')
+    html.append('</td>')
+    html.append('</tr>')
+    html.append('</tfoot>')
+
+    html.append('</table>')
+    return '\n'.join(html)
+
+def generate_sources_table_html(fragments):
+    """Generate HTML table for NTP sources."""
+    if not fragments:
+        return '<p>ERROR: Could not fetch data from any reporter pods.</p>'
+
+    html = ['<table>']
+
+    # Table header
+    html.append('<thead><tr>')
+    for col in SOURCES_COLUMNS_ORDER:
+        if col in ["Stratum", "Poll", "Reach", "LastRx", "Last sample", "Std Dev"]:
+            html.append(f'<th class="numeric">{col}</th>')
+        else:
+            html.append(f'<th>{col}</th>')
+    html.append('</tr></thead>')
+
+    # Table body
+    html.append('<tbody>')
+
+    node_source_counts = {}
+    for f in fragments:
+        node_id = f.get("node_id", "unknown")
+        sources = f.get("sources", [])
+        node_source_counts[node_id] = len(sources) if sources else 0
+
+        if not sources:
+            html.append('<tr>')
+            html.append(f'<td>{node_id}</td>')
+            html.append('<td>N/A</td>')
+            html.append('<td colspan="7">No sources reported</td>')
+            html.append('</tr>')
+        else:
+            for source in sources:
+                html.append('<tr>')
+                html.append(f'<td>{format_value(node_id, 24)}</td>')
+                html.append(f'<td>{source.get("Mode", "?")}{source.get("State", "?")}</td>')
+                html.append(f'<td>{format_value(source.get("Name/IP address", "N/A"), 32)}</td>')
+                html.append(f'<td class="numeric">{format_value(source.get("Stratum", "N/A"))}</td>')
+                html.append(f'<td class="numeric">{format_value(source.get("Poll", "N/A"))}</td>')
+                html.append(f'<td class="numeric">{format_value(source.get("Reach", "N/A"))}</td>')
+                html.append(f'<td class="numeric">{format_value(source.get("LastRx", "N/A"))}</td>')
+                html.append(f'<td class="numeric">{format_float(source.get("Last sample", "N/A"), 6)}</td>')
+                html.append(f'<td class="numeric">{format_float(source.get("Std Dev", "N/A"), 3)}</td>')
+                html.append('</tr>')
+
+    html.append('</tbody>')
+
+    # Table footer with summary
+    total_sources = sum(node_source_counts.values())
+    html.append('<tfoot>')
+    html.append('<tr>')
+    html.append(f'<td colspan="{len(SOURCES_COLUMNS_ORDER)}">')
+    html.append(f'TOTAL SOURCES: {total_sources} | ')
+    html.append(f'NODES REPORTING: {len(node_source_counts)}')
+    html.append('</td>')
+    html.append('</tr>')
+    html.append('</tfoot>')
+
+    html.append('</table>')
+    return '\n'.join(html)
+
+
+def render_report(fragments, gen_time, is_historical=False):
+ """Render NTP report from fragments data."""
+ meta_offset_ms = "N/A"
+ meta_leap_status = "Unknown"
+
+ nodes_list = [f.get("node_id", "unknown") for f in fragments]
+
+ # Calculate metadata for meta description
total_offset_seconds = 0.0
valid_offset_count = 0
leap_statuses = set()
- # Generate Meta Description Summary
if fragments:
for frag in fragments:
tracking = frag.get("tracking", {})
if isinstance(tracking, dict) and "Error" not in tracking:
- # Collect Leap Status
leap = tracking.get("Leap status")
if leap:
leap_statuses.add(leap)
- # Collect Offset
offset_str = tracking.get("Last offset", 0.1)
try:
offset_seconds = float(offset_str)
total_offset_seconds += offset_seconds
valid_offset_count += 1
except (TypeError, ValueError):
- pass # Ignore if offset is missing or not a number
+ pass
if valid_offset_count > 0:
avg_offset_seconds = total_offset_seconds / valid_offset_count
- meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms" # Use ~ for average
+ meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms"
if len(leap_statuses) == 1:
meta_leap_status = leap_statuses.pop()
elif len(leap_statuses) > 1:
meta_leap_status = "Mixed"
- # else remains "Unknown" if no valid status found
- # Generate ASCII Sources Table with tabulate
- if not fragments:
- sources_table_ascii = "ERROR: Could not fetch data from any reporter pods."
- else:
- sources_rows = []
- node_source_counts = {}
+ # Generate HTML tables
+ tracking_table_html = generate_tracking_table_html(fragments, nodes_list)
+ sources_table_html = generate_sources_table_html(fragments)
- for f in fragments:
- node_id = f.get("node_id", "unknown")
- sources = f.get("sources", [])
- node_source_counts[node_id] = len(sources) if sources else 0
-
- if not sources:
- sources_rows.append([node_id, "N/A", "No sources reported", "N/A", "N/A", "N/A", "N/A", "N/A", "N/A"])
- else:
- for source in sources:
- row = [
- format_value(node_id, SOURCES_COL_WIDTHS[0]-2),
- f"{source.get('Mode', '?')}{source.get('State', '?')}",
- format_value(source.get("Name/IP address", "N/A"), SOURCES_COL_WIDTHS[2]-2),
- format_value(source.get("Stratum", "N/A")),
- format_value(source.get("Poll", "N/A")),
- format_value(source.get("Reach", "N/A")),
- format_value(source.get("LastRx", "N/A")),
- format_float(source.get("Last sample", "N/A"), 6), # Format sample offset
- format_float(source.get("Std Dev", "N/A"), 3) # Format Std Dev/Jitter
- ]
- sources_rows.append(row)
-
- # Add summary section
- sources_rows.append([form_feed_separator(SOURCES_COL_WIDTHS[0]-2)] + [form_feed_separator(w-2) for w in SOURCES_COL_WIDTHS[1:]])
- total_sources = sum(node_source_counts.values())
- summary_text = f"TOTAL SOURCES: {total_sources} | NODES REPORTING: {len(node_source_counts)}"
- sources_rows.append([summary_text] + [""] * (len(SOURCES_COLUMNS_ORDER) - 1))
-
- sources_table_ascii = tabulate(
- sources_rows,
- headers=SOURCES_COLUMNS_ORDER,
- tablefmt="fancy_grid",
- stralign="left",
- numalign="right"
- )
-
- # Generate report header
- report_header_text = report_header(report_id, gen_time)
-
- # Format glossaries
- tracking_glossary = format_glossary(TRACKING_METRICS_DEFS)
- sources_glossary = format_glossary(SOURCES_METRICS_DEFS)
+ # Generate HTML glossaries
+ tracking_glossary_html = format_glossary_html(TRACKING_METRICS_DEFS)
+ sources_glossary_html = format_glossary_html(SOURCES_METRICS_DEFS)
return render_template_string(
HTML_TEMPLATE,
gen_time_utc=gen_time,
- report_header=report_header_text,
- tracking_table_ascii=tracking_table_ascii,
- sources_table_ascii=sources_table_ascii,
- tracking_glossary=tracking_glossary,
- sources_glossary=sources_glossary,
- report_qr_code=report_qr_code,
+ tracking_table_html=tracking_table_html,
+ sources_table_html=sources_table_html,
+ tracking_glossary_html=tracking_glossary_html,
+ sources_glossary_html=sources_glossary_html,
is_historical=is_historical,
- meta_description=f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}.",
- error=error_msg
+ meta_description=f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}."
)
@app.route('/')
@@ -586,7 +1248,6 @@ def homepage():
fragments = []
ips = get_reporter_ips(REPORTER_SERVICE)
- # Fetch fragments from all reporter pods
for ip in ips:
try:
res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
@@ -599,41 +1260,10 @@ def homepage():
fragments.sort(key=lambda x: x.get("node_id", "z"))
- # Generate timestamp and report ID
gen_time = subprocess.run(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"], capture_output=True, text=True).stdout.strip()
- # Create encoded report ID from current state
- report_id = encode_state(fragments)
- if not report_id:
- report_id = str(uuid.uuid4())[:8].upper() # Fallback to random ID
+ return render_report(fragments, gen_time)
- return render_report(fragments, report_id, gen_time)
-
-@app.route('/report/<report_id>')
-def historical_report(report_id):
- """Display historical NTP report from encoded report ID."""
- fragments, timestamp = decode_state(report_id)
-
- if fragments is None:
-    return render_template_string("""
-<html>
-<head><title>Invalid Report ID</title></head>
-<body>
-<h1>ERROR: INVALID REPORT ID</h1>
-<p>
-The report ID you provided could not be decoded.
-Please check the URL and try again.
-</p>
-<a href="/">Return to live status</a>
-</body>
-</html>
-    """), 400
-
- # Use the timestamp from the encoded state, or fallback to "HISTORICAL"
- gen_time = timestamp if timestamp else "HISTORICAL"
-
- return render_report(fragments, report_id, gen_time, is_historical=True)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080)
diff --git a/frontend/pyproject.toml b/frontend/pyproject.toml
index fecc023..e9775b0 100644
--- a/frontend/pyproject.toml
+++ b/frontend/pyproject.toml
@@ -6,4 +6,7 @@ readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"flask>=3.1.2",
+ "requests>=2.32.3",
+ "tabulate>=0.9.0",
+ "pdf417>=1.2.0",
]
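
A minimal sketch of a client for the /api/fragments endpoint kept by this change, useful for eyeballing the JSON the frontend aggregates from the reporter pods. The base URL is an assumption for illustration (the app listens on port 8080 per app.run); the node_id, tracking, "Last offset", and "Leap status" field names come from the diff above.

# Illustrative only: polls /api/fragments and prints each node's offset and leap status.
import requests

BASE_URL = "http://localhost:8080"  # assumed address of the frontend service

def fetch_fragments(base_url=BASE_URL):
    """Fetch the per-node NTP fragments the frontend collects from reporter pods."""
    res = requests.get(f"{base_url}/api/fragments", timeout=5)
    res.raise_for_status()
    return res.json()  # list of fragment dicts, sorted by node_id

if __name__ == "__main__":
    for frag in fetch_fragments():
        tracking = frag.get("tracking", {})
        print(frag.get("node_id", "unknown"),
              "| Last offset:", tracking.get("Last offset", "N/A"),
              "| Leap status:", tracking.get("Leap status", "N/A"))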