# DWS NTP status aggregator — Flask app that renders a TUI-style NTP report
# from per-node reporter fragments.
import base64
import json
import os
import subprocess
import time  # For timestamp conversion
import uuid  # For report IDs
import zlib
from datetime import datetime, timezone
from io import StringIO
from socket import gethostbyname_ex

import qrcode
import requests
from flask import Flask, render_template_string, jsonify, request
from tabulate import tabulate  # For ASCII tables
|
|
|
|
app = Flask(__name__)

# Config (both overridable via environment variables)
# Headless service name that resolves to the individual reporter pod IPs.
REPORTER_SERVICE = os.environ.get("REPORTER_SERVICE", "ntp-reporter-svc.default.svc.cluster.local")
# Public base URL used when building snapshot links / QR codes.
BASE_URL = os.environ.get("BASE_URL", "https://time.dws.rip")

# Tracking table config: row order of per-node tracking metrics (SECTION 2)
TRACKING_METRICS_ORDER = [
    "Reference ID", "Ref Source IP", "Stratum", "Ref time (UTC)", "System time",
    "Last offset", "RMS offset", "Frequency", "Residual freq", "Skew",
    "Root delay", "Root dispersion", "Update interval", "Leap status"
]
# Define max widths for tracking table columns (approximate)
TRACKING_COL_WIDTHS = [18] + [24] * 3  # Metric Name + 3 Nodes

# Sources table config: column order of the upstream-sources table (SECTION 3)
SOURCES_COLUMNS_ORDER = [
    "DWS PEER", "ModeState", "Name/IP address", "Stratum", "Poll", "Reach",
    "LastRx", "Last sample", "Std Dev"
]
# Define widths for sources table columns
SOURCES_COL_WIDTHS = [24, 10, 32, 10, 7, 7, 8, 15, 10]

# Metric Definitions — rendered verbatim as the glossaries in SECTION 4
TRACKING_METRICS_DEFS = {
    "Reference ID": "Identifier of current time source (IP or refclock ID)",
    "Ref Source IP": "IP address of the reference time source",
    "Stratum": "Distance from primary time source (lower is better, 1-16)",
    "Ref time (UTC)": "Last time the reference was updated",
    "System time": "Offset between system clock and reference time (seconds)",
    "Last offset": "Offset of last clock update (seconds)",
    "RMS offset": "Root mean square of recent offset values (long-term average)",
    "Frequency": "Rate of system clock drift (ppm - parts per million)",
    "Residual freq": "Residual frequency error not yet corrected",
    "Skew": "Estimated error bound of frequency (accuracy metric)",
    "Root delay": "Total network delay to stratum-1 server (seconds)",
    "Root dispersion": "Total dispersion accumulated to stratum-1 server",
    "Update interval": "Time between clock updates (seconds)",
    "Leap status": "Leap second indicator (Normal, Insert, Delete, or Not synced)"
}

SOURCES_METRICS_DEFS = {
    "DWS PEER": "Node identifier for this NTP daemon instance",
    "ModeState": "Source mode (^=server, ==peer) & state (*=current sync)",
    "Name/IP address": "Hostname or IP address of the NTP source",
    "Stratum": "Stratum level of the source (1=primary reference)",
    "Poll": "Polling interval to source (log2 seconds, e.g., 6 = 64s)",
    "Reach": "Reachability register (377 octal = all 8 recent polls OK)",
    "LastRx": "Time since last successful response from source",
    "Last sample": "Offset measurement from last valid sample (seconds)",
    "Std Dev": "Standard deviation of offset (jitter measurement)"
}
|
|
|
|
#
|
|
# HTML Template - Radically simplified for TUI output
|
|
#
|
|
HTML_TEMPLATE = """<!DOCTYPE html>
|
|
<html lang="en">
|
|
<head>
|
|
<meta charset="UTF-8">
|
|
<title>DWS LLC NTP STATUS</title>
|
|
<link href="https://fonts.googleapis.com/css2?family=Roboto+Mono:wght@400&display=swap" rel="stylesheet">
|
|
<style>
|
|
body {
|
|
background-color: #000;
|
|
color: #FFBF00; /* Amber */
|
|
font-size: 14px; /* Slightly smaller for density */
|
|
margin: 0;
|
|
padding: 1em;
|
|
overflow-x: auto; /* Allow horizontal scroll if needed */
|
|
}
|
|
pre {
|
|
margin: 0;
|
|
padding: 0;
|
|
white-space: pre; /* Render exactly as text */
|
|
line-height: 1.3; /* Adjust spacing slightly */
|
|
}
|
|
b { /* Style for **INFO** */
|
|
font-weight: normal; /* Monospace fonts often look bold already */
|
|
color: #FFFF00; /* Yellow for INFO */
|
|
}
|
|
/* Spans for the dynamic clock parts */
|
|
#clock-time, #clock-date, #clock-status, #clock-offset {
|
|
color: #FFFFFF; /* White for clock details */
|
|
}
|
|
#clock-time {
|
|
font-weight: bold;
|
|
}
|
|
|
|
@font-face {
|
|
font-family: 'BerkeleyMono';
|
|
src: url('/static/fonts/BerkeleyMono-Regular.woff2') format('woff2');
|
|
font-weight: 400;
|
|
font-style: normal;
|
|
}
|
|
@font-face {
|
|
font-family: 'BerkeleyMono';
|
|
src: url('/static/fonts/BerkeleyMono-Bold.woff2') format('woff2');
|
|
font-weight: 700;
|
|
font-style: normal;
|
|
}
|
|
@font-face {
|
|
font-family: 'BerkeleyMono';
|
|
src: url('/static/fonts/BerkeleyMono-Oblique.woff2') format('woff2');
|
|
font-weight: 400;
|
|
font-style: italic;
|
|
}
|
|
@font-face {
|
|
font-family: 'BerkeleyMono';
|
|
src: url('/static/fonts/BerkeleyMono-Bold-Oblique.woff2') format('woff2');
|
|
font-weight: 700;
|
|
font-style: italic;
|
|
}
|
|
body, pre {
|
|
font-family: 'BerkeleyMono', monospace;
|
|
}
|
|
</style>
|
|
<meta name="description" content="{{ meta_description }}">
|
|
</head>
|
|
<body>
|
|
<pre>
|
|
{{ report_header }}
|
|
|
|
<b>SECTION 1: CURRENT TIME SYNCHRONIZATION</b>
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
TIME: <span id="clock-time">--:--:--</span>
|
|
DATE: <span id="clock-date">----------</span>
|
|
STATUS: <span id="clock-status">Syncing...</span>
|
|
CLOCK OFFSET: <span id="clock-offset">---</span>
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
|
|
|
|
<b>SECTION 2: NODE TRACKING STATUS METRICS</b>
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
|
|
{{ tracking_table_ascii }}
|
|
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
|
|
|
|
<b>SECTION 3: UPSTREAM NTP SOURCES</b>
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
|
|
{{ sources_table_ascii }}
|
|
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
|
|
|
|
<b>SECTION 4: METRIC DEFINITIONS & DEVELOPER INFORMATION</b>
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
|
|
<b>TRACKING METRICS GLOSSARY:</b>
|
|
{{ tracking_glossary }}
|
|
|
|
<b>SOURCES METRICS GLOSSARY:</b>
|
|
{{ sources_glossary }}
|
|
|
|
<b>USAGE INFORMATION:</b>
|
|
USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
|
|
|
|
DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
|
|
DWS LLC // UNITED STATES OF AMERICA // 2025
|
|
|
|
<b>REPORT SNAPSHOT:</b>
|
|
Scan the QR code below to access this exact report state:
|
|
|
|
<span style="font-size: 4px; line-height: 4px;">{{ report_qr_code }}</span>
|
|
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
REPORT GENERATION COMPLETE {{ gen_time_utc }}
|
|
END OF REPORT
|
|
════════════════════════════════════════════════════════════════════════════════
|
|
</pre>
|
|
|
|
<script>
|
|
const clockTimeSpan = document.getElementById('clock-time');
|
|
const clockDateSpan = document.getElementById('clock-date');
|
|
const clockStatusSpan = document.getElementById('clock-status');
|
|
const clockOffsetSpan = document.getElementById('clock-offset');
|
|
const isHistorical = {{ 'true' if is_historical else 'false' }};
|
|
let serverTimeOffsetMs = null; // Offset between client and DWS server in milliseconds
|
|
let clockUpdateInterval = null;
|
|
let syncInterval = null;
|
|
|
|
function updateClock() {
|
|
if (serverTimeOffsetMs === null) return;
|
|
// Apply offset to client's current time to get DWS server time
|
|
const now = new Date(new Date().getTime() + serverTimeOffsetMs);
|
|
|
|
// Format time and date exactly as requested
|
|
const hours = String(now.getUTCHours()).padStart(2, '0');
|
|
const minutes = String(now.getUTCMinutes()).padStart(2, '0');
|
|
const seconds = String(now.getUTCSeconds()).padStart(2, '0');
|
|
const timeString = `${hours}:${minutes}:${seconds}`;
|
|
|
|
const dateString = now.toLocaleDateString('en-US', {
|
|
weekday: 'long', year: 'numeric', month: 'long', day: 'numeric', timeZone: 'UTC'
|
|
}) + " (UTC)";
|
|
|
|
clockTimeSpan.textContent = timeString;
|
|
clockDateSpan.textContent = dateString;
|
|
}
|
|
|
|
async function syncClockAndOffset() {
|
|
clockStatusSpan.textContent = "Syncing...";
|
|
clockOffsetSpan.textContent = "---";
|
|
try {
|
|
// Fetch current time from this server
|
|
const timeResponse = await fetch('/api/time');
|
|
if (!timeResponse.ok) throw new Error(`Time API Error ${timeResponse.status}`);
|
|
const timeData = await timeResponse.json();
|
|
const serverTime = new Date(timeData.time_utc).getTime();
|
|
const clientTime = new Date().getTime();
|
|
serverTimeOffsetMs = serverTime - clientTime; // Recalculate offset
|
|
|
|
// Fetch latest fragments to try and get *a* representative offset
|
|
// This is slightly tricky as we don't know which node served this page
|
|
// We'll just grab the first node's offset for now
|
|
const reportResponse = await fetch('/api/fragments'); // Need a new endpoint
|
|
if (!reportResponse.ok) throw new Error(`Fragment API Error ${reportResponse.status}`);
|
|
const fragments = await reportResponse.json();
|
|
|
|
let reportedOffset = "N/A";
|
|
if (fragments && fragments.length > 0 && fragments[0].tracking && fragments[0].tracking['Last offset']) {
|
|
// Try converting the offset string (e.g., "+0.000069149") to milliseconds
|
|
try {
|
|
offsetSeconds = float(fragments[0].tracking['Last offset'])
|
|
reportedOffset = `${(offsetSeconds * 1000).toFixed(0)}ms`
|
|
} catch (e) { /* ignore parse error */ }
|
|
}
|
|
|
|
clockStatusSpan.textContent = "Synced";
|
|
// Display the offset calculated between *client* and DWS server
|
|
clockOffsetSpan.textContent = `${serverTimeOffsetMs * -1}ms (vs your clock)`;
|
|
|
|
// Start interval timer ONLY if it's not already running
|
|
if (!clockUpdateInterval) {
|
|
updateClock(); // Update immediately
|
|
clockUpdateInterval = setInterval(updateClock, 1000);
|
|
}
|
|
|
|
} catch (error) {
|
|
console.error('Error syncing time/offset:', error);
|
|
clockStatusSpan.textContent = `Sync Error`;
|
|
clockOffsetSpan.textContent = `---`;
|
|
serverTimeOffsetMs = 0; // Fallback to local time
|
|
if (!clockUpdateInterval) {
|
|
updateClock();
|
|
clockUpdateInterval = setInterval(updateClock, 1000);
|
|
}
|
|
}
|
|
}
|
|
|
|
function displayHistoricalTime(timestampStr) {
|
|
// Parse the ISO timestamp and display it statically
|
|
try {
|
|
const timestamp = new Date(timestampStr);
|
|
|
|
const hours = String(timestamp.getUTCHours()).padStart(2, '0');
|
|
const minutes = String(timestamp.getUTCMinutes()).padStart(2, '0');
|
|
const seconds = String(timestamp.getUTCSeconds()).padStart(2, '0');
|
|
const timeString = `${hours}:${minutes}:${seconds}`;
|
|
|
|
const dateString = timestamp.toLocaleDateString('en-US', {
|
|
weekday: 'long', year: 'numeric', month: 'long', day: 'numeric', timeZone: 'UTC'
|
|
}) + " (UTC)";
|
|
|
|
clockTimeSpan.textContent = timeString;
|
|
clockDateSpan.textContent = dateString;
|
|
clockStatusSpan.textContent = "Historical Snapshot";
|
|
clockOffsetSpan.textContent = "N/A";
|
|
} catch (error) {
|
|
console.error('Error parsing historical timestamp:', error);
|
|
clockTimeSpan.textContent = timestampStr;
|
|
clockDateSpan.textContent = "Historical";
|
|
clockStatusSpan.textContent = "Snapshot";
|
|
clockOffsetSpan.textContent = "N/A";
|
|
}
|
|
}
|
|
|
|
// Start the sync process when the page loads & repeat (only for live reports)
|
|
document.addEventListener('DOMContentLoaded', () => {
|
|
if (isHistorical) {
|
|
// For historical reports, display static timestamp
|
|
displayHistoricalTime("{{ gen_time_utc }}");
|
|
} else {
|
|
// For live reports, sync clock in real-time
|
|
syncClockAndOffset();
|
|
syncInterval = setInterval(syncClockAndOffset, 60 * 1000); // Resync every 60 seconds
|
|
}
|
|
});
|
|
</script>
|
|
</body>
|
|
</html>
|
|
"""
|
|
|
|
# --- US Graphics Company Style Helpers ---
|
|
def form_feed_separator(width=80):
    """Build a vintage-printout horizontal rule of `width` '═' characters."""
    rule = "═" * width
    return rule
|
|
|
|
def report_header(report_id, timestamp):
    """Compose the three-line vintage report banner.

    Note: report_id is accepted but not rendered here; the ID is surfaced
    elsewhere (snapshot URL / QR code).
    """
    rule = form_feed_separator(80)
    banner = f"GENERATED: {timestamp}".center(80)
    return "\n".join([rule, banner, rule])
|
|
|
|
def section_separator(title, width=80):
    """Return a newline-framed section title followed by a horizontal rule."""
    rule = form_feed_separator(width)
    return "".join(["\n", title, "\n", rule, "\n"])
|
|
|
|
def get_reporter_ips(service_name):
    """Resolve a (headless) service name to the list of backing pod IPs.

    Returns an empty list on resolution failure so callers can proceed
    with zero reporters instead of crashing.
    """
    try:
        _, _, ips = gethostbyname_ex(service_name)
        return ips
    except (OSError, UnicodeError) as e:  # socket.gaierror is an OSError subclass
        print(f"Error resolving service IPs: {e}")
        return []
|
|
|
|
# --- NEW: Helper to convert Ref time (UTC) ---
|
|
def format_ref_time(timestamp_str):
    """Convert a fractional Unix timestamp string to a human-readable UTC time.

    Input looks like "1761142077.558355643"; output looks like
    "Wed Oct 22 14:08:24 2025 (UTC)". The raw input is returned unchanged
    when it cannot be parsed.
    """
    try:
        ts = float(timestamp_str)
        # Timezone-aware replacement for the deprecated datetime.utcfromtimestamp();
        # strftime output is identical.
        dt = datetime.fromtimestamp(ts, tz=timezone.utc)
        return dt.strftime('%a %b %d %H:%M:%S %Y') + " (UTC)"
    except (TypeError, ValueError, OverflowError, OSError):
        # Non-numeric, None, or out-of-range inputs: fall back to the raw value.
        return timestamp_str
|
|
|
|
# --- NEW: Helper to format floats nicely ---
|
|
def format_float(value_str, precision=3):
    """Format a numeric string to `precision` decimal places.

    Returns the input unchanged when it is not parseable as a float
    (e.g. "N/A" placeholders flow through untouched).
    """
    try:
        f_val = float(value_str)
        return f"{f_val:.{precision}f}"
    except (TypeError, ValueError):
        # Narrowed from a bare except: only parse failures are expected here.
        return value_str
|
|
|
|
# --- Report ID Encoding/Decoding ---
|
|
def encode_state(fragments):
    """Encode NTP state into a URL-safe base64 compressed report ID.

    Returns the encoded string, or None when the fragments cannot be
    serialized to JSON.
    """
    try:
        # Minimal state representation: generation time + raw fragments.
        state = {
            # Timezone-aware replacement for deprecated datetime.utcnow();
            # stripping tzinfo preserves the exact "...Z" string format.
            "timestamp": datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z",
            "fragments": fragments
        }
        # Compact JSON -> zlib (max compression) -> URL-safe base64
        json_str = json.dumps(state, separators=(',', ':'))
        compressed = zlib.compress(json_str.encode('utf-8'), level=9)
        return base64.urlsafe_b64encode(compressed).decode('ascii')
    except (TypeError, ValueError) as e:
        # json.dumps raises TypeError for unserializable objects and
        # ValueError for circular references.
        print(f"Error encoding state: {e}")
        return None
|
|
|
|
def decode_state(report_id):
    """Decode a report ID back into NTP state.

    Returns (fragments, timestamp); (None, None) when the ID is not a
    valid base64/zlib/JSON payload.
    """
    try:
        # Reverse of encode_state(): base64 -> decompress -> JSON
        raw = base64.urlsafe_b64decode(report_id.encode('ascii'))
        payload = json.loads(zlib.decompress(raw).decode('utf-8'))
        return payload.get("fragments", []), payload.get("timestamp")
    except Exception as e:
        # Broad on purpose: report IDs arrive from untrusted URLs.
        print(f"Error decoding state: {e}")
        return None, None
|
|
|
|
def generate_qr_code_ascii(url):
    """Render `url` as an ASCII-art QR code (two block chars per module)."""
    try:
        code = qrcode.QRCode(
            version=1,  # smallest size; fit=True lets it auto-grow
            error_correction=qrcode.constants.ERROR_CORRECT_L,
            box_size=1,
            border=2,
        )
        code.add_data(url)
        code.make(fit=True)

        # Each module becomes two characters wide so the code stays square
        # in a monospace font: full blocks for dark, spaces for light.
        rendered = []
        for module_row in code.get_matrix():
            rendered.append("".join("██" if module else "  " for module in module_row))
        return "\n".join(rendered)
    except Exception as e:
        print(f"Error generating QR code: {e}")
        return f"[QR Code generation failed: {e}]"
|
|
|
|
@app.route('/api/time')
def get_server_time():
    """Return the current server time as JSON (ISO-8601 UTC with trailing 'Z')."""
    # Timezone-aware replacement for deprecated datetime.utcnow(); stripping
    # tzinfo keeps the exact same "...Z" string the page's JS already parses.
    now = datetime.now(timezone.utc).replace(tzinfo=None)
    return jsonify({"time_utc": now.isoformat() + "Z"})
|
|
|
|
# --- NEW: Endpoint to just return the raw fragments ---
|
|
# The JavaScript will use this to get the latest offset data
|
|
# Endpoint returning the raw fragments; the page's JavaScript polls this
# to refresh the displayed clock offset.
@app.route('/api/fragments')
def get_fragments_json():
    """Return the per-node reporter fragments as a JSON array, sorted by node_id."""
    fragments = []
    ips = get_reporter_ips(REPORTER_SERVICE)
    for ip in ips:
        try:
            # Short timeout: this endpoint is polled by browsers.
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=1)
            if res.status_code == 200:
                fragments.append(res.json())
        except (requests.RequestException, ValueError):
            # Best-effort: skip unreachable pods and malformed JSON bodies
            # (narrowed from a bare except that also hid programming errors).
            pass
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    return jsonify(fragments)
|
|
|
|
|
|
def format_value(value, max_len=25):
    """Stringify a table cell, truncating with '...' past max_len characters."""
    if value is None:
        return "N/A"
    text = str(value)
    if len(text) <= max_len:
        return text
    return text[:max_len - 3] + "..."
|
|
|
|
def format_glossary(metrics_defs):
    """Render metric -> definition pairs as aligned terminal glossary lines."""
    return "\n".join(
        f"  {name.ljust(20)} - {meaning}"
        for name, meaning in metrics_defs.items()
    )
|
|
|
|
def render_report(fragments, report_id, gen_time, is_historical=False):
    """Render the full TUI-style NTP report page.

    Args:
        fragments: list of per-node dicts with "node_id", "tracking", "sources".
        report_id: encoded state string used for the snapshot URL / QR code.
        gen_time: generation timestamp string, displayed verbatim.
        is_historical: True when rendering a decoded snapshot (static clock).

    Returns:
        The rendered HTML page (via Flask's render_template_string).
    """
    error_msg = "No errors."
    meta_offset_ms = "N/A"
    meta_leap_status = "Unknown"

    nodes_list = [f.get("node_id", "unknown") for f in fragments]

    # QR code linking back to this exact report state
    report_url = f"{BASE_URL}/report/{report_id}"
    report_qr_code = generate_qr_code_ascii(report_url)

    # --- SECTION 2: tracking table (one column per node) ---
    tracking_rows = []
    for metric in TRACKING_METRICS_ORDER:
        row = [metric]
        for node_id in nodes_list:
            node_data = next((f for f in fragments if f.get("node_id") == node_id), None)
            value = "N/A"
            if node_data and isinstance(node_data.get("tracking"), dict):
                raw_value = node_data["tracking"].get(metric, "N/A")
                # Format specific fields for better readability
                if metric == "Ref time (UTC)":
                    value = format_ref_time(raw_value)
                elif metric in ["System time", "Last offset", "RMS offset", "Residual freq", "Skew", "Root delay", "Root dispersion"]:
                    value = format_float(raw_value, 6)  # Higher precision
                elif metric == "Frequency":
                    value = format_float(raw_value, 3)
                elif metric == "Update interval":
                    value = format_float(raw_value, 1)
                else:
                    value = format_value(raw_value)  # Use generic formatter
            row.append(value)
        tracking_rows.append(row)

    # Add summary row
    tracking_rows.append([form_feed_separator(18)] + [form_feed_separator(24)] * len(nodes_list))
    tracking_rows.append([f"TOTAL NODES: {len(nodes_list)}"] + [""] * len(nodes_list))

    tracking_table_ascii = tabulate(
        tracking_rows,
        headers=["Metric"] + nodes_list,
        tablefmt="fancy_grid",
        stralign="left",
        numalign="right"
    )

    # --- Meta description summary (leap status + average offset) ---
    total_offset_seconds = 0.0
    valid_offset_count = 0
    leap_statuses = set()
    if fragments:
        for frag in fragments:
            tracking = frag.get("tracking", {})
            if isinstance(tracking, dict) and "Error" not in tracking:
                # Collect Leap Status
                leap = tracking.get("Leap status")
                if leap:
                    leap_statuses.add(leap)

                # Collect Offset.
                # BUGFIX: the default used to be 0.1 (i.e. 100 ms), so a node
                # that reported no offset silently skewed the average. Default
                # to None so float() raises TypeError and the node is skipped.
                offset_str = tracking.get("Last offset")
                try:
                    offset_seconds = float(offset_str)
                    total_offset_seconds += offset_seconds
                    valid_offset_count += 1
                except (TypeError, ValueError):
                    pass  # Ignore if offset is missing or not a number

        if valid_offset_count > 0:
            avg_offset_seconds = total_offset_seconds / valid_offset_count
            meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms"  # Use ~ for average

        if len(leap_statuses) == 1:
            meta_leap_status = leap_statuses.pop()
        elif len(leap_statuses) > 1:
            meta_leap_status = "Mixed"
        # else remains "Unknown" if no valid status found

    # --- SECTION 3: sources table ---
    if not fragments:
        sources_table_ascii = "ERROR: Could not fetch data from any reporter pods."
    else:
        sources_rows = []
        node_source_counts = {}

        for f in fragments:
            node_id = f.get("node_id", "unknown")
            sources = f.get("sources", [])
            node_source_counts[node_id] = len(sources) if sources else 0

            if not sources:
                sources_rows.append([node_id, "N/A", "No sources reported", "N/A", "N/A", "N/A", "N/A", "N/A", "N/A"])
            else:
                for source in sources:
                    row = [
                        format_value(node_id, SOURCES_COL_WIDTHS[0]-2),
                        f"{source.get('Mode', '?')}{source.get('State', '?')}",
                        format_value(source.get("Name/IP address", "N/A"), SOURCES_COL_WIDTHS[2]-2),
                        format_value(source.get("Stratum", "N/A")),
                        format_value(source.get("Poll", "N/A")),
                        format_value(source.get("Reach", "N/A")),
                        format_value(source.get("LastRx", "N/A")),
                        format_float(source.get("Last sample", "N/A"), 6),  # Format sample offset
                        format_float(source.get("Std Dev", "N/A"), 3)  # Format Std Dev/Jitter
                    ]
                    sources_rows.append(row)

        # Add summary section
        sources_rows.append([form_feed_separator(SOURCES_COL_WIDTHS[0]-2)] + [form_feed_separator(w-2) for w in SOURCES_COL_WIDTHS[1:]])
        total_sources = sum(node_source_counts.values())
        summary_text = f"TOTAL SOURCES: {total_sources} | NODES REPORTING: {len(node_source_counts)}"
        sources_rows.append([summary_text] + [""] * (len(SOURCES_COLUMNS_ORDER) - 1))

        sources_table_ascii = tabulate(
            sources_rows,
            headers=SOURCES_COLUMNS_ORDER,
            tablefmt="fancy_grid",
            stralign="left",
            numalign="right"
        )

    # Header banner + SECTION 4 glossaries
    report_header_text = report_header(report_id, gen_time)
    tracking_glossary = format_glossary(TRACKING_METRICS_DEFS)
    sources_glossary = format_glossary(SOURCES_METRICS_DEFS)

    return render_template_string(
        HTML_TEMPLATE,
        gen_time_utc=gen_time,
        report_header=report_header_text,
        tracking_table_ascii=tracking_table_ascii,
        sources_table_ascii=sources_table_ascii,
        tracking_glossary=tracking_glossary,
        sources_glossary=sources_glossary,
        report_qr_code=report_qr_code,
        is_historical=is_historical,
        meta_description=f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}.",
        error=error_msg
    )
|
|
|
|
@app.route('/')
def homepage():
    """Live NTP status - fetches current data from all reporter nodes."""
    fragments = []
    ips = get_reporter_ips(REPORTER_SERVICE)

    # Fetch fragments from all reporter pods (best-effort per pod)
    for ip in ips:
        try:
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
            if res.status_code == 200:
                fragments.append(res.json())
            else:
                print(f"Failed fetch from {ip}: Status {res.status_code}")
        except (requests.RequestException, ValueError) as e:
            # RequestException covers connect/timeout errors; ValueError
            # covers a malformed JSON body from res.json().
            print(f"Failed connect to {ip}: {e}")

    fragments.sort(key=lambda x: x.get("node_id", "z"))

    # Generation timestamp computed in-process instead of shelling out to
    # `date -u` via subprocess (same "%Y-%m-%dT%H:%M:%SZ" output, portable,
    # no external process per request).
    gen_time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    # Create encoded report ID from current state
    report_id = encode_state(fragments)
    if not report_id:
        report_id = str(uuid.uuid4())[:8].upper()  # Fallback to random ID

    return render_report(fragments, report_id, gen_time)
|
|
|
|
@app.route('/report/<report_id>')
def historical_report(report_id):
    """Display historical NTP report from encoded report ID.

    The report_id path segment is the zlib+base64 payload produced by
    encode_state(); an undecodable ID yields a static 400 error page.
    """
    fragments, timestamp = decode_state(report_id)

    if fragments is None:
        # decode_state() could not parse the ID: render an inline error page.
        return render_template_string("""
<!DOCTYPE html>
<html><head><title>Invalid Report ID</title></head>
<body style="background: #000; color: #FFBF00; font-family: monospace; padding: 2em;">
<pre>
ERROR: INVALID REPORT ID

The report ID you provided could not be decoded.
Please check the URL and try again.

<a href="/" style="color: #FFFF00;">Return to live status</a>
</pre>
</body></html>
"""), 400

    # Use the timestamp from the encoded state, or fallback to "HISTORICAL"
    gen_time = timestamp if timestamp else "HISTORICAL"

    # is_historical=True makes the page show a frozen snapshot clock.
    return render_report(fragments, report_id, gen_time, is_historical=True)
|
|
|
|
if __name__ == '__main__':
    # Bind to all interfaces so the app is reachable from outside the container.
    app.run(host='0.0.0.0', port=8080)
|