before design refactor
Dockerfile
@@ -2,7 +2,7 @@
 FROM python:3.10-slim
 
 # Install Python dependencies
-RUN pip install Flask requests tabulate
+RUN pip install Flask requests tabulate qrcode
 
 WORKDIR /app/static
 RUN mkdir fonts
frontend/main.py
@@ -2,7 +2,11 @@ import os
 import requests
 import subprocess
 import json
-from flask import Flask, render_template_string, jsonify
+import zlib
+import base64
+import qrcode
+from io import StringIO
+from flask import Flask, render_template_string, jsonify, request
 from socket import gethostbyname_ex
 from datetime import datetime
 from tabulate import tabulate # For ASCII tables
@@ -13,6 +17,7 @@ app = Flask(__name__)
 
 # Config
 REPORTER_SERVICE = os.environ.get("REPORTER_SERVICE", "ntp-reporter-svc.default.svc.cluster.local")
+BASE_URL = os.environ.get("BASE_URL", "https://time.dws.rip")
 
 # Tracking table config
 TRACKING_METRICS_ORDER = [
@@ -31,6 +36,36 @@ SOURCES_COLUMNS_ORDER = [
 # Define widths for sources table columns
 SOURCES_COL_WIDTHS = [24, 10, 32, 10, 7, 7, 8, 15, 10]
 
+# Metric Definitions
+TRACKING_METRICS_DEFS = {
+    "Reference ID": "Identifier of current time source (IP or refclock ID)",
+    "Ref Source IP": "IP address of the reference time source",
+    "Stratum": "Distance from primary time source (lower is better, 1-16)",
+    "Ref time (UTC)": "Last time the reference was updated",
+    "System time": "Offset between system clock and reference time (seconds)",
+    "Last offset": "Offset of last clock update (seconds)",
+    "RMS offset": "Root mean square of recent offset values (long-term average)",
+    "Frequency": "Rate of system clock drift (ppm - parts per million)",
+    "Residual freq": "Residual frequency error not yet corrected",
+    "Skew": "Estimated error bound of frequency (accuracy metric)",
+    "Root delay": "Total network delay to stratum-1 server (seconds)",
+    "Root dispersion": "Total dispersion accumulated to stratum-1 server",
+    "Update interval": "Time between clock updates (seconds)",
+    "Leap status": "Leap second indicator (Normal, Insert, Delete, or Not synced)"
+}
+
+SOURCES_METRICS_DEFS = {
+    "DWS PEER": "Node identifier for this NTP daemon instance",
+    "ModeState": "Source mode (^=server, ==peer) & state (*=current sync)",
+    "Name/IP address": "Hostname or IP address of the NTP source",
+    "Stratum": "Stratum level of the source (1=primary reference)",
+    "Poll": "Polling interval to source (log2 seconds, e.g., 6 = 64s)",
+    "Reach": "Reachability register (377 octal = all 8 recent polls OK)",
+    "LastRx": "Time since last successful response from source",
+    "Last sample": "Offset measurement from last valid sample (seconds)",
+    "Std Dev": "Standard deviation of offset (jitter measurement)"
+}
+
 #
 # HTML Template - Radically simplified for TUI output
 #
@@ -99,10 +134,6 @@ HTML_TEMPLATE = """<!DOCTYPE html>
 </head>
 <body>
 <pre>
-$> ./dws_ntp_report
-<b>**INFO**</b>: INITIALIZING DWS NTP MONITORING SYSTEM
-<b>**INFO**</b>: COLLECTING DWS NTP POOL INFORMATION
-
 {{ report_header }}
 
 <b>SECTION 1: CURRENT TIME SYNCHRONIZATION</b>
@@ -116,7 +147,6 @@ CLOCK OFFSET: <span id="clock-offset">---</span>
 
 <b>SECTION 2: NODE TRACKING STATUS METRICS</b>
 ════════════════════════════════════════════════════════════════════════════════
-<b>**INFO**</b>: COLLECTING TRACKING STATUS METRICS FROM ALL NODES
 
 {{ tracking_table_ascii }}
 
@@ -125,24 +155,35 @@ CLOCK OFFSET: <span id="clock-offset">---</span>
 
 <b>SECTION 3: UPSTREAM NTP SOURCES</b>
 ════════════════════════════════════════════════════════════════════════════════
-<b>**INFO**</b>: COLLECTING UPSTREAM SOURCES METRICS FROM ALL NODES
 
 {{ sources_table_ascii }}
 
 ════════════════════════════════════════════════════════════════════════════════
 
 
-<b>SECTION 4: DEVELOPER INFORMATION</b>
+<b>SECTION 4: METRIC DEFINITIONS & DEVELOPER INFORMATION</b>
 ════════════════════════════════════════════════════════════════════════════════
 
+<b>TRACKING METRICS GLOSSARY:</b>
+{{ tracking_glossary }}
+
+<b>SOURCES METRICS GLOSSARY:</b>
+{{ sources_glossary }}
+
+<b>USAGE INFORMATION:</b>
 USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
 
-<b>**INFO**</b>: DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
-<b>**INFO**</b>: DWS LLC // UNITED STATES OF AMERICA // 2025
+DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
+DWS LLC // UNITED STATES OF AMERICA // 2025
 
+<b>REPORT SNAPSHOT:</b>
+Scan the QR code below to access this exact report state:
+
+<span style="font-size: 4px; line-height: 4px;">{{ report_qr_code }}</span>
+
 ════════════════════════════════════════════════════════════════════════════════
-<b>**INFO**</b>: REPORT GENERATION COMPLETE {{ gen_time_utc }}
-<b>**INFO**</b>: END OF REPORT
+REPORT GENERATION COMPLETE {{ gen_time_utc }}
+END OF REPORT
 ════════════════════════════════════════════════════════════════════════════════
 </pre>
 
@@ -151,6 +192,7 @@ USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
 const clockDateSpan = document.getElementById('clock-date');
 const clockStatusSpan = document.getElementById('clock-status');
 const clockOffsetSpan = document.getElementById('clock-offset');
+const isHistorical = {{ 'true' if is_historical else 'false' }};
 let serverTimeOffsetMs = null; // Offset between client and DWS server in milliseconds
 let clockUpdateInterval = null;
 let syncInterval = null;
@@ -159,17 +201,17 @@ USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
 if (serverTimeOffsetMs === null) return;
 // Apply offset to client's current time to get DWS server time
 const now = new Date(new Date().getTime() + serverTimeOffsetMs);
 
 // Format time and date exactly as requested
 const hours = String(now.getUTCHours()).padStart(2, '0');
 const minutes = String(now.getUTCMinutes()).padStart(2, '0');
 const seconds = String(now.getUTCSeconds()).padStart(2, '0');
 const timeString = `${hours}:${minutes}:${seconds}`;
 
 const dateString = now.toLocaleDateString('en-US', {
 weekday: 'long', year: 'numeric', month: 'long', day: 'numeric', timeZone: 'UTC'
 }) + " (UTC)";
 
 clockTimeSpan.textContent = timeString;
 clockDateSpan.textContent = dateString;
 }
@@ -185,7 +227,7 @@ USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
 const serverTime = new Date(timeData.time_utc).getTime();
 const clientTime = new Date().getTime();
 serverTimeOffsetMs = serverTime - clientTime; // Recalculate offset
 
 // Fetch latest fragments to try and get *a* representative offset
 // This is slightly tricky as we don't know which node served this page
 // We'll just grab the first node's offset for now
@@ -204,18 +246,18 @@ USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
 
 clockStatusSpan.textContent = "Synced";
 // Display the offset calculated between *client* and DWS server
 clockOffsetSpan.textContent = `${serverTimeOffsetMs * -1}ms (vs your clock)`;
 
 // Start interval timer ONLY if it's not already running
 if (!clockUpdateInterval) {
 updateClock(); // Update immediately
 clockUpdateInterval = setInterval(updateClock, 1000);
 }
 
 } catch (error) {
 console.error('Error syncing time/offset:', error);
 clockStatusSpan.textContent = `Sync Error`;
 clockOffsetSpan.textContent = `---`;
 serverTimeOffsetMs = 0; // Fallback to local time
 if (!clockUpdateInterval) {
 updateClock();
||||||
@ -223,11 +265,44 @@ USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Start the sync process when the page loads & repeat
|
function displayHistoricalTime(timestampStr) {
|
||||||
|
// Parse the ISO timestamp and display it statically
|
||||||
|
try {
|
||||||
|
const timestamp = new Date(timestampStr);
|
||||||
|
|
||||||
|
const hours = String(timestamp.getUTCHours()).padStart(2, '0');
|
||||||
|
const minutes = String(timestamp.getUTCMinutes()).padStart(2, '0');
|
||||||
|
const seconds = String(timestamp.getUTCSeconds()).padStart(2, '0');
|
||||||
|
const timeString = `${hours}:${minutes}:${seconds}`;
|
||||||
|
|
||||||
|
const dateString = timestamp.toLocaleDateString('en-US', {
|
||||||
|
weekday: 'long', year: 'numeric', month: 'long', day: 'numeric', timeZone: 'UTC'
|
||||||
|
}) + " (UTC)";
|
||||||
|
|
||||||
|
clockTimeSpan.textContent = timeString;
|
||||||
|
clockDateSpan.textContent = dateString;
|
||||||
|
clockStatusSpan.textContent = "Historical Snapshot";
|
||||||
|
clockOffsetSpan.textContent = "N/A";
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error parsing historical timestamp:', error);
|
||||||
|
clockTimeSpan.textContent = timestampStr;
|
||||||
|
clockDateSpan.textContent = "Historical";
|
||||||
|
clockStatusSpan.textContent = "Snapshot";
|
||||||
|
clockOffsetSpan.textContent = "N/A";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start the sync process when the page loads & repeat (only for live reports)
|
||||||
document.addEventListener('DOMContentLoaded', () => {
|
document.addEventListener('DOMContentLoaded', () => {
|
||||||
syncClockAndOffset();
|
if (isHistorical) {
|
||||||
syncInterval = setInterval(syncClockAndOffset, 60 * 1000); // Resync every 60 seconds
|
// For historical reports, display static timestamp
|
||||||
|
displayHistoricalTime("{{ gen_time_utc }}");
|
||||||
|
} else {
|
||||||
|
// For live reports, sync clock in real-time
|
||||||
|
syncClockAndOffset();
|
||||||
|
syncInterval = setInterval(syncClockAndOffset, 60 * 1000); // Resync every 60 seconds
|
||||||
|
}
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
</body>
|
</body>
|
||||||
@@ -243,7 +318,7 @@ def report_header(report_id, timestamp):
     """Generate a vintage-style report header."""
     header = []
     header.append(form_feed_separator(80))
-    header.append(f"REPORT ID: {report_id}".ljust(40) + f"GENERATED: {timestamp}".rjust(40))
+    header.append(f"GENERATED: {timestamp}".center(80))
     header.append(form_feed_separator(80))
     return "\n".join(header)
 
@@ -274,6 +349,65 @@ def format_float(value_str, precision=3):
     except:
         return value_str # Return original if not a float
 
+# --- Report ID Encoding/Decoding ---
+def encode_state(fragments):
+    """Encode NTP state into a URL-safe base64 compressed report ID."""
+    try:
+        # Create minimal state representation
+        state = {
+            "timestamp": datetime.utcnow().isoformat() + "Z",
+            "fragments": fragments
+        }
+        # Convert to JSON, compress with zlib, then base64 encode
+        json_str = json.dumps(state, separators=(',', ':')) # Compact JSON
+        compressed = zlib.compress(json_str.encode('utf-8'), level=9)
+        encoded = base64.urlsafe_b64encode(compressed).decode('ascii')
+        return encoded
+    except Exception as e:
+        print(f"Error encoding state: {e}")
+        return None
+
+def decode_state(report_id):
+    """Decode a report ID back into NTP state fragments."""
+    try:
+        # Base64 decode, decompress, parse JSON
+        compressed = base64.urlsafe_b64decode(report_id.encode('ascii'))
+        json_str = zlib.decompress(compressed).decode('utf-8')
+        state = json.loads(json_str)
+        return state.get("fragments", []), state.get("timestamp")
+    except Exception as e:
+        print(f"Error decoding state: {e}")
+        return None, None
+
+def generate_qr_code_ascii(url):
+    """Generate ASCII QR code for the given URL."""
+    try:
+        qr = qrcode.QRCode(
+            version=1, # Small version, will auto-increase if needed
+            error_correction=qrcode.constants.ERROR_CORRECT_L,
+            box_size=1,
+            border=2,
+        )
+        qr.add_data(url)
+        qr.make(fit=True)
+
+        # Get the QR code matrix
+        matrix = qr.get_matrix()
+
+        # Convert to ASCII art using block characters
+        ascii_qr = []
+        for row in matrix:
+            line = ""
+            for cell in row:
+                # Use full block for black, space for white
+                line += "██" if cell else "  "
+            ascii_qr.append(line)
+
+        return "\n".join(ascii_qr)
+    except Exception as e:
+        print(f"Error generating QR code: {e}")
+        return f"[QR Code generation failed: {e}]"
+
 @app.route('/api/time')
 def get_server_time():
     return jsonify({"time_utc": datetime.utcnow().isoformat() + "Z"})
@@ -301,27 +435,26 @@ def format_value(value, max_len=25):
         return s_val[:max_len-3] + "..."
     return s_val
 
-@app.route('/')
-def homepage():
-    fragments = []
+def format_glossary(metrics_defs):
+    """Format metric definitions as terminal-style glossary."""
+    lines = []
+    for metric, definition in metrics_defs.items():
+        lines.append(f" {metric.ljust(20)} - {definition}")
+    return "\n".join(lines)
+
+def render_report(fragments, report_id, gen_time, is_historical=False):
+    """Render NTP report from fragments data."""
     error_msg = "No errors."
     meta_offset_ms = "N/A"
     meta_leap_status = "Unknown"
-    ips = get_reporter_ips(REPORTER_SERVICE)
-    if not ips: error_msg = f"Could not resolve IPs for service '{REPORTER_SERVICE}'."
 
-    # 1. Fetch fragments
-    for ip in ips:
-        try:
-            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
-            if res.status_code == 200: fragments.append(res.json())
-            else: print(f"Failed fetch from {ip}: Status {res.status_code}")
-        except Exception as e: print(f"Failed connect to {ip}: {e}"); error_msg = str(e)
-
-    fragments.sort(key=lambda x: x.get("node_id", "z"))
     nodes_list = [f.get("node_id", "unknown") for f in fragments]
 
-    # 2. Generate ASCII Tracking Table with tabulate
+    # Generate QR code for the report URL
+    report_url = f"{BASE_URL}/report/{report_id}"
+    report_qr_code = generate_qr_code_ascii(report_url)
+
+    # Generate ASCII Tracking Table with tabulate
     tracking_rows = []
     for metric in TRACKING_METRICS_ORDER:
         row = [metric]
@@ -354,7 +487,7 @@ def homepage():
     total_offset_seconds = 0.0
    valid_offset_count = 0
    leap_statuses = set()
-    # 3. Generate Meta Description Summary (using the first node's data)
+    # Generate Meta Description Summary
    if fragments:
        for frag in fragments:
            tracking = frag.get("tracking", {})
@@ -363,7 +496,7 @@ def homepage():
             leap = tracking.get("Leap status")
             if leap:
                 leap_statuses.add(leap)
 
             # Collect Offset
             offset_str = tracking.get("Last offset", 0.1)
             try:
@@ -376,14 +509,14 @@ def homepage():
     if valid_offset_count > 0:
         avg_offset_seconds = total_offset_seconds / valid_offset_count
         meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms" # Use ~ for average
 
     if len(leap_statuses) == 1:
         meta_leap_status = leap_statuses.pop()
     elif len(leap_statuses) > 1:
         meta_leap_status = "Mixed"
     # else remains "Unknown" if no valid status found
 
-    # 3. Generate ASCII Sources Table with tabulate
+    # Generate ASCII Sources Table with tabulate
     if not fragments:
         sources_table_ascii = "ERROR: Could not fetch data from any reporter pods."
     else:
@@ -426,21 +559,81 @@ def homepage():
             numalign="right"
         )
 
-    gen_time = subprocess.run(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"], capture_output=True, text=True).stdout.strip()
 
     # Generate report header
-    report_id = str(uuid.uuid4())[:8].upper()
     report_header_text = report_header(report_id, gen_time)
 
+    # Format glossaries
+    tracking_glossary = format_glossary(TRACKING_METRICS_DEFS)
+    sources_glossary = format_glossary(SOURCES_METRICS_DEFS)
+
     return render_template_string(
         HTML_TEMPLATE,
         gen_time_utc=gen_time,
         report_header=report_header_text,
         tracking_table_ascii=tracking_table_ascii,
         sources_table_ascii=sources_table_ascii,
+        tracking_glossary=tracking_glossary,
+        sources_glossary=sources_glossary,
+        report_qr_code=report_qr_code,
+        is_historical=is_historical,
         meta_description=f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}.",
         error=error_msg
     )
 
+@app.route('/')
+def homepage():
+    """Live NTP status - fetches current data from all nodes."""
+    fragments = []
+    ips = get_reporter_ips(REPORTER_SERVICE)
+
+    # Fetch fragments from all reporter pods
+    for ip in ips:
+        try:
+            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
+            if res.status_code == 200:
+                fragments.append(res.json())
+            else:
+                print(f"Failed fetch from {ip}: Status {res.status_code}")
+        except Exception as e:
+            print(f"Failed connect to {ip}: {e}")
+
+    fragments.sort(key=lambda x: x.get("node_id", "z"))
+
+    # Generate timestamp and report ID
+    gen_time = subprocess.run(["date", "-u", "+%Y-%m-%dT%H:%M:%SZ"], capture_output=True, text=True).stdout.strip()
+
+    # Create encoded report ID from current state
+    report_id = encode_state(fragments)
+    if not report_id:
+        report_id = str(uuid.uuid4())[:8].upper() # Fallback to random ID
+
+    return render_report(fragments, report_id, gen_time)
+
+@app.route('/report/<report_id>')
+def historical_report(report_id):
+    """Display historical NTP report from encoded report ID."""
+    fragments, timestamp = decode_state(report_id)
+
+    if fragments is None:
+        return render_template_string("""
+<!DOCTYPE html>
+<html><head><title>Invalid Report ID</title></head>
+<body style="background: #000; color: #FFBF00; font-family: monospace; padding: 2em;">
+<pre>
+ERROR: INVALID REPORT ID
+
+The report ID you provided could not be decoded.
+Please check the URL and try again.
+
+<a href="/" style="color: #FFFF00;">Return to live status</a>
+</pre>
+</body></html>
+        """), 400
+
+    # Use the timestamp from the encoded state, or fallback to "HISTORICAL"
+    gen_time = timestamp if timestamp else "HISTORICAL"
+
+    return render_report(fragments, report_id, gen_time, is_historical=True)
+
 if __name__ == '__main__':
     app.run(host='0.0.0.0', port=8080)
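
For reference, a minimal standalone sketch of the report-ID round trip this commit introduces (compact JSON, zlib compression, URL-safe base64), mirroring the encode_state/decode_state pair in the diff above; the sample fragment payload here is invented purely for illustration:

import base64
import json
import zlib
from datetime import datetime

def encode_state(fragments):
    # Compact JSON -> zlib compress -> URL-safe base64 (same shape as the diff's encode_state)
    state = {"timestamp": datetime.utcnow().isoformat() + "Z", "fragments": fragments}
    compressed = zlib.compress(json.dumps(state, separators=(',', ':')).encode('utf-8'), 9)
    return base64.urlsafe_b64encode(compressed).decode('ascii')

def decode_state(report_id):
    # Reverse the steps: base64 decode -> decompress -> parse JSON
    raw = zlib.decompress(base64.urlsafe_b64decode(report_id.encode('ascii')))
    state = json.loads(raw.decode('utf-8'))
    return state.get("fragments", []), state.get("timestamp")

# Hypothetical fragment payload, for illustration only
fragments = [{"node_id": "node-a", "tracking": {"Stratum": "2", "Last offset": "0.000012"}}]
report_id = encode_state(fragments)
print(f"/report/{report_id}")   # the path a client can bookmark or render as a QR code
print(decode_state(report_id))  # round-trips back to (fragments, timestamp)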