import os
import requests
import subprocess
import json
from flask import Flask, render_template_string, jsonify
from socket import gethostbyname_ex
from datetime import datetime
from tabulate import tabulate  # For ASCII tables
import time  # For timestamp conversion
import uuid  # For report IDs

app = Flask(__name__)

# Config: DNS name of the headless service whose A records list the reporter pods.
REPORTER_SERVICE = os.environ.get("REPORTER_SERVICE", "ntp-reporter-svc.default.svc.cluster.local")

# Tracking table config: chronyc-style tracking metrics, in display order.
TRACKING_METRICS_ORDER = [
    "Reference ID", "Ref Source IP", "Stratum", "Ref time (UTC)", "System time",
    "Last offset", "RMS offset", "Frequency", "Residual freq", "Skew",
    "Root delay", "Root dispersion", "Update interval", "Leap status"
]
# Define max widths for tracking table columns (approximate)
TRACKING_COL_WIDTHS = [18] + [24] * 3  # Metric Name + 3 Nodes

# Sources table config: upstream-sources columns, in display order.
SOURCES_COLUMNS_ORDER = [
    "DWS PEER", "ModeState", "Name/IP address", "Stratum", "Poll", "Reach",
    "LastRx", "Last sample", "Std Dev"
]
# Define widths for sources table columns
SOURCES_COL_WIDTHS = [24, 10, 32, 10, 7, 7, 8, 15, 10]

#
# HTML Template - Radically simplified for TUI output
#
HTML_TEMPLATE = """
$> ./dws_ntp_report
**INFO**: INITIALIZING DWS NTP MONITORING SYSTEM
**INFO**: COLLECTING DWS NTP POOL INFORMATION
{{ report_header }}
SECTION 1: CURRENT TIME SYNCHRONIZATION
════════════════════════════════════════════════════════════════════════════════
TIME: --:--:--
DATE: ----------
STATUS: Syncing...
CLOCK OFFSET: ---
════════════════════════════════════════════════════════════════════════════════
SECTION 2: NODE TRACKING STATUS METRICS
════════════════════════════════════════════════════════════════════════════════
**INFO**: COLLECTING TRACKING STATUS METRICS FROM ALL NODES
{{ tracking_table_ascii }}
════════════════════════════════════════════════════════════════════════════════
SECTION 3: UPSTREAM NTP SOURCES
════════════════════════════════════════════════════════════════════════════════
**INFO**: COLLECTING UPSTREAM SOURCES METRICS FROM ALL NODES
{{ sources_table_ascii }}
════════════════════════════════════════════════════════════════════════════════
SECTION 4: DEVELOPER INFORMATION
════════════════════════════════════════════════════════════════════════════════
USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
**INFO**: DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
**INFO**: DWS LLC // UNITED STATES OF AMERICA // 2025
════════════════════════════════════════════════════════════════════════════════
**INFO**: REPORT GENERATION COMPLETE {{ gen_time_utc }}
**INFO**: END OF REPORT
════════════════════════════════════════════════════════════════════════════════
"""
# --- US Graphics Company Style Helpers ---
def form_feed_separator(width=80):
    """Return a horizontal rule of box-drawing characters, vintage-printout style."""
    return width * "═"
def report_header(report_id, timestamp):
    """Build the three-line vintage report banner: rule, id/timestamp line, rule.

    The middle line left-pads the report id to 40 columns and right-pads the
    generation timestamp to the remaining 40, for an 80-column printout.
    """
    rule = "═" * 80
    id_and_time = f"REPORT ID: {report_id}".ljust(40) + f"GENERATED: {timestamp}".rjust(40)
    return "\n".join([rule, id_and_time, rule])
def section_separator(title, width=80):
    """Return *title* framed by a leading blank line and a full-width rule below."""
    rule = "═" * width
    return f"\n{title}\n{rule}\n"
def get_reporter_ips(service_name):
    """Resolve *service_name* to the list of A-record IPs behind it.

    Used to enumerate the reporter pods behind the headless service.
    Returns an empty list on any resolution failure (logged to stdout),
    so callers can iterate the result unconditionally.
    """
    try:
        _, _, ips = gethostbyname_ex(service_name)
        return ips
    except (OSError, UnicodeError) as e:
        # gaierror/herror derive from OSError; bad IDNA names raise UnicodeError.
        print(f"Error resolving service IPs: {e}")
        return []
# --- NEW: Helper to convert Ref time (UTC) ---
def format_ref_time(timestamp_str):
    """Convert a Unix-timestamp string to a human-readable UTC string.

    Input looks like "1761142077.558355643" (seconds with fractions);
    output looks like "Wed Oct 22 14:08:24 2025 (UTC)".
    Returns the input unchanged when it cannot be parsed or converted.
    """
    from datetime import timezone  # local: top-level block only imports `datetime`
    try:
        ts = float(timestamp_str)
    except (TypeError, ValueError):
        return timestamp_str  # Not numeric (e.g. "N/A"): pass through.
    try:
        # Timezone-aware replacement for the deprecated datetime.utcfromtimestamp().
        dt = datetime.fromtimestamp(ts, tz=timezone.utc)
    except (OverflowError, OSError, ValueError):
        return timestamp_str  # Out-of-range timestamp: pass through.
    return dt.strftime('%a %b %d %H:%M:%S %Y') + " (UTC)"
# --- NEW: Helper to format floats nicely ---
def format_float(value_str, precision=3):
    """Format *value_str* as a fixed-precision decimal string.

    Returns the input unchanged when it is not a number (e.g. "N/A" or None),
    so table cells keep their raw value rather than breaking the report.
    """
    try:
        f_val = float(value_str)
    except (TypeError, ValueError):
        return value_str  # Not a float: pass through untouched.
    return f"{f_val:.{precision}f}"
@app.route('/api/time')
def get_server_time():
    """Return the current UTC server time as JSON (ISO-8601 with trailing 'Z')."""
    now_iso = datetime.utcnow().isoformat()
    return jsonify({"time_utc": now_iso + "Z"})
# --- Endpoint returning the raw fragments as JSON ---
# The JavaScript front end polls this to get the latest offset data.
@app.route('/api/fragments')
def get_fragments_json():
    """Fetch fragment.json from every reporter pod; return them sorted by node_id.

    Best-effort: unreachable or misbehaving pods are skipped so one bad pod
    cannot fail the endpoint. The short timeout keeps the poll snappy.
    """
    fragments = []
    for ip in get_reporter_ips(REPORTER_SERVICE):
        try:
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=1)  # Shorter timeout
            if res.status_code == 200:
                fragments.append(res.json())
        except Exception:
            # Was a bare `except:` — narrowed so Ctrl-C/SystemExit still propagate.
            pass
    # Sort by node id; missing ids ("z") sink to the end.
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    return jsonify(fragments)
def format_value(value, max_len=25):
    """Stringify *value* for table display, truncating with '...' past *max_len*.

    None renders as "N/A" so empty cells stay readable.
    """
    if value is None:
        return "N/A"
    text = str(value)
    return text if len(text) <= max_len else text[:max_len - 3] + "..."
def _build_tracking_table(fragments, nodes_list):
    """Build the per-node tracking-metrics ASCII table (Section 2)."""
    tracking_rows = []
    for metric in TRACKING_METRICS_ORDER:
        row = [metric]
        for node_id in nodes_list:
            node_data = next((f for f in fragments if f.get("node_id") == node_id), None)
            value = "N/A"
            if node_data and isinstance(node_data.get("tracking"), dict):
                raw_value = node_data["tracking"].get(metric, "N/A")
                # Format specific fields for better readability.
                if metric == "Ref time (UTC)":
                    value = format_ref_time(raw_value)
                elif metric in ("System time", "Last offset", "RMS offset",
                                "Residual freq", "Skew", "Root delay", "Root dispersion"):
                    value = format_float(raw_value, 6)  # Higher precision
                elif metric == "Frequency":
                    value = format_float(raw_value, 3)
                elif metric == "Update interval":
                    value = format_float(raw_value, 1)
                else:
                    value = format_value(raw_value)  # Generic truncation
            row.append(value)
        tracking_rows.append(row)
    # Summary rows: a separator line, then the node count.
    tracking_rows.append([form_feed_separator(18)] + [form_feed_separator(24)] * len(nodes_list))
    tracking_rows.append([f"TOTAL NODES: {len(nodes_list)}"] + [""] * len(nodes_list))
    return tabulate(
        tracking_rows,
        headers=["Metric"] + nodes_list,
        tablefmt="fancy_grid",
        stralign="left",
        numalign="right"
    )


def _summarize_tracking(fragments):
    """Aggregate tracking data across nodes.

    Returns (meta_offset_ms, meta_leap_status): the average "Last offset"
    rendered as "~X.Xms" (or "N/A" with no valid samples), and the leap
    status ("Unknown" / a single shared status / "Mixed").
    """
    total_offset_seconds = 0.0
    valid_offset_count = 0
    leap_statuses = set()
    meta_offset_ms = "N/A"
    meta_leap_status = "Unknown"
    for frag in fragments:
        tracking = frag.get("tracking", {})
        if isinstance(tracking, dict) and "Error" not in tracking:
            leap = tracking.get("Leap status")
            if leap:
                leap_statuses.add(leap)
            # BUGFIX: the default used to be 0.1, silently counting a *missing*
            # offset as a 100 ms sample and skewing the average. Missing or
            # non-numeric offsets are now skipped entirely.
            offset_str = tracking.get("Last offset")
            try:
                total_offset_seconds += float(offset_str)
                valid_offset_count += 1
            except (TypeError, ValueError):
                pass  # Ignore if offset is missing or not a number.
    if valid_offset_count > 0:
        avg_offset_seconds = total_offset_seconds / valid_offset_count
        meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms"  # ~ marks an average
    if len(leap_statuses) == 1:
        meta_leap_status = leap_statuses.pop()
    elif len(leap_statuses) > 1:
        meta_leap_status = "Mixed"
    # else remains "Unknown" if no valid status was found.
    return meta_offset_ms, meta_leap_status


def _build_sources_table(fragments):
    """Build the upstream-sources ASCII table (Section 3)."""
    sources_rows = []
    node_source_counts = {}
    for f in fragments:
        node_id = f.get("node_id", "unknown")
        sources = f.get("sources", [])
        node_source_counts[node_id] = len(sources) if sources else 0
        if not sources:
            sources_rows.append([node_id, "N/A", "No sources reported",
                                 "N/A", "N/A", "N/A", "N/A", "N/A", "N/A"])
            continue
        for source in sources:
            sources_rows.append([
                format_value(node_id, SOURCES_COL_WIDTHS[0] - 2),
                f"{source.get('Mode', '?')}{source.get('State', '?')}",
                format_value(source.get("Name/IP address", "N/A"), SOURCES_COL_WIDTHS[2] - 2),
                format_value(source.get("Stratum", "N/A")),
                format_value(source.get("Poll", "N/A")),
                format_value(source.get("Reach", "N/A")),
                format_value(source.get("LastRx", "N/A")),
                format_float(source.get("Last sample", "N/A"), 6),  # Sample offset
                format_float(source.get("Std Dev", "N/A"), 3)       # Std Dev/Jitter
            ])
    # Summary section: separator row, then totals.
    sources_rows.append([form_feed_separator(SOURCES_COL_WIDTHS[0] - 2)]
                        + [form_feed_separator(w - 2) for w in SOURCES_COL_WIDTHS[1:]])
    total_sources = sum(node_source_counts.values())
    summary_text = f"TOTAL SOURCES: {total_sources} | NODES REPORTING: {len(node_source_counts)}"
    sources_rows.append([summary_text] + [""] * (len(SOURCES_COLUMNS_ORDER) - 1))
    return tabulate(
        sources_rows,
        headers=SOURCES_COLUMNS_ORDER,
        tablefmt="fancy_grid",
        stralign="left",
        numalign="right"
    )


@app.route('/')
def homepage():
    """Render the full TUI-style NTP report page.

    Fetches a JSON fragment from every reporter pod, builds the tracking and
    upstream-sources ASCII tables plus a meta summary, and renders
    HTML_TEMPLATE with them.
    """
    fragments = []
    error_msg = "No errors."
    ips = get_reporter_ips(REPORTER_SERVICE)
    if not ips:
        error_msg = f"Could not resolve IPs for service '{REPORTER_SERVICE}'."

    # 1. Fetch fragments from each reporter pod (best-effort, 2s timeout each).
    for ip in ips:
        try:
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
            if res.status_code == 200:
                fragments.append(res.json())
            else:
                print(f"Failed fetch from {ip}: Status {res.status_code}")
        except Exception as e:
            print(f"Failed connect to {ip}: {e}")
            error_msg = str(e)
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    nodes_list = [f.get("node_id", "unknown") for f in fragments]

    # 2. Tracking-metrics ASCII table.
    tracking_table_ascii = _build_tracking_table(fragments, nodes_list)

    # 3. Meta summary (average offset / leap status) across all nodes.
    meta_offset_ms, meta_leap_status = _summarize_tracking(fragments)

    # 4. Upstream-sources ASCII table.
    if not fragments:
        sources_table_ascii = "ERROR: Could not fetch data from any reporter pods."
    else:
        sources_table_ascii = _build_sources_table(fragments)

    # Portable timestamp. BUGFIX: previously shelled out to `date -u`, which
    # spawns a process per request and breaks on hosts without GNU date;
    # the strftime format below produces identical output.
    gen_time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")

    # Generate the report banner.
    report_id = str(uuid.uuid4())[:8].upper()
    report_header_text = report_header(report_id, gen_time)

    return render_template_string(
        HTML_TEMPLATE,
        gen_time_utc=gen_time,
        report_header=report_header_text,
        tracking_table_ascii=tracking_table_ascii,
        sources_table_ascii=sources_table_ascii,
        meta_description=f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}.",
        error=error_msg
    )
if __name__ == "__main__":
    # Development entry point: serve on all interfaces for in-cluster access.
    app.run(host="0.0.0.0", port=8080)