import os
import requests
import subprocess
import json
import zlib
import base64
import qrcode
from io import StringIO
from flask import Flask, render_template_string, jsonify, request
from socket import gethostbyname_ex
from datetime import datetime
from tabulate import tabulate  # For ASCII tables
import time  # For timestamp conversion
import uuid  # For report IDs

app = Flask(__name__)

# Config (overridable via environment for in-cluster vs. local runs)
REPORTER_SERVICE = os.environ.get("REPORTER_SERVICE", "ntp-reporter-svc.default.svc.cluster.local")
BASE_URL = os.environ.get("BASE_URL", "https://time.dws.rip")

# Tracking table config: row order of the per-node tracking metrics table
TRACKING_METRICS_ORDER = [
    "Reference ID", "Ref Source IP", "Stratum", "Ref time (UTC)",
    "System time", "Last offset", "RMS offset", "Frequency",
    "Residual freq", "Skew", "Root delay", "Root dispersion",
    "Update interval", "Leap status"
]
# Define max widths for tracking table columns (approximate): Metric Name + 3 Nodes
TRACKING_COL_WIDTHS = [18] + [24] * 3

# Sources table config: column order of the upstream NTP sources table
SOURCES_COLUMNS_ORDER = [
    "DWS PEER", "ModeState", "Name/IP address", "Stratum", "Poll",
    "Reach", "LastRx", "Last sample", "Std Dev"
]
# Define widths for sources table columns
SOURCES_COL_WIDTHS = [24, 10, 32, 10, 7, 7, 8, 15, 10]

# Metric Definitions (rendered as the SECTION 4 glossaries)
TRACKING_METRICS_DEFS = {
    "Reference ID": "Identifier of current time source (IP or refclock ID)",
    "Ref Source IP": "IP address of the reference time source",
    "Stratum": "Distance from primary time source (lower is better, 1-16)",
    "Ref time (UTC)": "Last time the reference was updated",
    "System time": "Offset between system clock and reference time (seconds)",
    "Last offset": "Offset of last clock update (seconds)",
    "RMS offset": "Root mean square of recent offset values (long-term average)",
    "Frequency": "Rate of system clock drift (ppm - parts per million)",
    "Residual freq": "Residual frequency error not yet corrected",
    "Skew": "Estimated error bound of frequency (accuracy metric)",
    "Root delay": "Total network delay to stratum-1 server (seconds)",
    "Root dispersion": "Total dispersion accumulated to stratum-1 server",
    "Update interval": "Time between clock updates (seconds)",
    "Leap status": "Leap second indicator (Normal, Insert, Delete, or Not synced)"
}

SOURCES_METRICS_DEFS = {
    "DWS PEER": "Node identifier for this NTP daemon instance",
    "ModeState": "Source mode (^=server, ==peer) & state (*=current sync)",
    "Name/IP address": "Hostname or IP address of the NTP source",
    "Stratum": "Stratum level of the source (1=primary reference)",
    "Poll": "Polling interval to source (log2 seconds, e.g., 6 = 64s)",
    "Reach": "Reachability register (377 octal = all 8 recent polls OK)",
    "LastRx": "Time since last successful response from source",
    "Last sample": "Offset measurement from last valid sample (seconds)",
    "Std Dev": "Standard deviation of offset (jitter measurement)"
}

# HTML Template - Radically simplified for TUI output
# NOTE(review): in this copy the template body is plain text plus Jinja
# placeholders only — confirm whether an HTML shell (<html>/<pre> wrapper,
# styling, the JS that polls /api/time and /api/fragments) was lost when
# the file was reformatted.
HTML_TEMPLATE = """
{{ report_header }}
SECTION 1: CURRENT TIME SYNCHRONIZATION
════════════════════════════════════════════════════════════════════════════════
TIME: --:--:--
DATE: ----------
STATUS: Syncing...
CLOCK OFFSET: ---
════════════════════════════════════════════════════════════════════════════════
SECTION 2: NODE TRACKING STATUS METRICS
════════════════════════════════════════════════════════════════════════════════
{{ tracking_table_ascii }}
════════════════════════════════════════════════════════════════════════════════
SECTION 3: UPSTREAM NTP SOURCES
════════════════════════════════════════════════════════════════════════════════
{{ sources_table_ascii }}
════════════════════════════════════════════════════════════════════════════════
SECTION 4: METRIC DEFINITIONS & DEVELOPER INFORMATION
════════════════════════════════════════════════════════════════════════════════
TRACKING METRICS GLOSSARY:
{{ tracking_glossary }}
SOURCES METRICS GLOSSARY:
{{ sources_glossary }}
USAGE INFORMATION:
USE DWS AS YOUR NTP POOL BY SETTING time.dws.rip AS YOUR NTP SOURCE
DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip
DWS LLC // UNITED STATES OF AMERICA // 2025
REPORT SNAPSHOT:
Scan the QR code below to access this exact report state:
{{ report_qr_code }}
════════════════════════════════════════════════════════════════════════════════
REPORT GENERATION COMPLETE {{ gen_time_utc }}
END OF REPORT
════════════════════════════════════════════════════════════════════════════════
"""
# --- US Graphics Company Style Helpers ---
def form_feed_separator(width=80):
    """Return a printout-style horizontal rule, `width` characters wide.

    Mimics the form-feed separators of vintage line-printer reports.
    """
    return "═" * width
def report_header(report_id, timestamp):
    """Build the three-line vintage report banner.

    NOTE(review): `report_id` is accepted but not displayed anywhere in the
    banner — confirm whether it was meant to appear.
    """
    rule = "═" * 80  # same output as form_feed_separator(80), inlined
    generated_line = f"GENERATED: {timestamp}".center(80)
    return "\n".join((rule, generated_line, rule))
def section_separator(title, width=80):
    """Return a titled section break: newline, title, then a horizontal rule."""
    rule = "═" * width  # same output as form_feed_separator(width), inlined
    return f"\n{title}\n{rule}\n"
def get_reporter_ips(service_name):
    """Resolve a headless k8s service name to the list of pod IPs behind it.

    Returns an empty list (and logs) when resolution fails, so callers can
    degrade gracefully to an empty report.
    """
    try:
        _, _, ips = gethostbyname_ex(service_name)
        return ips
    # Was `except Exception`; gethostbyname_ex raises socket.gaierror
    # (an OSError subclass) on lookup failure and UnicodeError on
    # over-long labels — narrow to those.
    except (OSError, UnicodeError) as e:
        print(f"Error resolving service IPs: {e}")
        return []
# --- NEW: Helper to convert Ref time (UTC) ---
def format_ref_time(timestamp_str):
    """Convert a Unix timestamp string to a human-readable UTC time.

    Input is like "1761142077.558355643" (Unix timestamp with fractions);
    output is like "Wed Oct 22 14:08:24 2025 (UTC)". Returns the input
    unchanged when it cannot be parsed, so "N/A" passes through the tables.
    """
    try:
        ts = float(timestamp_str)
        dt = datetime.utcfromtimestamp(ts)
        return dt.strftime('%a %b %d %H:%M:%S %Y') + " (UTC)"
    # Was a bare `except:`; narrow to the failures float()/utcfromtimestamp()
    # actually raise (non-numeric input, out-of-range timestamps).
    except (TypeError, ValueError, OverflowError, OSError):
        return timestamp_str  # Return original if conversion fails
# --- NEW: Helper to format floats nicely ---
def format_float(value_str, precision=3):
    """Format a value as a fixed-precision float string.

    Returns the input unchanged when it is not parseable as a float
    (e.g. "N/A", None) — the report tables rely on this pass-through.
    """
    try:
        return f"{float(value_str):.{precision}f}"
    # Was a bare `except:`; narrow to float() parse failures.
    except (TypeError, ValueError):
        return value_str  # Return original if not a float
# --- Report ID Encoding/Decoding ---
def encode_state(fragments):
    """Pack NTP fragments into a compressed, URL-safe report ID.

    Pipeline: compact JSON -> zlib (max compression) -> urlsafe base64.
    A capture timestamp is embedded alongside the fragments. Returns None
    on any failure so callers can fall back to a random ID.
    """
    try:
        snapshot = {
            "timestamp": datetime.utcnow().isoformat() + "Z",
            "fragments": fragments,
        }
        compact_json = json.dumps(snapshot, separators=(',', ':'))
        deflated = zlib.compress(compact_json.encode('utf-8'), level=9)
        return base64.urlsafe_b64encode(deflated).decode('ascii')
    except Exception as e:
        print(f"Error encoding state: {e}")
        return None
def decode_state(report_id):
    """Inverse of encode_state: report ID -> (fragments, timestamp).

    Returns (None, None) when the ID is corrupt, truncated, or otherwise
    undecodable; the route handler uses that to serve a 400 page.
    """
    try:
        deflated = base64.urlsafe_b64decode(report_id.encode('ascii'))
        state = json.loads(zlib.decompress(deflated).decode('utf-8'))
        return state.get("fragments", []), state.get("timestamp")
    except Exception as e:
        print(f"Error decoding state: {e}")
        return None, None
def generate_qr_code_ascii(url):
    """Render `url` as a block-character QR code for terminal-style output.

    Returns a multi-line string of "██" (dark) and "  " (light) modules, or
    a bracketed error message if generation fails.
    """
    try:
        qr = qrcode.QRCode(
            version=1,  # Small version, will auto-increase if needed
            error_correction=qrcode.constants.ERROR_CORRECT_L,
            box_size=1,
            border=2,
        )
        qr.add_data(url)
        qr.make(fit=True)
        # Get the QR code matrix
        matrix = qr.get_matrix()
        # Convert to ASCII art using block characters.
        # FIX: light modules must be TWO spaces to match the two-character
        # "██" dark module — a single space distorts the code 2:1
        # horizontally and can make it unscannable.
        ascii_qr = []
        for row in matrix:
            line = ""
            for cell in row:
                line += "██" if cell else "  "
            ascii_qr.append(line)
        return "\n".join(ascii_qr)
    except Exception as e:
        print(f"Error generating QR code: {e}")
        return f"[QR Code generation failed: {e}]"
@app.route('/api/time')
def get_server_time():
    """JSON endpoint: current server time in UTC, ISO-8601 with 'Z' suffix."""
    now_iso = datetime.utcnow().isoformat() + "Z"
    return jsonify({"time_utc": now_iso})
# --- NEW: Endpoint to just return the raw fragments ---
# The JavaScript will use this to get the latest offset data
# The JavaScript client uses this to get the latest offset data.
@app.route('/api/fragments')
def get_fragments_json():
    """JSON endpoint: raw fragments from every reachable reporter pod.

    Best-effort by design — unreachable pods or bad payloads are skipped
    rather than failing the whole endpoint.
    """
    fragments = []
    ips = get_reporter_ips(REPORTER_SERVICE)
    for ip in ips:
        try:
            # Shorter timeout than the homepage: this endpoint is polled live.
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=1)
            if res.status_code == 200:
                fragments.append(res.json())
        # Was a bare `except: pass`; narrow to request failures and bad JSON
        # (requests' JSONDecodeError subclasses ValueError).
        except (requests.RequestException, ValueError):
            pass  # Ignore errors fetching fragments for this endpoint
    # Stable ordering by node id; missing ids sort last ("z" sentinel).
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    return jsonify(fragments)
def format_value(value, max_len=25):
    """Stringify a cell value for table display, truncating with "...".

    None becomes "N/A"; strings longer than `max_len` are cut so the
    result (including the ellipsis) is exactly `max_len` characters.
    """
    if value is None:
        return "N/A"
    text = str(value)
    if len(text) <= max_len:
        return text
    return text[:max_len - 3] + "..."
def format_glossary(metrics_defs):
    """Render a metrics dict as aligned '  NAME - definition' glossary lines."""
    return "\n".join(
        f"  {name.ljust(20)} - {definition}"
        for name, definition in metrics_defs.items()
    )
def render_report(fragments, report_id, gen_time, is_historical=False):
    """Render the full TUI-style NTP report page from node fragments.

    Args:
        fragments: list of per-node dicts with "node_id", "tracking" (dict)
            and "sources" (list) keys, as served by the reporter pods.
        report_id: encoded (or fallback random) ID used in the snapshot URL/QR.
        gen_time: timestamp string shown in the header and footer.
        is_historical: True when rendering a decoded snapshot, not live data.

    Returns the rendered HTML_TEMPLATE string.
    """
    error_msg = "No errors."
    meta_offset_ms = "N/A"
    meta_leap_status = "Unknown"
    nodes_list = [f.get("node_id", "unknown") for f in fragments]

    # Generate QR code for this exact report's snapshot URL
    report_url = f"{BASE_URL}/report/{report_id}"
    report_qr_code = generate_qr_code_ascii(report_url)

    # --- SECTION 2: ASCII tracking table (one column per node) ---
    tracking_rows = []
    for metric in TRACKING_METRICS_ORDER:
        row = [metric]
        for node_id in nodes_list:
            node_data = next((f for f in fragments if f.get("node_id") == node_id), None)
            value = "N/A"
            if node_data and isinstance(node_data.get("tracking"), dict):
                raw_value = node_data["tracking"].get(metric, "N/A")
                # Format specific fields for better readability
                if metric == "Ref time (UTC)":
                    value = format_ref_time(raw_value)
                elif metric in ["System time", "Last offset", "RMS offset", "Residual freq", "Skew", "Root delay", "Root dispersion"]:
                    value = format_float(raw_value, 6)  # Higher precision
                elif metric == "Frequency":
                    value = format_float(raw_value, 3)
                elif metric == "Update interval":
                    value = format_float(raw_value, 1)
                else:
                    value = format_value(raw_value)  # Use generic formatter
            row.append(value)
        tracking_rows.append(row)
    # Summary footer: rule row, then node count
    tracking_rows.append([form_feed_separator(18)] + [form_feed_separator(24)] * len(nodes_list))
    tracking_rows.append([f"TOTAL NODES: {len(nodes_list)}"] + [""] * len(nodes_list))
    tracking_table_ascii = tabulate(
        tracking_rows,
        headers=["Metric"] + nodes_list,
        tablefmt="fancy_grid",
        stralign="left",
        numalign="right"
    )

    # --- Meta description: average offset + consensus leap status ---
    total_offset_seconds = 0.0
    valid_offset_count = 0
    leap_statuses = set()
    if fragments:
        for frag in fragments:
            tracking = frag.get("tracking", {})
            if isinstance(tracking, dict) and "Error" not in tracking:
                # Collect Leap Status
                leap = tracking.get("Leap status")
                if leap:
                    leap_statuses.add(leap)
                # Collect Offset.
                # FIX: previously defaulted to 0.1, so a node missing
                # "Last offset" silently added 100 ms to the average.
                # With None, float() raises TypeError and the node is skipped.
                offset_str = tracking.get("Last offset")
                try:
                    offset_seconds = float(offset_str)
                    total_offset_seconds += offset_seconds
                    valid_offset_count += 1
                except (TypeError, ValueError):
                    pass  # Ignore if offset is missing or not a number
        if valid_offset_count > 0:
            avg_offset_seconds = total_offset_seconds / valid_offset_count
            meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms"  # Use ~ for average
        if len(leap_statuses) == 1:
            meta_leap_status = leap_statuses.pop()
        elif len(leap_statuses) > 1:
            meta_leap_status = "Mixed"
        # else remains "Unknown" if no valid status found

    # --- SECTION 3: ASCII sources table (one row per upstream source) ---
    if not fragments:
        sources_table_ascii = "ERROR: Could not fetch data from any reporter pods."
    else:
        sources_rows = []
        node_source_counts = {}
        for f in fragments:
            node_id = f.get("node_id", "unknown")
            sources = f.get("sources", [])
            node_source_counts[node_id] = len(sources) if sources else 0
            if not sources:
                sources_rows.append([node_id, "N/A", "No sources reported", "N/A", "N/A", "N/A", "N/A", "N/A", "N/A"])
            else:
                for source in sources:
                    row = [
                        format_value(node_id, SOURCES_COL_WIDTHS[0] - 2),
                        f"{source.get('Mode', '?')}{source.get('State', '?')}",
                        format_value(source.get("Name/IP address", "N/A"), SOURCES_COL_WIDTHS[2] - 2),
                        format_value(source.get("Stratum", "N/A")),
                        format_value(source.get("Poll", "N/A")),
                        format_value(source.get("Reach", "N/A")),
                        format_value(source.get("LastRx", "N/A")),
                        format_float(source.get("Last sample", "N/A"), 6),  # Format sample offset
                        format_float(source.get("Std Dev", "N/A"), 3)  # Format Std Dev/Jitter
                    ]
                    sources_rows.append(row)
        # Summary footer: rule row, then totals line spanning first column
        sources_rows.append([form_feed_separator(SOURCES_COL_WIDTHS[0] - 2)] + [form_feed_separator(w - 2) for w in SOURCES_COL_WIDTHS[1:]])
        total_sources = sum(node_source_counts.values())
        summary_text = f"TOTAL SOURCES: {total_sources} | NODES REPORTING: {len(node_source_counts)}"
        sources_rows.append([summary_text] + [""] * (len(SOURCES_COLUMNS_ORDER) - 1))
        sources_table_ascii = tabulate(
            sources_rows,
            headers=SOURCES_COLUMNS_ORDER,
            tablefmt="fancy_grid",
            stralign="left",
            numalign="right"
        )

    # Header banner + glossaries, then render the template
    report_header_text = report_header(report_id, gen_time)
    tracking_glossary = format_glossary(TRACKING_METRICS_DEFS)
    sources_glossary = format_glossary(SOURCES_METRICS_DEFS)
    return render_template_string(
        HTML_TEMPLATE,
        gen_time_utc=gen_time,
        report_header=report_header_text,
        tracking_table_ascii=tracking_table_ascii,
        sources_table_ascii=sources_table_ascii,
        tracking_glossary=tracking_glossary,
        sources_glossary=sources_glossary,
        report_qr_code=report_qr_code,
        is_historical=is_historical,
        meta_description=f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}.",
        error=error_msg
    )
@app.route('/')
def homepage():
    """Live NTP status - fetches current data from all reporter pods."""
    fragments = []
    ips = get_reporter_ips(REPORTER_SERVICE)
    # Fetch fragments from all reporter pods (best-effort per pod)
    for ip in ips:
        try:
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
            if res.status_code == 200:
                fragments.append(res.json())
            else:
                print(f"Failed fetch from {ip}: Status {res.status_code}")
        except Exception as e:
            print(f"Failed connect to {ip}: {e}")
    # Stable ordering by node id; missing ids sort last ("z" sentinel).
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    # FIX: build the timestamp in-process instead of shelling out to
    # `date -u` via subprocess on every request (fork overhead; breaks
    # on images without the date binary). Output format is identical.
    gen_time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    # Create encoded report ID from current state
    report_id = encode_state(fragments)
    if not report_id:
        report_id = str(uuid.uuid4())[:8].upper()  # Fallback to random ID
    return render_report(fragments, report_id, gen_time)
# NOTE(review): this route was garbled in the source copy; reconstructed from
# the visible error text and the decode_state()/render_report() contracts.
# Confirm the original route pattern, handler name, and error-page markup.
@app.route('/report/<report_id>')
def view_report(report_id):
    """Historical report view: decode the state baked into the ID and re-render it."""
    fragments, timestamp = decode_state(report_id)
    if fragments is None:
        # decode_state returns (None, None) for corrupt/truncated IDs
        return render_template_string("""ERROR: INVALID REPORT ID

The report ID you provided could not be decoded.
Please check the URL and try again.

<a href="/">Return to live status</a>"""), 400
    # Use the timestamp from the encoded state, or fallback to "HISTORICAL"
    gen_time = timestamp if timestamp else "HISTORICAL"
    return render_report(fragments, report_id, gen_time, is_historical=True)


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)