"""DWS LLC NTP status dashboard.

Flask app that polls a set of "reporter" pods (resolved via a Kubernetes
headless service) for chrony tracking/sources fragments and renders them
as a single HTML status page, plus small JSON APIs.

NOTE(review): this file was recovered from a whitespace-mangled,
HTML-tag-stripped copy.  All Python logic below is reconstructed
faithfully from the recovered source; the HTML *markup* (template tags,
table tags, css classes/ids) was lost in extraction and is a plausible
reconstruction — confirm against the deployed page before relying on
styling hooks.
"""

import os
from datetime import datetime
from socket import gethostbyname_ex

import requests
from flask import (
    Flask,
    jsonify,
    render_template_string,
    request,
    send_from_directory,
)

app = Flask(__name__)

# Config
REPORTER_SERVICE = os.environ.get(
    "REPORTER_SERVICE", "ntp-reporter-svc.default.svc.cluster.local"
)
BASE_URL = os.environ.get("BASE_URL", "https://time.dws.rip")

# Tracking table config — row order for Section 2.
TRACKING_METRICS_ORDER = [
    "Reference ID",
    "Ref Source IP",
    "Stratum",
    "Ref time (UTC)",
    "System time",
    "Last offset",
    "RMS offset",
    "Frequency",
    "Residual freq",
    "Skew",
    "Root delay",
    "Root dispersion",
    "Update interval",
    "Leap status",
]

# Sources table config — column order for Section 3.
SOURCES_COLUMNS_ORDER = [
    "DWS PEER",
    "ModeState",
    "Name/IP address",
    "Stratum",
    "Poll",
    "Reach",
    "LastRx",
    "Last sample",
    "Std Dev",
]

# Metric Definitions (glossary text for Section 4).
TRACKING_METRICS_DEFS = {
    "Reference ID": "Identifier of current time source (IP or refclock ID)",
    "Ref Source IP": "IP address of the reference time source",
    "Stratum": "Distance from primary time source (lower is better, 1-16)",
    "Ref time (UTC)": "Last time the reference was updated",
    "System time": "Offset between system clock and reference time (seconds)",
    "Last offset": "Offset of last clock update (seconds)",
    "RMS offset": "Root mean square of recent offset values (long-term average)",
    "Frequency": "Rate of system clock drift (ppm - parts per million)",
    "Residual freq": "Residual frequency error not yet corrected",
    "Skew": "Estimated error bound of frequency (accuracy metric)",
    "Root delay": "Total network delay to stratum-1 server (seconds)",
    "Root dispersion": "Total dispersion accumulated to stratum-1 server",
    "Update interval": "Time between clock updates (seconds)",
    "Leap status": "Leap second indicator (Normal, Insert, Delete, or Not synced)",
}

SOURCES_METRICS_DEFS = {
    "DWS PEER": "Node identifier for this NTP daemon instance",
    "ModeState": "Source mode (^=server, ==peer) & state (*=current sync)",
    "Name/IP address": "Hostname or IP address of the NTP source",
    "Stratum": "Stratum level of the source (1=primary reference)",
    "Poll": "Polling interval to source (log2 seconds, e.g., 6 = 64s)",
    "Reach": "Reachability register (377 octal = all 8 recent polls OK)",
    "LastRx": "Time since last successful response from source",
    "Last sample": "Offset measurement from last valid sample (seconds)",
    "Std Dev": "Standard deviation of offset (jitter measurement)",
}

#
# HTML Template - DWS Design System Compliant
#
# NOTE(review): the original template's markup was stripped during
# extraction; only its visible text and Jinja placeholders survived.
# The skeleton below preserves that text verbatim — restore the real
# DWS design-system markup/classes from the deployed page.
#
HTML_TEMPLATE = """<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="utf-8">
    <meta name="description" content="{{ meta_description }}">
    <title>DWS LLC NTP STATUS</title>
</head>
<body>
    <button id="theme-toggle">CATHODE</button>

    <h1>DWS LLC NTP STATUS</h1>

    <p>GENERATED: {{ gen_time_utc }}</p>

    <h2>Section 1: Current Time Synchronization</h2>
    <p id="live-time">TIME: --:--:--</p>
    <p id="live-date">DATE: ----------</p>
    <p id="live-status">STATUS: Syncing...</p>
    <p id="live-offset">CLOCK OFFSET: ---</p>

    <h2>Section 2: Node Tracking Status Metrics</h2>
    {{ tracking_table_html | safe }}

    <h2>Section 3: Upstream NTP Sources</h2>
    {{ sources_table_html | safe }}

    <h2>Section 4: Metric Definitions &amp; Developer Information</h2>

    <h3>Tracking Metrics Glossary</h3>
    <dl>
    {{ tracking_glossary_html | safe }}
    </dl>

    <h3>Sources Metrics Glossary</h3>
    <dl>
    {{ sources_glossary_html | safe }}
    </dl>

    <h3>Usage Information</h3>
    <p>Use DWS as your NTP pool by setting time.dws.rip as your NTP source.</p>

    <footer>
        <p>DWS LLC // "IT'S YOUR INTERNET, TAKE IT BACK" // https://dws.rip</p>
        <p>DWS LLC // UNITED STATES OF AMERICA // 2025</p>
    </footer>
</body>
</html>
"""


def get_reporter_ips(service_name):
    """Resolve all A-record IPs for *service_name* (headless k8s service).

    Returns an empty list on any resolution failure (best effort).
    """
    try:
        _, _, ips = gethostbyname_ex(service_name)
        return ips
    except Exception as e:
        print(f"Error resolving service IPs: {e}")
        return []


def format_ref_time(timestamp_str):
    """Convert a unix-epoch string to 'Www Mmm DD HH:MM:SS YYYY (UTC)'.

    Returns the input unchanged if it is not a parseable timestamp.
    """
    try:
        ts = float(timestamp_str)
        dt = datetime.utcfromtimestamp(ts)
        return dt.strftime('%a %b %d %H:%M:%S %Y') + " (UTC)"
    except (TypeError, ValueError, OverflowError, OSError):
        # Not a numeric epoch (or out of range) — show raw value as-is.
        return timestamp_str


def format_float(value_str, precision=3):
    """Format *value_str* as a fixed-precision float; pass through non-numbers."""
    try:
        f_val = float(value_str)
        return f"{f_val:.{precision}f}"
    except (TypeError, ValueError):
        return value_str


# NOTE(review): the '<path:filename>' converter was stripped from the
# recovered source along with all other angle-bracket text; the handler's
# `filename` parameter requires it, so it is restored here.
@app.route('/static/fonts/<path:filename>')
def serve_font(filename):
    """Serve font files from the fonts directory."""
    return send_from_directory('fonts', filename)


@app.route('/api/time')
def get_server_time():
    """Return the server's current UTC time as JSON."""
    return jsonify({"time_utc": datetime.utcnow().isoformat() + "Z"})


@app.route('/api/fragments')
def get_fragments_json():
    """Fetch raw status fragments from every reporter pod and return as JSON.

    Unreachable pods are skipped silently (short 1s timeout per pod).
    """
    fragments = []
    ips = get_reporter_ips(REPORTER_SERVICE)
    for ip in ips:
        try:
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=1)
            if res.status_code == 200:
                fragments.append(res.json())
        except (requests.RequestException, ValueError):
            # Connection failure or invalid JSON body: best-effort, skip pod.
            pass
    # Stable display order; missing node_id sorts last ("z").
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    return jsonify(fragments)


def format_value(value, max_len=25):
    """Truncates long values for table display."""
    if value is None:
        return "N/A"
    s_val = str(value)
    if len(s_val) > max_len:
        return s_val[:max_len - 3] + "..."
    return s_val


def format_glossary(metrics_defs):
    """Format metric definitions as terminal-style glossary."""
    lines = []
    for metric, definition in metrics_defs.items():
        lines.append(f" {metric.ljust(20)} - {definition}")
    return "\n".join(lines)


def format_glossary_html(metrics_defs):
    """Format metric definitions as HTML definition list (dt/dd pairs).

    NOTE(review): original tags stripped in extraction; dt/dd reconstructed.
    """
    html_parts = []
    for metric, definition in metrics_defs.items():
        html_parts.append(f"<dt>{metric}</dt>")
        html_parts.append(f"<dd>{definition}</dd>")
    return "\n".join(html_parts)


def generate_tracking_table_html(fragments, nodes_list):
    """Generate HTML table for tracking metrics.

    Rows are TRACKING_METRICS_ORDER; one column per node in *nodes_list*.
    NOTE(review): table tags reconstructed — original markup/classes lost.
    """
    if not fragments:
        return ('<div class="error">ERROR: Could not fetch data from any '
                'reporter pods.</div>')
    html = ['<table>']
    # Table header
    html.append('<thead>')
    html.append('<tr>')
    html.append('<th>Metric</th>')
    for node_id in nodes_list:
        html.append(f'<th>{node_id}</th>')
    html.append('</tr>')
    html.append('</thead>')
    # Table body
    html.append('<tbody>')
    for metric in TRACKING_METRICS_ORDER:
        html.append('<tr>')
        html.append(f'<td>{metric}</td>')
        for node_id in nodes_list:
            node_data = next(
                (f for f in fragments if f.get("node_id") == node_id), None
            )
            value = "N/A"
            if node_data and isinstance(node_data.get("tracking"), dict):
                raw_value = node_data["tracking"].get(metric, "N/A")
                # Per-metric formatting: timestamps, high/low precision floats.
                if metric == "Ref time (UTC)":
                    value = format_ref_time(raw_value)
                elif metric in ["System time", "Last offset", "RMS offset",
                                "Residual freq", "Skew", "Root delay",
                                "Root dispersion"]:
                    value = format_float(raw_value, 6)
                elif metric == "Frequency":
                    value = format_float(raw_value, 3)
                elif metric == "Update interval":
                    value = format_float(raw_value, 1)
                else:
                    value = format_value(raw_value)
            html.append(f'<td>{value}</td>')
        html.append('</tr>')
    html.append('</tbody>')
    # Table footer with summary
    html.append('<tfoot>')
    html.append('<tr>')
    html.append(f'<td colspan="{len(nodes_list) + 1}">')
    html.append(f'TOTAL NODES: {len(nodes_list)}')
    html.append('</td>')
    html.append('</tr>')
    html.append('</tfoot>')
    html.append('</table>')
    return '\n'.join(html)


def generate_sources_table_html(fragments):
    """Generate HTML table for NTP sources.

    One row per upstream source per node; a placeholder row for nodes that
    reported no sources.
    NOTE(review): table tags reconstructed — original markup/classes lost.
    """
    if not fragments:
        return ('<div class="error">ERROR: Could not fetch data from any '
                'reporter pods.</div>')
    html = ['<table>']
    # Table header
    html.append('<thead><tr>')
    for col in SOURCES_COLUMNS_ORDER:
        if col in ["Stratum", "Poll", "Reach", "LastRx", "Last sample",
                   "Std Dev"]:
            # Numeric columns get a distinct class for right-alignment.
            html.append(f'<th class="numeric">{col}</th>')
        else:
            html.append(f'<th>{col}</th>')
    html.append('</tr></thead>')
    # Table body
    html.append('<tbody>')
    node_source_counts = {}
    for f in fragments:
        node_id = f.get("node_id", "unknown")
        sources = f.get("sources", [])
        node_source_counts[node_id] = len(sources) if sources else 0
        if not sources:
            # Placeholder row for a node with no reported sources.
            html.append('<tr>')
            html.append(f'<td>{node_id}</td>')
            html.append('<td>N/A</td>')
            html.append(f'<td colspan="{len(SOURCES_COLUMNS_ORDER) - 2}">'
                        'No sources reported</td>')
            html.append('</tr>')
        else:
            for source in sources:
                html.append('<tr>')
                html.append(f'<td>{format_value(node_id, 24)}</td>')
                # Mode and state chars are shown fused (e.g. "^*").
                html.append(f'<td>{source.get("Mode", "?")}'
                            f'{source.get("State", "?")}</td>')
                html.append(f'<td>{format_value(source.get("Name/IP address", "N/A"), 32)}</td>')
                html.append(f'<td>{format_value(source.get("Stratum", "N/A"))}</td>')
                html.append(f'<td>{format_value(source.get("Poll", "N/A"))}</td>')
                html.append(f'<td>{format_value(source.get("Reach", "N/A"))}</td>')
                html.append(f'<td>{format_value(source.get("LastRx", "N/A"))}</td>')
                html.append(f'<td>{format_float(source.get("Last sample", "N/A"), 6)}</td>')
                html.append(f'<td>{format_float(source.get("Std Dev", "N/A"), 3)}</td>')
                html.append('</tr>')
    html.append('</tbody>')
    # Table footer with summary
    total_sources = sum(node_source_counts.values())
    html.append('<tfoot>')
    html.append('<tr>')
    html.append(f'<td colspan="{len(SOURCES_COLUMNS_ORDER)}">')
    html.append(f'TOTAL SOURCES: {total_sources} | ')
    html.append(f'NODES REPORTING: {len(node_source_counts)}')
    html.append('</td>')
    html.append('</tr>')
    html.append('</tfoot>')
    html.append('</table>')
    return '\n'.join(html)


def render_report(fragments, gen_time, is_historical=False):
    """Render NTP report from fragments data.

    Computes the average last-offset and combined leap status across nodes
    for the page's meta description, then renders the full HTML page.
    """
    meta_offset_ms = "N/A"
    meta_leap_status = "Unknown"
    nodes_list = [f.get("node_id", "unknown") for f in fragments]

    # Calculate metadata for meta description
    total_offset_seconds = 0.0
    valid_offset_count = 0
    leap_statuses = set()
    if fragments:
        for frag in fragments:
            tracking = frag.get("tracking", {})
            if isinstance(tracking, dict) and "Error" not in tracking:
                leap = tracking.get("Leap status")
                if leap:
                    leap_statuses.add(leap)
                # FIX: previously defaulted to 0.1, so a node with a
                # *missing* offset inflated the average by 100ms. A missing
                # value is now skipped (float(None) raises TypeError below).
                offset_str = tracking.get("Last offset")
                try:
                    offset_seconds = float(offset_str)
                    total_offset_seconds += offset_seconds
                    valid_offset_count += 1
                except (TypeError, ValueError):
                    pass

    if valid_offset_count > 0:
        avg_offset_seconds = total_offset_seconds / valid_offset_count
        meta_offset_ms = f"~{(avg_offset_seconds * 1000):.1f}ms"

    if len(leap_statuses) == 1:
        meta_leap_status = leap_statuses.pop()
    elif len(leap_statuses) > 1:
        meta_leap_status = "Mixed"

    # Generate HTML tables
    tracking_table_html = generate_tracking_table_html(fragments, nodes_list)
    sources_table_html = generate_sources_table_html(fragments)

    # Generate HTML glossaries
    tracking_glossary_html = format_glossary_html(TRACKING_METRICS_DEFS)
    sources_glossary_html = format_glossary_html(SOURCES_METRICS_DEFS)

    return render_template_string(
        HTML_TEMPLATE,
        gen_time_utc=gen_time,
        tracking_table_html=tracking_table_html,
        sources_table_html=sources_table_html,
        tracking_glossary_html=tracking_glossary_html,
        sources_glossary_html=sources_glossary_html,
        is_historical=is_historical,
        meta_description=(
            f"DWS NTP Pool: {meta_leap_status}. Avg Offset: {meta_offset_ms}."
        ),
    )


@app.route('/')
def homepage():
    """Live NTP status - fetches current data from all nodes."""
    fragments = []
    ips = get_reporter_ips(REPORTER_SERVICE)
    for ip in ips:
        try:
            res = requests.get(f"http://{ip}:9898/fragment.json", timeout=2)
            if res.status_code == 200:
                fragments.append(res.json())
            else:
                print(f"Failed fetch from {ip}: Status {res.status_code}")
        except Exception as e:
            print(f"Failed connect to {ip}: {e}")
    fragments.sort(key=lambda x: x.get("node_id", "z"))
    # FIX: was `subprocess.run(["date", "-u", ...])` — shelling out to an
    # external binary for a timestamp; same format produced in-process.
    gen_time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
    return render_report(fragments, gen_time)


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)