Upload 3 files

- Quasar_axrvi_ranker.py     +4   -1
- hub_dashboard_service.py   +170 -198
- ranker_logs_api.py         +54  -0
Quasar_axrvi_ranker.py
CHANGED
@@ -4613,7 +4613,10 @@ class QuasarAXRVIBridge:
         self.log_bridge: Optional[object] = None
         if self.enable_logging:
             try:
-                self.ranker_logger = RankerLogger()
+                # FIX: use the RANKER_LOG_DIR env var so the dashboard service can
+                # always locate the log files via the same variable.
+                _rl_log_dir = os.environ.get("RANKER_LOG_DIR", "./ranker_logs")
+                self.ranker_logger = RankerLogger(log_dir=_rl_log_dir)
                 self.log_bridge = RankerLogBridge(self.ranker_logger)
             except Exception as e:
                 logger.warning(f"Structured logging unavailable: {e} - continuing without")
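Reviewer note: the fix above only helps if the writer (the ranker) and the reader (the dashboard service) resolve RANKER_LOG_DIR the same way. Below is a minimal sketch of that contract, assuming RankerLogger accepts the log_dir keyword exactly as shown in the diff; resolve_ranker_log_dir is a hypothetical helper, not part of this upload.

import os
from pathlib import Path

def resolve_ranker_log_dir(default: str = "./ranker_logs") -> Path:
    """Both processes call this, so they agree on one directory even when
    their working directories differ (e.g. /app vs. a notebook cwd)."""
    log_dir = Path(os.environ.get("RANKER_LOG_DIR", default)).expanduser()
    log_dir.mkdir(parents=True, exist_ok=True)  # no-op if it already exists
    return log_dir

# Writer side:  RankerLogger(log_dir=str(resolve_ranker_log_dir()))
# Reader side:  FileBasedLoggerAdapter(log_dir=str(resolve_ranker_log_dir()))
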
hub_dashboard_service.py
CHANGED
@@ -30,6 +30,15 @@ import websocket
 from flask import Flask, jsonify, request, send_from_directory
 from flask_cors import CORS
 
+# ── Ranker Logs Blueprint ─────────────────────────────────────────────────────
+# Import lazily so the service starts even if ranker_logs_api.py is absent.
+try:
+    from ranker_logs_api import ranker_logs_bp, init_ranker_logs_api as _init_bp
+    _RANKER_LOGS_BP_AVAILABLE = True
+except ImportError:
+    _RANKER_LOGS_BP_AVAILABLE = False
+    logger_import_warning = "ranker_logs_api.py not found - Blueprint endpoints disabled"
+
 # ── Logging ───────────────────────────────────────────────────────────────────
 logging.basicConfig(
     level=logging.INFO,

@@ -405,8 +414,158 @@ class TradeLogParser:
 
 
 # ──────────────────────────────────────────────────────────────────────────────
-# SECTION
+# SECTION 2b - FILE-BASED LOGGER ADAPTER
 # ──────────────────────────────────────────────────────────────────────────────
+# The ranker_logs_api Blueprint expects a RankerLogger-style object with get_recent(),
+# get_by_asset(), get_by_level(), get_stats(), export_json(), and clear_buffer().
+# This adapter satisfies that interface by reading from the same log FILES that the
+# TradeLogParser uses - no in-memory ranker process required in the dashboard service.
+
+class FileBasedLoggerAdapter:
+    """
+    Implements the RankerLogger interface expected by the ranker_logs_api.py Blueprint,
+    but reads from disk log files instead of an in-memory buffer.
+    This lets the dashboard service power ALL Blueprint endpoints without needing a
+    live RankerLogger instance.
+    """
+
+    # ── Shared compiled patterns ───────────────────────────────────────────────
+    _CAT_RE = re.compile(r'\|\s*(INFO|DEBUG|WARNING|ERROR|CRITICAL)\s*\|\s*([A-Z_]+)\s*\|')
+    _ASSET_RE = re.compile(r'\|\s*(?:TRADE|SIGNAL)\s*\|\s*(\w+)\s*\|')
+    _TS_RE = re.compile(r'\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\]')
+
+    def __init__(self, log_dir: str = _LOG_DIR):
+        self._log_dir = log_dir
+        self._lock = threading.RLock()
+
+    # ── Internal helpers ───────────────────────────────────────────────────────
+
+    def _find_files(self) -> list:
+        candidate_dirs = [
+            self._log_dir,
+            str(Path(__file__).parent / "ranker_logs"),
+            "./ranker_logs",
+            str(Path.home() / "ranker_logs"),
+        ]
+        for cdir in candidate_dirs:
+            found = sorted(glob.glob(str(Path(cdir) / "*.log*")))
+            if found:
+                return found
+        return []
+
+    def _read_lines(self, n_tail: int = 500) -> list:
+        """Return up to n_tail most-recent lines across the 3 newest log files."""
+        files = self._find_files()
+        raw = []
+        for fpath in files[-3:]:
+            try:
+                with open(fpath, "r", encoding="utf-8", errors="replace") as f:
+                    raw.extend(f.readlines()[-n_tail:])
+            except OSError:
+                pass
+        raw.reverse()  # newest first
+        return raw
+
+    def _line_to_entry(self, line: str) -> dict | None:
+        ts_m = self._TS_RE.search(line)
+        if not ts_m:
+            return None
+        cat_m = self._CAT_RE.search(line)
+        level = cat_m.group(1) if cat_m else "INFO"
+        cat = cat_m.group(2).strip() if cat_m else ""
+        ast_m = self._ASSET_RE.search(line)
+        asset = ast_m.group(1) if ast_m else None
+        # Build a minimal dict compatible with what the Blueprint's callers expect.
+        return {
+            "timestamp": ts_m.group(1),
+            "level": level,
+            "category": cat,
+            "message": line.strip(),
+            "asset": asset,
+            "data": None,
+        }
+
+    # ── RankerLogger interface ─────────────────────────────────────────────────
+
+    def get_recent(self, n: int = 50, category: str | None = None) -> list:
+        entries = []
+        for line in self._read_lines(n_tail=max(n * 3, 200)):
+            e = self._line_to_entry(line)
+            if e is None:
+                continue
+            if category and category.upper() not in line.upper():
+                continue
+            entries.append(e)
+            if len(entries) >= n:
+                break
+        return entries
+
+    def get_by_asset(self, asset: str, n: int = 30) -> list:
+        entries = []
+        for line in self._read_lines(n_tail=500):
+            if asset.upper() not in line.upper():
+                continue
+            e = self._line_to_entry(line)
+            if e:
+                entries.append(e)
+            if len(entries) >= n:
+                break
+        return entries
+
+    def get_by_level(self, level: str, n: int = 50) -> list:
+        entries = []
+        for line in self._read_lines(n_tail=500):
+            e = self._line_to_entry(line)
+            if e and e["level"].upper() == level.upper():
+                entries.append(e)
+            if len(entries) >= n:
+                break
+        return entries
+
+    def get_stats(self) -> dict:
+        by_category: dict = {}
+        by_level: dict = {}
+        by_asset: dict = {}
+        errors: dict = {}
+        total = 0
+        for line in self._read_lines(n_tail=2000):
+            e = self._line_to_entry(line)
+            if not e:
+                continue
+            total += 1
+            by_level[e["level"]] = by_level.get(e["level"], 0) + 1
+            by_category[e["category"]] = by_category.get(e["category"], 0) + 1
+            if e["asset"]:
+                by_asset[e["asset"]] = by_asset.get(e["asset"], 0) + 1
+            if e["level"] in ("ERROR", "CRITICAL"):
+                errors[e["category"]] = errors.get(e["category"], 0) + 1
+        return {
+            "total_events": total,
+            "by_level": by_level,
+            "by_category": by_category,
+            "by_asset": by_asset,
+            "errors": errors,
+            "buffer_size": total,
+            "buffer_capacity": total,
+        }
+
+    def export_json(self, filepath: str, n: int = 500):
+        import json as _json
+        entries = self.get_recent(n)
+        with open(filepath, "w") as f:
+            _json.dump({
+                "export_time": datetime.utcnow().isoformat(),
+                "count": len(entries),
+                "logs": entries,
+            }, f, indent=2)
+
+    def clear_buffer(self):
+        # File-based adapter has no in-memory buffer to clear.
+        # No-op - files are managed by the ranker process itself.
+        pass
+
+
 
 from dataclasses import dataclass, field
 

@@ -542,6 +701,15 @@ _hub_subscriber.start()
 app = Flask(__name__)
 CORS(app)
 
+# ── Register ranker logs Blueprint (powers /export, /clear, /asset/<a>, /level/<l>) ──
+if _RANKER_LOGS_BP_AVAILABLE:
+    _file_logger_adapter = FileBasedLoggerAdapter(log_dir=_LOG_DIR)
+    _init_bp(_file_logger_adapter)
+    app.register_blueprint(ranker_logs_bp)
+    logger.info("[Blueprint] ranker_logs_bp registered - all /api/ranker/logs/* endpoints active")
+else:
+    logger.warning("[Blueprint] ranker_logs_api.py not found - inline routes only")
+
 
 @app.route("/")
 def index():

@@ -592,206 +760,10 @@ def api_trades_closed():
     })
 
 
-@app.route("/api/ranker/logs/recent")
-def api_logs_recent():
-    """Get recent ranker logs - reads directly from log files.
-    v2.3 FIX: wrapped in try/except so internal errors return JSON (not HTML 500).
-    Also scans multiple candidate directories so relative vs absolute paths both work.
-    """
-    try:
-        limit = int(request.args.get("limit", 50))
-        category = request.args.get("category")  # optional filter
-
-        logs = []
-
-        # ── Find log files - try several candidate directories ─────────────────
-        # The ranker may write to ./ranker_logs/ (relative to its cwd) which
-        # resolves differently from the service's _LOG_DIR. We try all candidates
-        # and use the first one that contains *.log* files.
-        candidate_dirs = [
-            _LOG_DIR,                                    # /app/ranker_logs
-            str(Path(__file__).parent / "ranker_logs"),  # next to this file
-            "./ranker_logs",                             # cwd-relative
-            str(Path.home() / "ranker_logs"),            # home dir
-        ]
-        files = []
-        found_dir = None
-        for cdir in candidate_dirs:
-            pattern = str(Path(cdir) / "*.log*")
-            found = sorted(glob.glob(pattern))
-            if found:
-                files = found
-                found_dir = cdir
-                logger.debug(f"[api_logs_recent] found {len(files)} log file(s) in {cdir}")
-                break
-
-        if not files:
-            searched = ", ".join(candidate_dirs)
-            return jsonify({
-                "logs": [],
-                "count": 0,
-                "error": f"No *.log* files found. Searched: {searched}",
-            })
-
-        # Read enough lines from the tail of the 3 newest files
-        raw_lines = []
-        for fpath in files[-3:]:
-            try:
-                with open(fpath, "r", encoding="utf-8", errors="replace") as f:
-                    lines = f.readlines()
-                    raw_lines.extend(lines[-200:])
-            except OSError:
-                pass
-
-        # Newest first
-        raw_lines.reverse()
-
-        # Pre-compiled patterns
-        _CAT_RE = re.compile(r'\|\s*(INFO|DEBUG|WARNING|ERROR|CRITICAL)\s*\|\s*([A-Z_]+)\s*\|')
-        _TRAINING_RE = re.compile(
-            r'step=(\d+)\s*\|\s*loss=([\d.]+)\s*\|\s*lr=([\d.eE+\-]+)\s*\|\s*assets=(\d+)'
-        )
-        _JSON_RE = re.compile(r'(\{.*\})\s*$')
-        _ASSET_RE = re.compile(r'\|\s*TRADE\s*\|\s*(\w+)\s*\|')
-
-        for line in raw_lines[:limit]:
-            if not line.strip():
-                continue
-            ts_match = re.search(r'\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\]', line)
-            if not ts_match:
-                continue
-
-            ts = ts_match.group(1)
-
-            # Extract level + category from "[ts] | LEVEL | CATEGORY | ..."
-            cat_m = _CAT_RE.search(line)
-            level = cat_m.group(1) if cat_m else "INFO"
-            category_field = cat_m.group(2).strip() if cat_m else ""
-
-            if category and category.upper() not in line.upper():
-                continue
-
-            # Try to extract asset name (present for SIGNAL / TRADE lines)
-            asset_m = _ASSET_RE.search(line)
-            asset = asset_m.group(1) if asset_m else None
-
-            entry = {
-                "timestamp": ts,
-                "level": level,
-                "category": category_field,
-                "message": line.strip(),
-                "asset": asset,
-                "data": None,
-            }
-
-            if category_field == "TRAINING":
-                tm = _TRAINING_RE.search(line)
-                if tm:
-                    entry["data"] = {
-                        "step": int(tm.group(1)),
-                        "loss": float(tm.group(2)),
-                        "lr": float(tm.group(3)),
-                        "asset_count": int(tm.group(4)),
-                    }
-                else:
-                    # Fallback: JSON blob at end of line
-                    jm = _JSON_RE.search(line)
-                    if jm:
-                        try:
-                            blob = json.loads(jm.group(1))
-                            # Normalise key names so JS always sees step/loss/lr/asset_count
-                            if "step" in blob:
-                                entry["data"] = {
-                                    "step": blob.get("step", 0),
-                                    "loss": blob.get("loss", 0.0),
-                                    "lr": blob.get("lr", 0.0),
-                                    "asset_count": blob.get("asset_count", blob.get("assets", 0)),
-                                }
-                        except (ValueError, KeyError):
-                            pass
-
-            logs.append(entry)
-
-        return jsonify({
-            "logs": logs,
-            "count": len(logs),
-            "log_dir": found_dir,
-        })
-
-    except Exception as exc:
-        logger.exception(f"[api_logs_recent] unhandled error: {exc}")
-        return jsonify({"logs": [], "count": 0, "error": str(exc)}), 200
-
-
-@app.route("/api/ranker/logs/stats")
-def api_logs_stats():
-    """Return aggregate stats parsed from log files.
-    Previously missing endpoint - dashboard Logs tab called this and got 404.
-    """
-    try:
-        by_category: dict = {}
-        by_level: dict = {}
-        by_asset: dict = {}
-        errors: dict = {}
-        total = 0
-
-        candidate_dirs = [
-            _LOG_DIR,
-            str(Path(__file__).parent / "ranker_logs"),
-            "./ranker_logs",
-            str(Path.home() / "ranker_logs"),
-        ]
-        files = []
-        for cdir in candidate_dirs:
-            found = sorted(glob.glob(str(Path(cdir) / "*.log*")))
-            if found:
-                files = found
-                break
-
-        _CAT_RE = re.compile(r'\|\s*(INFO|DEBUG|WARNING|ERROR|CRITICAL)\s*\|\s*([A-Z_]+)\s*\|')
-        _ASSET_RE = re.compile(r'\|\s*TRADE\s*\|\s*(\w+)\s*\|')
-
-        for fpath in files[-3:]:
-            try:
-                with open(fpath, "r", encoding="utf-8", errors="replace") as f:
-                    for line in f:
-                        if not line.strip():
-                            continue
-                        m = _CAT_RE.search(line)
-                        if not m:
-                            continue
-                        level = m.group(1)
-                        cat = m.group(2).strip()
-                        by_level[level] = by_level.get(level, 0) + 1
-                        by_category[cat] = by_category.get(cat, 0) + 1
-                        total += 1
-                        if level in ("ERROR", "CRITICAL"):
-                            errors[cat] = errors.get(cat, 0) + 1
-                        am = _ASSET_RE.search(line)
-                        if am:
-                            a = am.group(1)
-                            by_asset[a] = by_asset.get(a, 0) + 1
-            except OSError:
-                pass
-
-        return jsonify({
-            "total_events": total,
-            "by_level": by_level,
-            "by_category": by_category,
-            "by_asset": by_asset,
-            "errors": errors,
-            "buffer_size": total,
-            "buffer_capacity": total,
-        })
-    except Exception as exc:
-        logger.exception(f"[api_logs_stats] error: {exc}")
-        return jsonify({"total_events": 0, "by_level": {}, "by_category": {},
-                        "by_asset": {}, "errors": {}, "error": str(exc)}), 200
-
 
 @app.route("/api/health")
 def health():
-    return jsonify({"status": "ok", "version": "v2.
+    return jsonify({"status": "ok", "version": "v2.4-fixed"})
 
 
 if __name__ == "__main__":
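Reviewer note: a quick smoke test for FileBasedLoggerAdapter against a synthetic log file. This assumes hub_dashboard_service imports cleanly in isolation (the module starts a hub subscriber at import time, so treat this as a sketch rather than a ready-made test); the temp directory and sample lines are fabricated to match the adapter's own regexes.

import tempfile
from pathlib import Path

from hub_dashboard_service import FileBasedLoggerAdapter  # assumed importable

tmp = Path(tempfile.mkdtemp())
(tmp / "ranker.log").write_text(
    "[2025-01-01 12:00:00] | DEBUG | TRAINING | step=1 | loss=0.1234 | lr=0.000100 | assets=7\n"
    "[2025-01-01 12:00:01] | INFO | TRADE | BTCUSD | opened long\n"
)

adapter = FileBasedLoggerAdapter(log_dir=str(tmp))
print(adapter.get_recent(n=10))         # two entries, newest first
print(adapter.get_by_asset("BTCUSD"))   # only the TRADE line
print(adapter.get_stats()["by_level"])  # {'INFO': 1, 'DEBUG': 1}
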
ranker_logs_api.py
CHANGED
@@ -12,10 +12,63 @@ from flask import Blueprint, jsonify, request, send_file
 from typing import Optional
 from pathlib import Path
 
+import re as _re
+import json as _json
+
 ranker_logs_bp = Blueprint("ranker_logs", __name__, url_prefix="/api/ranker/logs")
 
 _logger: Optional[object] = None
 
+# ── Training-metrics extractor ─────────────────────────────────────────────────
+# Parses the structured training line emitted by ranker_logging.training_update():
+#   [ts] | DEBUG | TRAINING | step=1 | loss=0.1234 | lr=0.000100 | assets=7
+_TRAINING_RE = _re.compile(
+    r'step=(\d+)\s*\|\s*loss=([\d.]+)\s*\|\s*lr=([\d.eE+\-]+)\s*\|\s*assets=(\d+)'
+)
+_JSON_BLOB_RE = _re.compile(r'(\{.*\})\s*$')
+
+
+def _enrich_entry(entry: dict) -> dict:
+    """
+    Attach a parsed `data` dict to TRAINING log entries so the dashboard
+    KPI cards (Loss / LR / Step / Assets) always have structured values.
+    Works from both the pipe-delimited text and any trailing JSON metadata blob.
+    """
+    if entry.get("category", "").upper() != "TRAINING":
+        return entry
+    if entry.get("data"):  # already enriched (e.g. from in-memory logger)
+        return entry
+
+    msg = entry.get("message", "")
+
+    # 1. Try pipe-delimited format
+    m = _TRAINING_RE.search(msg)
+    if m:
+        entry["data"] = {
+            "step": int(m.group(1)),
+            "loss": float(m.group(2)),
+            "lr": float(m.group(3)),
+            "asset_count": int(m.group(4)),
+        }
+        return entry
+
+    # 2. Fallback: trailing JSON blob
+    jm = _JSON_BLOB_RE.search(msg)
+    if jm:
+        try:
+            blob = _json.loads(jm.group(1))
+            if "step" in blob:
+                entry["data"] = {
+                    "step": blob.get("step", 0),
+                    "loss": blob.get("loss", 0.0),
+                    "lr": blob.get("lr", 0.0),
+                    "asset_count": blob.get("asset_count", blob.get("assets", 0)),
+                }
+        except (ValueError, KeyError):
+            pass
+
+    return entry
+
 
 def init_ranker_logs_api(ranker_logger):
     """Call this from hub_dashboard_service.py during initialization."""

@@ -33,6 +86,7 @@ def get_recent_logs():
     category = request.args.get("category")
 
     entries = _logger.get_recent(n=limit, category=category)
+    entries = [_enrich_entry(e) for e in entries]
    return jsonify({
        "logs": entries,
        "count": len(entries),