from __future__ import annotations

from typing import Any, Iterable

from web.database import query_db

class FeatureService:
    """Read-side service over the L3 ``dm_player_features`` mart.

    Feature rows carry both the legacy key names and the newer L3
    (``core_``/``tac_``/``meta_``) key names; normalization mirrors each
    alias pair so callers written against either schema keep working.
    """

    @staticmethod
    def _normalize_features(row: dict[str, Any] | None) -> dict[str, Any] | None:
        """Return a copy of *row* with legacy and L3 key aliases mirrored.

        For every alias pair, whichever side is missing (``None``) is filled
        from the other side. A falsy input yields ``None``.
        """
        if not row:
            return None

        features = dict(row)

        # Legacy column name -> L3 column name. Values are mirrored in both
        # directions below, so either naming scheme can be read afterwards.
        alias_map: dict[str, str] = {
            "matches_played": "total_matches",
            "rounds_played": "total_rounds",
            "basic_avg_rating": "core_avg_rating",
            "basic_avg_rating2": "core_avg_rating2",
            "basic_avg_kd": "core_avg_kd",
            "basic_avg_adr": "core_avg_adr",
            "basic_avg_kast": "core_avg_kast",
            "basic_avg_rws": "core_avg_rws",
            "basic_avg_headshot_kills": "core_avg_hs_kills",
            "basic_headshot_rate": "core_hs_rate",
            "basic_avg_assisted_kill": "core_avg_assists",
            "basic_avg_awp_kill": "core_avg_awp_kills",
            "basic_avg_knife_kill": "core_avg_knife_kills",
            "basic_avg_zeus_kill": "core_avg_zeus_kills",
            "basic_zeus_pick_rate": "core_zeus_buy_rate",
            "basic_avg_mvps": "core_avg_mvps",
            "basic_avg_plants": "core_avg_plants",
            "basic_avg_defuses": "core_avg_defuses",
            "basic_avg_flash_assists": "core_avg_flash_assists",
            "basic_avg_first_kill": "tac_avg_fk",
            "basic_avg_first_death": "tac_avg_fd",
            "basic_first_kill_rate": "tac_fk_rate",
            "basic_first_death_rate": "tac_fd_rate",
            "basic_avg_kill_2": "tac_avg_2k",
            "basic_avg_kill_3": "tac_avg_3k",
            "basic_avg_kill_4": "tac_avg_4k",
            "basic_avg_kill_5": "tac_avg_5k",
            "util_usage_rate": "tac_util_usage_rate",
            "util_avg_nade_dmg": "tac_util_nade_dmg_per_round",
            "util_avg_flash_time": "tac_util_flash_time_per_round",
            "util_avg_flash_enemy": "tac_util_flash_enemies_per_round",
            "eco_avg_damage_per_1k": "tac_eco_dmg_per_1k",
            "eco_rating_eco_rounds": "tac_eco_kpr_eco_rounds",
            "pace_trade_kill_rate": "int_trade_kill_rate",
            "pace_avg_time_to_first_contact": "int_timing_first_contact_time",
            "score_sta": "score_stability",
            "score_bat": "score_aim",
            "score_hps": "score_clutch",
            "score_ptl": "score_pistol",
            "score_tct": "score_defense",
            "score_util": "score_utility",
            "score_eco": "score_economy",
            "score_pace": "score_pace",
            "side_rating_ct": "meta_side_ct_rating",
            "side_rating_t": "meta_side_t_rating",
            "side_kd_ct": "meta_side_ct_kd",
            "side_kd_t": "meta_side_t_kd",
            "side_win_rate_ct": "meta_side_ct_win_rate",
            "side_win_rate_t": "meta_side_t_win_rate",
            "side_first_kill_rate_ct": "meta_side_ct_fk_rate",
            "side_first_kill_rate_t": "meta_side_t_fk_rate",
            "sta_rating_volatility": "meta_rating_volatility",
            "sta_recent_form_rating": "meta_recent_form_rating",
            "sta_win_rating": "meta_win_rating",
            "sta_loss_rating": "meta_loss_rating",
            "map_best_map": "meta_map_best_map",
            "map_best_rating": "meta_map_best_rating",
            "map_worst_map": "meta_map_worst_map",
            "map_worst_rating": "meta_map_worst_rating",
            "map_pool_size": "meta_map_pool_size",
            "map_diversity": "meta_map_diversity",
        }

        # For each alias pair, fill whichever side is missing from the other.
        # (The two directions are mutually exclusive, so order is irrelevant.)
        for legacy_name, l3_name in alias_map.items():
            for src, dst in ((l3_name, legacy_name), (legacy_name, l3_name)):
                if features.get(dst) is None and features.get(src) is not None:
                    features[dst] = features[src]

        # Guarantee the two counters are numeric (0 instead of missing/None).
        for legacy_count, l3_count in (
            ("matches_played", "total_matches"),
            ("rounds_played", "total_rounds"),
        ):
            if features.get(legacy_count) is None:
                features[legacy_count] = features.get(l3_count, 0) or 0

        return features
@staticmethod
|
2026-01-29 02:21:44 +08:00
|
|
|
def get_player_features(steam_id: str) -> dict[str, Any] | None:
|
|
|
|
|
row = query_db("l3", "SELECT * FROM dm_player_features WHERE steam_id_64 = ?", [steam_id], one=True)
|
|
|
|
|
return FeatureService._normalize_features(dict(row) if row else None)
@staticmethod
|
2026-01-29 02:21:44 +08:00
|
|
|
def _attach_player_dim(players: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
|
|
|
|
if not players:
|
|
|
|
|
return players
|
|
|
|
|
steam_ids = [p["steam_id_64"] for p in players if p.get("steam_id_64")]
|
|
|
|
|
if not steam_ids:
|
|
|
|
|
return players
|
|
|
|
|
|
|
|
|
|
placeholders = ",".join("?" for _ in steam_ids)
|
|
|
|
|
dim_rows = query_db(
|
|
|
|
|
"l2",
|
|
|
|
|
f"SELECT steam_id_64, username, avatar_url FROM dim_players WHERE steam_id_64 IN ({placeholders})",
|
|
|
|
|
steam_ids,
|
|
|
|
|
)
|
|
|
|
|
dim_map = {str(r["steam_id_64"]): dict(r) for r in dim_rows} if dim_rows else {}
|
2026-01-27 21:26:07 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
# Import StatsService here to avoid circular dependency
|
|
|
|
|
from web.services.stats_service import StatsService
|
2026-01-27 21:26:07 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
out: list[dict[str, Any]] = []
|
|
|
|
|
for p in players:
|
|
|
|
|
sid = str(p.get("steam_id_64"))
|
|
|
|
|
d = dim_map.get(sid, {})
|
|
|
|
|
merged = dict(p)
|
|
|
|
|
merged.setdefault("username", d.get("username") or sid)
|
2026-01-27 21:26:07 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
# Resolve avatar URL (check local override first)
|
|
|
|
|
db_avatar_url = d.get("avatar_url")
|
|
|
|
|
merged.setdefault("avatar_url", StatsService.resolve_avatar_url(sid, db_avatar_url))
|
2026-01-27 21:26:07 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
out.append(merged)
|
|
|
|
|
return out
@staticmethod
|
2026-01-29 02:21:44 +08:00
|
|
|
def get_players_list(page: int = 1, per_page: int = 20, sort_by: str = "rating", search: str | None = None):
|
|
|
|
|
offset = (page - 1) * per_page
|
2026-01-26 21:10:42 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
sort_map = {
|
|
|
|
|
"rating": "core_avg_rating",
|
|
|
|
|
"kd": "core_avg_kd",
|
|
|
|
|
"kast": "core_avg_kast",
|
|
|
|
|
"matches": "total_matches",
|
|
|
|
|
}
|
|
|
|
|
order_col = sort_map.get(sort_by, "core_avg_rating")
|
2026-01-26 21:10:42 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
where = []
|
|
|
|
|
args: list[Any] = []
|
|
|
|
|
if search:
|
|
|
|
|
where.append("steam_id_64 IN (SELECT steam_id_64 FROM dim_players WHERE username LIKE ?)")
|
|
|
|
|
args.append(f"%{search}%")
|
|
|
|
|
where_sql = f"WHERE {' AND '.join(where)}" if where else ""
|
|
|
|
|
|
|
|
|
|
rows = query_db(
|
|
|
|
|
"l3",
|
|
|
|
|
f"SELECT * FROM dm_player_features {where_sql} ORDER BY {order_col} DESC LIMIT ? OFFSET ?",
|
|
|
|
|
args + [per_page, offset],
|
2026-01-27 21:26:07 +08:00
|
|
|
)
|
2026-01-29 02:21:44 +08:00
|
|
|
total_row = query_db("l3", f"SELECT COUNT(*) as cnt FROM dm_player_features {where_sql}", args, one=True)
|
|
|
|
|
total = int(total_row["cnt"]) if total_row else 0
|
2026-01-27 21:26:07 +08:00
|
|
|
|
2026-01-29 02:21:44 +08:00
|
|
|
players = [FeatureService._normalize_features(dict(r)) for r in rows] if rows else []
|
|
|
|
|
players = [p for p in players if p]
|
|
|
|
|
players = FeatureService._attach_player_dim(players)
|
|
|
|
|
return players, total
    @staticmethod
    def get_roster_features_distribution(target_steam_id: str) -> dict[str, Any] | None:
        """Rank one player's numeric features against a roster of peers.

        The comparison roster is chosen in priority order: the first lineup
        containing *target_steam_id*, else the most recent lineup, else the
        50 most recently active players. Returns a mapping of
        ``metric -> {val, rank, total, min, max, avg, inverted}`` or ``None``
        when no feature rows are available.
        """
        # Local imports: WebService would create a circular import at module
        # load time; json is only needed here.
        from web.services.web_service import WebService
        import json

        lineups = WebService.get_lineups()
        roster_ids: list[str] = []

        # Try to find a lineup containing this player
        if lineups:
            for lineup in lineups:
                try:
                    p_ids = [str(i) for i in json.loads(lineup.get("player_ids_json") or "[]")]
                    if str(target_steam_id) in p_ids:
                        roster_ids = p_ids
                        break
                except Exception:
                    # Malformed player_ids_json — skip this lineup (best-effort).
                    continue

        # If not found in any lineup, use the most recent lineup as a fallback context
        if not roster_ids and lineups:
            try:
                roster_ids = [str(i) for i in json.loads(lineups[0].get("player_ids_json") or "[]")]
            except Exception:
                roster_ids = []

        # If still no roster (e.g. no lineups at all), fallback to a "Global Context" (Top 50 active players)
        # This ensures we always have a distribution to compare against
        if not roster_ids:
            rows = query_db("l3", "SELECT steam_id_64 FROM dm_player_features ORDER BY last_match_date DESC LIMIT 50")
            roster_ids = [str(r['steam_id_64']) for r in rows] if rows else []

        # Ensure target player is in the list
        if str(target_steam_id) not in roster_ids:
            roster_ids.append(str(target_steam_id))

        if not roster_ids:
            return None

        placeholders = ",".join("?" for _ in roster_ids)
        rows = query_db("l3", f"SELECT * FROM dm_player_features WHERE steam_id_64 IN ({placeholders})", roster_ids)
        if not rows:
            return None

        # Normalized stats per player; a player with no feature row gets {}
        # so that later per-metric loops treat their values as 0.0.
        stats_map = {str(r["steam_id_64"]): FeatureService._normalize_features(dict(r)) for r in rows}
        target_steam_id = str(target_steam_id)
        if target_steam_id not in stats_map:
            stats_map[target_steam_id] = {}

        # Define excluded keys (metadata, text fields)
        excluded_keys = {
            "steam_id_64", "last_updated", "first_match_date", "last_match_date",
            "core_top_weapon", "int_pos_favorite_position", "meta_side_preference",
            "meta_map_best_map", "meta_map_worst_map", "tier_classification",
            "username", "avatar_url"
        }

        # Get all keys from the first available player record to determine what to calculate
        sample_keys = []
        for p in stats_map.values():
            if p:
                sample_keys = list(p.keys())
                break

        # Metrics where a smaller value ranks higher (ascending sort below).
        lower_is_better = {"int_timing_first_contact_time", "tac_avg_fd", "core_avg_match_duration"}

        result: dict[str, Any] = {}
        for m in sample_keys:
            if m in excluded_keys:
                continue

            # Check if value is numeric (using the first non-None value found)
            is_numeric = False
            for p in stats_map.values():
                val = (p or {}).get(m)
                if val is not None:
                    if isinstance(val, (int, float)):
                        is_numeric = True
                    break

            if not is_numeric:
                continue

            # Collect one value per roster member; missing/unparseable -> 0.0.
            values = []
            for p in stats_map.values():
                v = (p or {}).get(m)
                try:
                    values.append(float(v) if v is not None else 0.0)
                except (ValueError, TypeError):
                    values.append(0.0)

            target_val_raw = (stats_map.get(target_steam_id) or {}).get(m)
            try:
                target_val = float(target_val_raw) if target_val_raw is not None else 0.0
            except (ValueError, TypeError):
                target_val = 0.0

            is_reverse = m not in lower_is_better
            # Sort values. For standard metrics, higher is better (reverse=True).
            # For lower-is-better (like death rate, contact time), we want sort ascending.
            values_sorted = sorted(values, reverse=is_reverse)

            try:
                # Find rank. Index is 0-based, so +1.
                # Note: this finds the first occurrence.
                rank = values_sorted.index(target_val) + 1
            except ValueError:
                # target_val not found (float conversion mismatch): rank last.
                rank = len(values_sorted)

            result[m] = {
                "val": target_val,
                "rank": rank,
                "total": len(values_sorted),
                "min": min(values_sorted) if values_sorted else 0,
                "max": max(values_sorted) if values_sorted else 0,
                "avg": (sum(values_sorted) / len(values_sorted)) if values_sorted else 0,
                "inverted": not is_reverse,
            }

        return result
@staticmethod
|
|
|
|
|
def rebuild_all_features(min_matches: int = 5):
|
|
|
|
|
import warnings
|
|
|
|
|
|
|
|
|
|
warnings.warn(
|
|
|
|
|
"FeatureService.rebuild_all_features() 已废弃,请直接运行 database/L3/L3_Builder.py",
|
|
|
|
|
DeprecationWarning,
|
|
|
|
|
stacklevel=2,
|
|
|
|
|
)
|
|
|
|
|
return -1