solorpower_crawler/tools/check_db.py
2026-03-30 13:01:18 +09:00

67 lines
2.0 KiB
Python

import sys
from pathlib import Path

# Add parent directory to sys.path to allow importing from root
sys.path.append(str(Path(__file__).parent.parent))

from dotenv import load_dotenv

# Load environment variables (must run before the `database` import —
# presumably `database` reads Supabase credentials from the environment
# at import time; confirm against database.py)
load_dotenv()

from database import get_supabase_client
# NOTE(review): datetime/timedelta appear unused in this file — kept in case
# another part of the file (not visible here) relies on them.
from datetime import datetime, timedelta
def check_db_data(start_time: str = "2026-02-12 09:00:00",
                  end_time: str = "2026-02-12 11:15:00") -> None:
    """Print `solar_logs` rows in [start_time, end_time], grouped by timestamp.

    Connects to Supabase, fetches all rows whose ``created_at`` string falls
    inside the inclusive range, and prints one section per timestamp so the
    completeness of each snapshot (how many plants reported) is visible.

    Args:
        start_time: Inclusive lower bound for ``created_at``, as a plain
            ``YYYY-MM-DD HH:MM:SS`` string. Default covers the original
            ad-hoc check (KST 18:00 on 2026-02-12 == UTC 09:00).
        end_time: Inclusive upper bound, same format.

    Returns:
        None. All output goes to stdout.
    """
    client = get_supabase_client()
    if not client:
        print("❌ Supabase connection failed")
        return

    # NOTE(review): recover_data.py wrote KST time strings into 'created_at',
    # so whether the DB stores KST or UTC is ambiguous — this queries by raw
    # string range and assumes UTC. Confirm against the writer.
    print(f"🔍 Checking DB data from {start_time} to {end_time} (UTC)...")

    # Keep the try body minimal: only the network/query call can usefully
    # fail here; a KeyError while printing would indicate a schema bug and
    # should surface loudly instead of being reported as a query error.
    try:
        response = client.table("solar_logs").select("*") \
            .gte("created_at", start_time) \
            .lte("created_at", end_time) \
            .order("created_at") \
            .execute()
    except Exception as e:
        print(f"❌ Error querying DB: {e}")
        return

    data = response.data
    if not data:
        print("⚠️ No data found in this range.")
        return

    print(f"✅ Found {len(data)} records.\n")

    # Group rows by timestamp to see snapshot completeness
    # (number of plants that reported at each instant).
    snapshots: dict[str, list] = {}
    for row in data:
        snapshots.setdefault(row['created_at'], []).append(row)

    for ts in sorted(snapshots):
        rows = snapshots[ts]
        print(f"{ts} - {len(rows)} plants")
        for row in rows:
            print(f" - {row['plant_id']}: {row['current_kw']} kW / {row['today_kwh']} kWh")
        print("-" * 50)
# Script entry point: run the ad-hoc DB check when executed directly.
if __name__ == "__main__":
    check_db_data()