# solorpower_crawler/tests/fill_today_data.py
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from dotenv import load_dotenv
load_dotenv()
from datetime import datetime
from database import get_supabase_client, save_history
from config import get_all_plants
from crawlers.kremc import fetch_history_hourly as fetch_kremc
from crawlers.cmsolar import fetch_history_hourly as fetch_cmsolar
def cleanup_history(plant_id, today_str):
    """Delete previously inserted 'History' rows for a plant to avoid duplicates.

    Args:
        plant_id: Plant identifier, e.g. 'cmsolar-10'.
        today_str: Target date (YYYY-MM-DD, KST). Currently unused — created_at
            is stored in UTC, so filtering by a KST date string is unreliable;
            status='History' is unique to this manual script, so deleting by
            status alone is safe. Kept for interface stability.
    """
    client = get_supabase_client()
    try:
        res = (
            client.table('solar_logs')
            .delete()
            .eq('plant_id', plant_id)
            .eq('status', 'History')
            .execute()
        )
        # res.data can be None for deletes depending on client version /
        # returning mode; guard before len() to avoid a TypeError.
        deleted = len(res.data or [])
        print(f"[{plant_id}] Cleaned up {deleted} old history records.")
    except Exception as e:
        # Best-effort cleanup: the table may simply have no matching rows.
        print(f"[{plant_id}] Cleanup failed (might be empty): {e}")
def fill_today_data(today="2026-01-29"):
    """Backfill hourly generation data for one date.

    Args:
        today: Target date string (YYYY-MM-DD). Defaults to the original
            hard-coded date so existing callers are unaffected; pass another
            date to backfill a different day.

    Side effects: deletes old 'History' rows for cmsolar-10, fetches fresh
    hourly data, and saves it via save_history(). Errors are printed, not
    raised, so a partial failure does not abort the script.
    """
    plants = get_all_plants()
    kremc_plant = next((p for p in plants if p['id'] == 'kremc-05'), None)
    cmsolar_plant = next((p for p in plants if p['id'] == 'cmsolar-10'), None)

    print(f"Filling data for {today}...")

    # 1. KREMC (5호기) - Skip as it's done
    # if kremc_plant: ...

    # 2. CMSolar (10호기)
    if cmsolar_plant:
        print("\n--- Processing CMSolar (10호기) ---")
        cleanup_history('cmsolar-10', today)
        try:
            # Single-day window: start and end are the same date.
            results = fetch_cmsolar(cmsolar_plant, today, today)
            print(f"Fetched results: {results}")
            if results:
                save_history(results, 'hourly')
                print("Saved CMSolar data.")
        except Exception as e:
            print(f"CMSolar Error: {e}")
# Manual one-off backfill: only runs when executed directly, not on import.
if __name__ == "__main__":
    fill_today_data()