haneulai 2026-03-30 13:01:18 +09:00
parent 20ef587800
commit ac0cbbc44a
37 changed files with 2621 additions and 215 deletions

50
DEVELOPMENT.md Normal file

@@ -0,0 +1,50 @@
# Windows Development Environment Guide

## 1. Overview
This project is set up to run in both Windows and NAS (Linux) environments.
Testing and data recovery can be done on Windows before deploying to the NAS.

## 2. Virtual Environment (Windows)
The Windows virtual environment lives in the `venv_win` folder.

```powershell
# Activate the virtual environment
.\venv_win\Scripts\activate
# Install dependencies
pip install -r requirements.txt
```

## 3. Development Tools (tools folder)
The `tools/` folder contains utility scripts for data validation and recovery.

### 3.1 Inspecting DB Data (check_db.py)
Checks whether the Supabase data for a given time window was stored correctly.
- Queries run against UTC timestamps, so mind the KST conversion (see the sketch below).
- Usage:
```powershell
python tools/check_db.py
```
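
Timestamps in the DB are UTC while the crawler logs are in KST, so convert before picking a query window. A minimal sketch (standard library only; the 18:00 KST example mirrors the window used by `tools/check_db.py`):

```python
from datetime import datetime, timedelta, timezone

KST = timezone(timedelta(hours=9))

# 18:00 KST on 2026-02-12 is 09:00 UTC the same day
kst_dt = datetime(2026, 2, 12, 18, 0, tzinfo=KST)
print(kst_dt.astimezone(timezone.utc).isoformat())  # 2026-02-12T09:00:00+00:00
```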
### 3.2 Log-based Data Recovery (recover_from_log.py)
Parses log files such as `cron.log` and re-inserts missing data into the DB.
- Incorporates and improves on the old `clean_recover.py`.
- Usage:
```powershell
python tools/recover_from_log.py
```
## 4. Running and Testing
Run the main crawler:
```powershell
python main.py
```
Run the GUI dashboard (for testing):
```powershell
python crawler_gui.py
```
## 5. Deployment Notes
- `crawler_manager.db` and `site_data.db` are created locally; exclude them when deploying, or deploy them in a freshly initialized state.
- Check that the API keys in the `.env` file have not expired (a quick check is sketched below).
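
A quick pre-deployment sanity check that the `.env` file is actually being picked up; the key names below are placeholders, so adjust them to whatever your `.env` defines:

```python
# Hypothetical key names - replace with the ones your .env actually uses
import os
from dotenv import load_dotenv

load_dotenv()
for key in ("SUPABASE_URL", "SUPABASE_KEY"):
    print(f"{key}: {'set' if os.getenv(key) else 'MISSING'}")
```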

144
alert_manager.py Normal file

@@ -0,0 +1,144 @@
import sqlite3
import requests
from datetime import datetime
from pathlib import Path
from config import TELEGRAM_BOT_TOKEN

class AlertManager:
    """
    Telegram alerts for plant fault detection
    - Persists the state (normal/fault) in the DB to prevent duplicate alerts
    """
    def __init__(self, db_path: str = None):
        """Connect to the DB and initialize the tables"""
        if db_path is None:
            # Share the DB file with crawler_manager
            db_path = Path(__file__).parent / "crawler_manager.db"
        self.db_path = str(db_path)
        self._init_db()

    def _init_db(self):
        """Create the alert history table"""
        with sqlite3.connect(self.db_path) as conn:
            cursor = conn.cursor()
            # site_id: plant ID
            # alert_status: 'NORMAL', or 'ALERT' (fault detected and alert sent)
            # last_alert_time: when the last alert was sent
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS alert_history (
                    site_id TEXT PRIMARY KEY,
                    alert_status TEXT DEFAULT 'NORMAL',
                    last_alert_time TEXT
                )
            """)
            conn.commit()

    def send_telegram_message(self, chat_id, message):
        """Send a Telegram message"""
        if not TELEGRAM_BOT_TOKEN:
            print("  ⚠️ Telegram token is not configured.")
            return False
        if not chat_id:
            # No Chat ID configured: return quietly (callers handle logging)
            return False
        url = f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage"
        try:
            payload = {"chat_id": chat_id, "text": message}
            response = requests.post(url, json=payload, timeout=5)
            if response.status_code == 200:
                print("  🔔 Telegram alert sent")
                return True
            else:
                print(f"  ❌ Telegram send failed ({response.status_code}): {response.text}")
                return False
        except Exception as e:
            print(f"  ❌ Error while sending to Telegram: {e}")
            return False

    def check_and_alert(self, plant_info: dict, current_kw: float):
        """
        Check the generation output and alert when needed
        - Active only between 10:00 and 17:00
        - Alerts only on state transitions (prevents duplicates)
        """
        # 1. Time window check (10:00-17:00)
        now = datetime.now()
        if not (10 <= now.hour <= 17):
            return

        site_id = plant_info.get('id')
        plant_name = plant_info.get('display_name', plant_info.get('name'))
        chat_id = plant_info.get('telegram_chat_id')
        if not site_id:
            return

        # 2. Read the current state from the DB
        current_status = 'NORMAL'
        with sqlite3.connect(self.db_path) as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT alert_status FROM alert_history WHERE site_id = ?", (site_id,))
            row = cursor.fetchone()
            if row:
                current_status = row[0]
            else:
                # Create the initial row
                cursor.execute("INSERT INTO alert_history (site_id, alert_status) VALUES (?, ?)", (site_id, 'NORMAL'))
                conn.commit()

        # 3. State transition logic
        new_status = current_status

        # [Case A] Output is 0 (fault detected)
        if current_kw == 0:
            if current_status == 'NORMAL':
                # NORMAL -> ALERT: send the alert
                print(f"  🚨 [Alert] {plant_name} producing 0 kW! Trying to send alert...")
                if chat_id:
                    message = (
                        f"🚨 [URGENT] Plant fault detected!\n\n"
                        f"- Plant: {plant_name}\n"
                        f"- Status: output 0 kW\n"
                        f"- Time: {now.strftime('%Y-%m-%d %H:%M:%S')}"
                    )
                    if self.send_telegram_message(chat_id, message):
                        new_status = 'ALERT'
                    else:
                        print(f"  ⚠️ {plant_name}: alert failed (bad Chat ID)")
                        # Should the state become ALERT even though sending failed?
                        # No: stay NORMAL so the next cycle retries.
                else:
                    print(f"  ⚠️ {plant_name}: no Chat ID configured (check config.py)")
            else:
                # Already in ALERT: skip the duplicate alert
                pass
        # [Case B] Output > 0 (recovered)
        else:
            if current_status == 'ALERT':
                # ALERT -> NORMAL: reset the state
                print(f"  ✅ [Alert] {plant_name} recovered ({current_kw} kW)")
                # A recovery alert is optional (currently omitted)
                new_status = 'NORMAL'

        # 4. Persist state changes
        if new_status != current_status:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    UPDATE alert_history
                    SET alert_status = ?, last_alert_time = ?
                    WHERE site_id = ?
                """, (new_status, now.isoformat(), site_id))
                conn.commit()
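
A small way to exercise the class above by hand (illustrative only, not part of the commit; the `plant_info` keys mirror what `get_all_plants()` in config.py returns):

```python
from alert_manager import AlertManager

mgr = AlertManager()
plant_info = {
    'id': 'nrems-01',
    'display_name': 'Unit 1',
    'telegram_chat_id': None,  # without a Chat ID it only logs, never sends
}
mgr.check_and_alert(plant_info, current_kw=0.0)   # inside 10:00-17:00: logs the missing Chat ID
mgr.check_and_alert(plant_info, current_kw=12.3)  # recovery path: resets the state if it was ALERT
```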

config.py

@@ -2,6 +2,17 @@
# ==========================================
# config.py - multi-tenant configuration
# ==========================================

# ---------------------------------------------------------
# [Proxy settings - for the cloud migration]
# Lets hosts on an external network (e.g. an Oracle Cloud server)
# borrow the NAS's internet connection.
# ---------------------------------------------------------
USE_PROXY = False  # Set to True to route all crawling through the proxy below.
PROXY_URL = "http://100.83.7.81:3128"
PROXIES = {
    "http": PROXY_URL,
    "https": PROXY_URL,
}

# ---------------------------------------------------------
# [System constants] URLs and endpoints for each crawler system
# ---------------------------------------------------------
@@ -35,6 +46,13 @@ SYSTEM_CONSTANTS = {
    }
}

# ---------------------------------------------------------
# [Telegram bot settings]
# ---------------------------------------------------------
# Bot token created via https://t.me/BotFather
# A user must message the bot first before their Chat ID can be looked up.
TELEGRAM_BOT_TOKEN = '8273363609:AAEGv4abJSORNkap6XO_mqbnBKemBOEjugI'

# ---------------------------------------------------------
# [Company list] company > plant hierarchy
# ---------------------------------------------------------
@@ -42,6 +60,7 @@ COMPANIES = [
    {
        'company_id': 'sunwind',
        'company_name': '태양과바람',
        'telegram_chat_id': -5217904468,  # TODO: message the bot, confirm the Chat ID, and enter it here
        'plants': [
            # NREMS family - units 1 and 2 (handled separately)
            # the crawler assigns the ids 'nrems-01' and 'nrems-02' internally
@@ -176,6 +195,7 @@ def get_all_plants():
    for company in COMPANIES:
        company_id = company.get('company_id', '')
        company_name = company.get('company_name', '')
        telegram_chat_id = company.get('telegram_chat_id')
        for plant in company.get('plants', []):
            plant_type = plant.get('type', '')
@@ -184,6 +204,7 @@ def get_all_plants():
            plant_info = {
                'company_id': company_id,
                'company_name': company_name,
                'telegram_chat_id': telegram_chat_id,
                'id': plant.get('id', ''),  # unique ID for the DB
                'name': plant.get('name', ''),
                'display_name': plant.get('display_name', plant.get('name', '')),
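
The comment above notes that a user (or group) must message the bot before its Chat ID can be looked up. A hedged one-off sketch using the standard Bot API `getUpdates` method (not part of this commit):

```python
import requests
from config import TELEGRAM_BOT_TOKEN

# Message the bot first, then run this to list the chat IDs it has seen
resp = requests.get(f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/getUpdates", timeout=10)
for update in resp.json().get("result", []):
    chat = (update.get("message") or {}).get("chat")
    if chat:
        print(chat["id"], chat.get("title") or chat.get("username"))
```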

crawler_manager.py

@@ -2,7 +2,14 @@
# ==========================================
# crawler_manager.py - crawl scheduling middleware
# ==========================================
# To save NAS resources, learns each site's update pattern in SQLite
# and writes to the DB only when the data has actually changed
#
# [Design principles]
# - Crawling (the HTTP request) itself is always allowed (except at night)
#   → we never know when the remote server updates, so we must poll regularly
# - DB writes happen only when the data actually changed
#   → prevents duplicate rows and saves NAS I/O
# - Update-pattern learning is a secondary feature (for logging)

import sqlite3
from datetime import datetime, timedelta
@@ -11,10 +18,11 @@ from pathlib import Path

class CrawlerManager:
    """
    Manager that optimizes crawl DB writes
    - should_run: only checks for night time (21:00-05:00); False skips the crawl entirely
    - should_save: checks whether the data actually changed; False skips the DB write
    - analyze_and_optimize: learns update patterns (logging/monitoring)
    """

    def __init__(self, db_path: str = None):
@@ -34,20 +42,45 @@ class CrawlerManager:
        """Create the tables if they do not exist"""
        with sqlite3.connect(self.db_path) as conn:
            cursor = conn.cursor()
            cursor.executescript("""
                CREATE TABLE IF NOT EXISTS site_rules (
                    site_id TEXT PRIMARY KEY,
                    status TEXT DEFAULT 'LEARNING',
                    target_minute INTEGER DEFAULT -1,
                    start_date TEXT,
                    last_run TEXT
                );
                CREATE TABLE IF NOT EXISTS site_data (
                    site_id TEXT PRIMARY KEY,
                    kw REAL,
                    today_kwh REAL,
                    updated_at TEXT
                );
                CREATE TABLE IF NOT EXISTS update_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    site_id TEXT,
                    detected_minute INTEGER,
                    detected_at TEXT
                );
            """)
            conn.commit()

    def _get_connection(self) -> sqlite3.Connection:
        """Return a SQLite connection (with a timeout)"""
        return sqlite3.connect(self.db_path, timeout=10.0)

    def _cleanup_old_history(self):
        """Prune old history (delete rows older than 30 days)"""
        try:
            with self._get_connection() as conn:
                cursor = conn.cursor()
                limit_date = (datetime.now() - timedelta(days=30)).isoformat()
                cursor.execute("DELETE FROM update_history WHERE detected_at < ?", (limit_date,))
                conn.commit()
        except Exception as e:
            print(f"⚠️ [CrawlerManager] history cleanup failed: {e}")

    def register_site(self, site_id: str) -> bool:
        """
@@ -62,12 +95,10 @@ class CrawlerManager:
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT 1 FROM site_rules WHERE site_id = ?", (site_id,))
            if cursor.fetchone():
                return False
            today = datetime.now().strftime("%Y-%m-%d")
            cursor.execute("""
                INSERT INTO site_rules (site_id, status, target_minute, start_date, last_run)
@@ -80,89 +111,182 @@ class CrawlerManager:

    def should_run(self, site_id: str) -> bool:
        """
        Decide whether this site should be crawled (HTTP request) right now.

        [What changed]
        Before: in the OPTIMIZED state, crawling was allowed only inside a specific minute window.
        Problem: missing the remote server's update window dropped time-series data.
        Now: returns False only at night (21:00-05:00); crawling is always allowed otherwise.
        Whether to write to the DB is decided separately by should_save().

        Args:
            site_id: site identifier
        Returns:
            bool: whether to crawl (False at night)
        """
        now = datetime.now()
        current_hour = now.hour
        current_minute = now.minute

        # Night mode: no crawling from 21:00 to 05:00 (no generation then)
        if current_hour >= 21 or current_hour < 5:
            return False

        # History cleanup (once, at 05:00 sharp)
        if current_minute == 0 and current_hour == 5:
            self._cleanup_old_history()

        # Auto-register sites that are not registered yet
        self.register_site(site_id)

        # Always allow the crawl (should_save decides whether the data changed)
        return True

    def should_save(self, site_id: str, current_data: dict) -> bool:
        """
        Decide whether the collected data should be written to the DB.
        Returns True only when the remote data differs from the previous sample,
        preventing duplicate rows and saving NAS I/O.

        [Save conditions]
        - today_kwh (today's generation) increased: always save (key metric)
        - kw (current output) changed: save (reflects real-time state)
        - 1+ hours since the last save: force a save (heartbeat)
          → guarantees at least one row per hour even if the data stalls

        Args:
            site_id: site identifier
            current_data: {'kw': float, 'today': float}
        Returns:
            bool: True if the row should be written to the DB
        """
        new_kw = float(current_data.get('kw', 0))
        new_today = float(current_data.get('today', 0))
        now = datetime.now()

        with self._get_connection() as conn:
            cursor = conn.cursor()
            # Fetch the previous sample
            cursor.execute(
                "SELECT kw, today_kwh, updated_at FROM site_data WHERE site_id = ?",
                (site_id,)
            )
            row = cursor.fetchone()

            should_save = False
            if not row:
                # First sample → always save
                should_save = True
            else:
                last_kw, last_today, last_updated_at = row
                # 1. Save if today's generation increased
                if new_today - last_today > 0.001:
                    should_save = True
                # 2. Save if the current output (kW) changed
                elif abs(new_kw - last_kw) > 0.001:
                    should_save = True
                # 3. Forced heartbeat save if nothing was stored for 1+ hours
                elif last_updated_at:
                    try:
                        last_dt = datetime.fromisoformat(last_updated_at)
                        if now - last_dt >= timedelta(hours=1):
                            should_save = True
                    except (ValueError, TypeError):
                        should_save = True

            if should_save:
                # Refresh the cached state
                cursor.execute("""
                    INSERT INTO site_data (site_id, kw, today_kwh, updated_at)
                    VALUES (?, ?, ?, ?)
                    ON CONFLICT(site_id) DO UPDATE SET
                        kw = excluded.kw,
                        today_kwh = excluded.today_kwh,
                        updated_at = excluded.updated_at
                """, (site_id, new_kw, new_today, now.isoformat()))
                conn.commit()

        return should_save

    def check_data_change(self, site_id: str, current_data: dict) -> bool:
        """
        [Backward compatibility] Alias for should_save.
        Kept so the existing main.py code keeps working.
        Delegates to should_save; pattern analysis runs alongside.
        """
        return self.should_save(site_id, current_data)

    def analyze_and_optimize(self, site_id: str):
        """
        Record and analyze update patterns (monitoring/logging only).
        Call this when a data change is detected, to learn the remote server's
        update pattern. The result no longer drives the crawl schedule; it is
        kept purely as reference data for later analysis or visualization.
        """
        now = datetime.now()
        current_minute = now.minute

        # Append to the history
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                INSERT INTO update_history (site_id, detected_minute, detected_at)
                VALUES (?, ?, ?)
            """, (site_id, current_minute, now.isoformat()))

            # Fetch the most recent entries (up to 5)
            cursor.execute("""
                SELECT detected_minute
                FROM update_history
                WHERE site_id = ?
                ORDER BY id DESC
                LIMIT 5
            """, (site_id,))
            minutes = [r[0] for r in cursor.fetchall()]
            conn.commit()

        # Pattern analysis (needs at least 3 samples)
        if len(minutes) < 3:
            return

        recent = minutes[:3]
        avg = sum(recent) / len(recent)

        # If the spread stays within 5 minutes, the pattern is stable (recorded as reference only)
        is_consistent = all(abs(m - avg) <= 5 for m in recent)
        if is_consistent:
            target = int(avg)
            # Not used for scheduling, but the state is still recorded (for monitoring)
            self._record_pattern(site_id, target)
        else:
            print(f"  📊 [CrawlerManager] '{site_id}' pattern still settling... recent: {recent}")

    def _record_pattern(self, site_id: str, detected_minute: int):
        """
        Persist a detected update pattern in the DB (monitoring only).
        Has no effect on the crawl schedule.
        """
        if not 0 <= detected_minute <= 59:
            return
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT status, target_minute FROM site_rules WHERE site_id = ?", (site_id,))
            row = cursor.fetchone()
            if row and row[0] == 'OPTIMIZED' and abs(row[1] - detected_minute) <= 2:
                return  # the same pattern is already recorded
            cursor.execute("""
                UPDATE site_rules
                SET status = 'OPTIMIZED', target_minute = ?
@@ -171,11 +295,15 @@ class CrawlerManager:
            conn.commit()
            if cursor.rowcount > 0:
                print(f"  📌 [CrawlerManager] '{site_id}' update pattern detected: around minute {detected_minute} each hour (reference only)")

    def update_optimization(self, site_id: str, detected_minute: int) -> bool:
        """
        [Backward compatibility] Pattern-recording shim.
        Delegates to _record_pattern.
        """
        self._record_pattern(site_id, detected_minute)
        return True

    def record_run(self, site_id: str):
        """
@@ -274,96 +402,55 @@ class CrawlerManager:

# ==========================================
# main.py integration (unchanged - backward compatible)
# ==========================================
#
# Call flow as used from main.py:
#
# 1. should_run(site_id)
#    → False at night (skip the crawl entirely)
#    → True otherwise (always make the HTTP request)
#
# 2. Run the crawl (HTTP request)
#
# 3. record_run(item_id)  ← record the successful crawl
#
# 4. check_data_change(item_id, item)  ← same as should_save
#    → True: data changed → write to the DB
#    → False: no change → skip the DB write
#
# 5. analyze_and_optimize(item_id)  ← pattern learning (optional)
#
# ==========================================
# Cron setup (every 10 minutes recommended)
# ==========================================
# */10 * * * * cd /volume1/dev/SolorPower/crawler && \
#   /volume1/dev/SolorPower/crawler/venv/bin/python main.py >> cron.log 2>&1
# ==========================================

if __name__ == "__main__":
    manager = CrawlerManager()
    print("=== CrawlerManager test ===\n")

    test_sites = ["nrems-01", "nrems-02", "kremc-05"]
    for site_id in test_sites:
        manager.register_site(site_id)

    print("\n[Registered sites]")
    for site in manager.get_all_sites():
        print(f"  {site['site_id']}: {site['status']} (target: minute {site['target_minute']})")

    print("\n[should_run test]")
    for site_id in test_sites:
        result = manager.should_run(site_id)
        print(f"  {site_id}: {'✅ run' if result else '⏭️ skip (night)'}")

    print("\n[should_save test]")
    test_data = {'kw': 15.5, 'today': 120.0}
    for site_id in test_sites:
        result = manager.should_save(site_id, test_data)
        print(f"  {site_id}: {'✅ save' if result else '⏭️ skip (no change)'}")

    print("\n=== Test complete ===")
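
To make the change-detection contract concrete, a short illustrative run (not part of the commit; assumes a fresh `crawler_manager.db` with no cached sample for nrems-01):

```python
from crawler_manager import CrawlerManager

mgr = CrawlerManager()
sample = {'kw': 10.0, 'today': 50.0}

print(mgr.should_save("nrems-01", sample))                      # True  - first sample is always stored
print(mgr.should_save("nrems-01", sample))                      # False - identical data, write skipped
print(mgr.should_save("nrems-01", {'kw': 10.0, 'today': 50.4})) # True  - today_kwh increased
```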

View File

@@ -18,7 +18,22 @@ def safe_float(value):

def create_session():
    """Create a requests session with default settings"""
    import os
    import sys
    # Make the parent directory's config.py importable
    sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    try:
        from config import USE_PROXY, PROXIES
    except ImportError:
        USE_PROXY = False
        PROXIES = None

    session = requests.Session()
    if USE_PROXY and PROXIES:
        session.proxies.update(PROXIES)
    return session

def get_default_headers():

database.py

@@ -113,8 +113,8 @@ def save_to_supabase(data_list):
                "plant_id": plant_id,
                "date": kst_date_str,
                "total_generation": today_val,
                "created_at": kst_now
                # updated_at is set to NOW() automatically (DB default)
            })

        if daily_records:
@@ -213,10 +213,15 @@ def save_history(data_list, data_type='hourly'):
            # Ensure timezone is sent to Supabase to prevent UTC assumption
            final_created_at = dt_ts.isoformat()

            if item.get('current_kw') is not None:
                current_kw = float(item['current_kw'])
            else:
                current_kw = float(item.get('generation_kwh', 0))

            records.append({
                'plant_id': item['plant_id'],
                'created_at': final_created_at,
                'current_kw': current_kw,
                'today_kwh': float(item.get('generation_kwh', 0)),
                'status': 'History'
            })
@@ -246,10 +251,6 @@ def save_history(data_list, data_type='hourly'):
        # use upsert
        if data_type == 'hourly':
            client.table(table_name).insert(records).execute()
        elif data_type == 'daily':
            client.table(table_name).upsert(records, on_conflict="plant_id, date").execute()
@@ -269,23 +270,17 @@ def save_history(data_list, data_type='hourly'):
        monthly_upserts = []
        for (pid, m_key) in updated_months:
            # 2. Aggregate the month's daily totals (DB aggregation)
            import calendar
            try:
                year, month_int = map(int, m_key.split('-'))
                _, last_day = calendar.monthrange(year, month_int)
            except Exception:
                last_day = 31

            d_res = client.table("daily_stats").select("total_generation") \
                .eq("plant_id", pid) \
                .gte("date", f"{m_key}-01") \
                .lte("date", f"{m_key}-{last_day}") \
                .execute()

            total_gen = sum(r['total_generation'] or 0 for r in d_res.data)
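
The fix above replaces the hard-coded `-31` upper bound with the month's real length; `calendar.monthrange` returns the weekday of day 1 and the number of days:

```python
import calendar

_, last_day = calendar.monthrange(2026, 2)
print(last_day)  # 28 - 2026 is not a leap year
```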

49
main.py

@@ -17,6 +17,7 @@ from config import get_all_plants
from database import save_to_supabase, save_to_console
from crawlers import get_crawler
from crawler_manager import CrawlerManager
from alert_manager import AlertManager

# Initialize the smart scheduler
crawler_manager = CrawlerManager()
@@ -52,6 +53,9 @@ def integrated_monitoring(save_to_db=True, company_filter=None, force_run=False)
    total_results = []
    skipped_count = 0

    # Initialize the alert manager
    alert_manager = AlertManager()

    for plant in all_plants:
        plant_type = plant['type']
        plant_name = plant.get('display_name', plant.get('name', 'Unknown'))
@@ -65,19 +69,12 @@
        else:
            site_ids = [plant.get('id', '')]

        # Night-time check (unless force_run)
        if not force_run:
            # One representative site_id is enough (the condition is the same for every site)
            representative_id = site_ids[0] if site_ids else ''
            if representative_id and not crawler_manager.should_run(representative_id):
                print(f" ⏭️ [{plant_type.upper()}] {plant_name} skipped (night time)")
                skipped_count += 1
                continue
@@ -88,17 +85,37 @@
            if crawler_func:
                data = crawler_func(plant)
                if data:
                    # Inject company_id/company_name + decide whether the data changed
                    for item in data:
                        item['company_id'] = company_id
                        item['company_name'] = company_name
                        item['_data_changed'] = False  # default: do not save

                        item_id = item.get('id', '')

                        # Alerts are always checked (to catch 0 kW)
                        alert_info = plant.copy()
                        alert_info['id'] = item_id
                        alert_info['name'] = item.get('name', plant_name)
                        alert_manager.check_and_alert(alert_info, item.get('kw', 0))

                        if item_id:
                            # Record the successful crawl (always)
                            crawler_manager.record_run(item_id)

                            # Did the data change?
                            # → detects whether the remote server actually updated
                            # → True: candidate for the DB / False: duplicate write avoided
                            if crawler_manager.check_data_change(item_id, item):
                                crawler_manager.analyze_and_optimize(item_id)
                                item['_data_changed'] = True
                            else:
                                print(f" ⏸️ [{item_id}] no data change, skipping DB write")

                    # Only changed items are queued for the DB
                    for item in data:
                        if item.pop('_data_changed', False):
                            total_results.append(item)
            else:
                print(f" ⚠️ Unknown crawler type: {plant_type}")
        except Exception as e:
@@ -120,7 +137,7 @@
    print("-" * 75)
    if skipped_count > 0:
        print(f"📊 Skipped sites: {skipped_count} (night time)")

    if total_results:
        # Console output
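
For manual testing, the flags in the signature shown above suggest a dry run; a hedged sketch (whether `save_to_db=False` fully limits output to the console depends on code not shown in this diff):

```python
# Run one pass, ignoring the night-time gate; save_to_db=False is intended
# to skip the Supabase write path (per the parameter name above)
from main import integrated_monitoring

integrated_monitoring(save_to_db=False, force_run=True)
```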

139
scripts_archive/README.md Normal file

@@ -0,0 +1,139 @@
# Scripts Archive - February Data Patch

## 📅 When
February 27, 2026

## 🎯 Goal
Fully crawl February data for Unit 5 (kremc-05) and Unit 9 (nrems-09) and store it in the Supabase DB

## ⚠️ Problems Found
1. **Duplicate rows**: hourly data was stored 5-6 times over
2. **current_kw = 0**: historical rows stored current_kw as 0, so the web chart stayed empty

## ✅ Work Completed

### 1. Data crawl and store
- **Targets**: Unit 5, Unit 9
- **Period**: February 1 - February 27, 2026
- **Data types**: hourly, daily, monthly

### 2. Duplicate cleanup
- Found and fixed hourly rows stored multiple times
- Kept only the newest record per hour slot
- **Unit 5**: 2,949 duplicates removed
- **Unit 9**: 2,839 duplicates removed

### 3. current_kw fix
- **Problem**: historical rows had current_kw = 0, so the web chart would not render
- **Cause**: backfill crawls stored the current_kw field as 0
- **Fix**: set current_kw to the today_kwh (hourly generation) value
- **Unit 5**: 263 rows updated
- **Unit 9**: 308 rows updated

### 4. Final results

#### Unit 5 (kremc-05)
- ✅ Hourly rows: 646
- ✅ Daily rows: 27 (2/1-2/27)
- ✅ February total generation: 3,702 kWh
- ✅ Daily average: 137.11 kWh
- ✅ Monthly stats: auto-refreshed

#### Unit 9 (nrems-09)
- ✅ Hourly rows: 646
- ✅ Daily rows: 27 (2/1-2/27)
- ✅ February total generation: 9,230 kWh
- ✅ Daily average: 341.85 kWh
- ✅ Monthly stats: auto-refreshed

## 📁 Archived Scripts

### 1. `fetch_february.py`
- **Purpose**: crawl the full February data for Units 5 and 9
- **Does**:
  - collect hourly data (2/1-2/27)
  - collect daily data (2/1-2/27)
  - store everything in the Supabase DB

### 2. `verify_february_data.py`
- **Purpose**: verify the February data stored in the Supabase DB
- **Does**:
  - count hourly/daily/monthly rows
  - aggregate generation statistics
  - print sample rows

### 3. `check_feb_gaps.py`
- **Purpose**: find per-day gaps in the February hourly data
- **Does**:
  - count the hourly rows for each day from Feb 1 to Feb 27
  - report fully and partially missing days

### 4. `clean_feb_duplicates.py`
- **Purpose**: clean up duplicated hourly rows
- **Does**:
  - detect duplicate records sharing a plant_id and hour slot
  - keep only the newest record, delete the rest
  - print per-day dedup counts

### 5. `fill_today_feb.py`
- **Purpose**: backfill the hour slots missing on February 27 (the current day)
- **Does**:
  - check which hour slots already exist in the DB
  - crawl only the missing slots
  - update the daily stats

### 6. `check_current_kw.py`
- **Purpose**: inspect current_kw values in the stored hourly data
- **Does**:
  - query the hourly rows for a given date
  - compare current_kw against today_kwh
  - count rows with current_kw = 0

### 7. `update_current_kw.py`
- **Purpose**: set current_kw to today_kwh for the February rows
- **Does**:
  - find rows where current_kw is 0 but today_kwh is not
  - update current_kw to the today_kwh value
  - print per-day update counts
- **Background**: fixes the empty web chart caused by backfilled rows storing current_kw as 0

### 8. `test_api.py`
- **Purpose**: test the API endpoint
- **Does**:
  - call the /plants/{plant_id}/stats/today endpoint
  - check the hourly-data response
  - print current_kw and today_kwh values

### 9. `verify_feb_final.py`
- **Purpose**: final verification of the February data (short version)
- **Does**:
  - count hourly/daily/monthly rows
  - summarize generation statistics
  - final check of the DB state

## 🔧 Usage
Run every script inside the crawler virtual environment:

```powershell
cd d:\dev\etc\SolorPower\crawler
.\venv_win\Scripts\Activate.ps1

# Example runs
python scripts_archive/fetch_february.py
python scripts_archive/verify_february_data.py
python scripts_archive/check_feb_gaps.py
python scripts_archive/clean_feb_duplicates.py
python scripts_archive/fill_today_feb.py
```

## 📝 Notes
- These scripts were written for a one-off patch job
- Use `main.py` and `crawler_manager.py` for regular crawling
- They can serve as a starting point for similar data patches later

## ⚠️ Cautions
- `clean_feb_duplicates.py` deletes data; use with care
- Back up the DB before removing duplicates
- Mind the KST (UTC+9) timezone when filtering by time (see the sketch below)
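
A hedged example of the KST-day filter these scripts use (Supabase client obtained as in `database.get_supabase_client`):

```python
# Query one KST day of hourly rows; the +09:00 offset keeps Supabase
# from interpreting the boundaries as UTC
date_str = "2026-02-25"
rows = (client.table("solar_logs")
        .select("created_at, current_kw, today_kwh")
        .eq("plant_id", "kremc-05")
        .gte("created_at", f"{date_str}T00:00:00+09:00")
        .lt("created_at", f"{date_str}T23:59:59+09:00")
        .execute()
        .data)
```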

89
scripts_archive/check_current_kw.py Normal file

@@ -0,0 +1,89 @@
"""
Inspect current_kw in the hourly rows stored in the DB
"""
import sys
import os
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()

if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from database import get_supabase_client

def check_current_kw(plant_id, plant_name, date_str):
    """Inspect the current_kw values in the hourly rows for one date"""
    print(f"\n{'='*70}")
    print(f"🔍 [{plant_name}] checking hourly data for {date_str}")
    print(f"{'='*70}")

    client = get_supabase_client()
    if not client:
        return

    # Fetch the hourly rows for that date
    result = client.table("solar_logs") \
        .select("created_at, current_kw, today_kwh") \
        .eq("plant_id", plant_id) \
        .gte("created_at", f"{date_str}T00:00:00+09:00") \
        .lt("created_at", f"{date_str}T23:59:59+09:00") \
        .order("created_at", desc=False) \
        .limit(30) \
        .execute()

    if not result.data:
        print("  ❌ no data")
        return

    print(f"  total {len(result.data)} rows (showing at most 30)\n")
    print(f"  {'time':<20} | {'current_kw':>12} | {'today_kwh':>12}")
    print(f"  {'-'*20}+{'-'*14}+{'-'*14}")

    current_kw_zero_count = 0
    current_kw_nonzero_count = 0
    for record in result.data:
        created_at = record['created_at']
        current_kw = record.get('current_kw', 0) or 0
        today_kwh = record.get('today_kwh', 0) or 0
        if current_kw == 0:
            current_kw_zero_count += 1
        else:
            current_kw_nonzero_count += 1
        print(f"  {created_at:<20} | {current_kw:>12.2f} | {today_kwh:>12.2f}")

    print(f"\n  📊 Stats:")
    print(f"    current_kw = 0: {current_kw_zero_count} rows")
    print(f"    current_kw ≠ 0: {current_kw_nonzero_count} rows")

    if current_kw_zero_count == len(result.data):
        print(f"\n  ⚠️ every current_kw value is 0!")
        print(f"  ⚠️ historical rows store today_kwh (hourly generation) instead of current_kw.")
        print(f"  ⚠️ the chart should use today_kwh.")

def main():
    plants = [
        ('kremc-05', 'Unit 5'),
        ('nrems-09', 'Unit 9')
    ]
    dates = ['2026-02-25', '2026-02-01']
    for plant_id, plant_name in plants:
        for date_str in dates:
            check_current_kw(plant_id, plant_name, date_str)
        print(f"\n{'='*70}\n")

if __name__ == "__main__":
    main()

91
scripts_archive/check_feb_gaps.py Normal file

@@ -0,0 +1,91 @@
"""
February data-gap check script
Pinpoints exactly which dates are missing data
"""
import sys
import os
from datetime import datetime, timedelta
from dotenv import load_dotenv

load_dotenv()

if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from database import get_supabase_client

def check_gaps(plant_id, plant_name):
    """Check each date for missing hourly data"""
    print(f"\n{'='*70}")
    print(f"🔍 [{plant_name}] checking the February hourly data for gaps")
    print(f"{'='*70}")

    client = get_supabase_client()
    if client is None:
        return

    # Check February 1 through 27
    start = datetime(2026, 2, 1)
    end = datetime(2026, 2, 27)

    current = start
    missing_dates = []
    partial_dates = []

    while current <= end:
        date_str = current.strftime("%Y-%m-%d")

        # Count the hourly rows for this date
        result = client.table("solar_logs") \
            .select("*", count='exact') \
            .eq("plant_id", plant_id) \
            .gte("created_at", f"{date_str}T00:00:00+09:00") \
            .lt("created_at", f"{(current + timedelta(days=1)).strftime('%Y-%m-%d')}T00:00:00+09:00") \
            .execute()

        count = result.count if hasattr(result, 'count') else len(result.data)

        if count == 0:
            missing_dates.append(date_str)
            print(f"  ❌ {date_str}: no data")
        elif count < 24:
            partial_dates.append((date_str, count))
            print(f"  ⚠️ {date_str}: {count} rows (incomplete)")
        else:
            print(f"  ✅ {date_str}: {count} rows")

        current += timedelta(days=1)

    print(f"\n📊 Summary:")
    print(f"  fully missing: {len(missing_dates)} days")
    print(f"  partially missing: {len(partial_dates)} days")

    if missing_dates:
        print(f"\n  Missing dates:")
        for d in missing_dates:
            print(f"    - {d}")

    if partial_dates:
        print(f"\n  Partially missing dates:")
        for d, c in partial_dates:
            print(f"    - {d}: {c}/24 rows")

def main():
    plants = [
        ('kremc-05', 'Unit 5'),
        ('nrems-09', 'Unit 9')
    ]
    for plant_id, plant_name in plants:
        check_gaps(plant_id, plant_name)
        print(f"\n{'='*70}\n")

if __name__ == "__main__":
    main()

108
scripts_archive/clean_feb_duplicates.py Normal file

@@ -0,0 +1,108 @@
"""
February hourly-data dedup script
Removes duplicate rows that share a plant_id and hour slot
"""
import sys
import os
from datetime import datetime, timedelta
from dotenv import load_dotenv

load_dotenv()

if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from database import get_supabase_client

def clean_duplicates(plant_id, plant_name):
    """Remove duplicates - keep only the newest record per hour slot"""
    print(f"\n{'='*70}")
    print(f"🧹 [{plant_name}] cleaning duplicate rows...")
    print(f"{'='*70}")

    client = get_supabase_client()
    if client is None:
        return

    # February 1 through 27
    start = datetime(2026, 2, 1)
    end = datetime(2026, 2, 27)

    total_deleted = 0
    current = start

    while current <= end:
        date_str = current.strftime("%Y-%m-%d")

        # Fetch all hourly rows for this date
        result = client.table("solar_logs") \
            .select("*") \
            .eq("plant_id", plant_id) \
            .gte("created_at", f"{date_str}T00:00:00+09:00") \
            .lt("created_at", f"{(current + timedelta(days=1)).strftime('%Y-%m-%d')}T00:00:00+09:00") \
            .order("created_at", desc=False) \
            .execute()

        if not result.data:
            current += timedelta(days=1)
            continue

        # Group by hour slot (using the hour part of created_at)
        hour_groups = {}
        for record in result.data:
            # Keep only date+hour from created_at (drop minutes/seconds)
            ts = record['created_at']
            hour_key = ts[:13]  # e.g. 2026-02-01T00
            if hour_key not in hour_groups:
                hour_groups[hour_key] = []
            hour_groups[hour_key].append(record)

        # Dedup each hour slot (keep only the most recent id)
        deleted_count = 0
        for hour_key, records in hour_groups.items():
            if len(records) > 1:
                # Sort by id (the largest id is the newest)
                records.sort(key=lambda x: x['id'], reverse=True)
                # Delete everything except the first (newest)
                for old_record in records[1:]:
                    try:
                        client.table("solar_logs").delete().eq("id", old_record['id']).execute()
                        deleted_count += 1
                    except Exception as e:
                        print(f"  ⚠️ delete failed (id: {old_record['id']}): {e}")

        if deleted_count > 0:
            print(f"  🧹 {date_str}: removed {deleted_count} duplicates ({len(hour_groups)} hour slots kept)")
            total_deleted += deleted_count

        current += timedelta(days=1)

    print(f"\n✅ [{plant_name}] removed {total_deleted} duplicates in total")

def main():
    plants = [
        ('kremc-05', 'Unit 5'),
        ('nrems-09', 'Unit 9')
    ]
    print("\n" + "="*70)
    print("🧹 Starting the February hourly dedup")
    print("="*70)
    for plant_id, plant_name in plants:
        clean_duplicates(plant_id, plant_name)
    print("\n" + "="*70)
    print("🎉 Dedup complete!")
    print("="*70 + "\n")

if __name__ == "__main__":
    main()

135
scripts_archive/fetch_february.py Normal file

@@ -0,0 +1,135 @@
"""
February data crawl script
Collects the February daily/hourly data for Unit 5 (kremc-05) and Unit 9 (nrems-09).
"""
import sys
import os
import importlib
from datetime import datetime
from dotenv import load_dotenv

# Load .env
load_dotenv()

# Work around Windows encoding issues
if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

# Add the project root to the path
current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from config import get_all_plants
from database import save_history

def get_plant_config(target_id):
    """Fetch a plant's configuration"""
    plants = get_all_plants()
    for p in plants:
        if p.get('id') == target_id:
            return p
    return None

def fetch_february_data(plant_config):
    """Collect the February data"""
    plant_id = plant_config['id']
    plant_type = plant_config['type']
    plant_name = plant_config['name']

    print(f"\n{'='*60}")
    print(f"🚀 [{plant_name}] starting February data collection ({plant_id})")
    print(f"   type: {plant_type}")
    print(f"{'='*60}")

    # Import the crawler module dynamically
    try:
        crawler_module = importlib.import_module(f"crawlers.{plant_type}")
    except ImportError:
        print(f"❌ crawler module not found: crawlers/{plant_type}.py")
        return

    # February date range
    now = datetime.now()
    year = now.year
    # From February 1 up to today (or the end of February)
    start_date = f"{year}-02-01"
    # Use today if it is still February, else the last day of February
    if now.month == 2:
        end_date = now.strftime("%Y-%m-%d")
    else:
        # Last day of February (leap years handled)
        if year % 4 == 0 and (year % 100 != 0 or year % 400 == 0):
            end_date = f"{year}-02-29"
        else:
            end_date = f"{year}-02-28"

    print(f"\n📅 collection period: {start_date} ~ {end_date}")

    # 1. Hourly data
    try:
        print(f"\n⏳ [Hourly] collecting hourly data...")
        if hasattr(crawler_module, 'fetch_history_hourly'):
            hourly_data = crawler_module.fetch_history_hourly(plant_config, start_date, end_date)
            if hourly_data:
                print(f"  ✅ collected {len(hourly_data)} hourly rows")
                save_history(hourly_data, 'hourly')
                print(f"  ✅ stored in the DB")
            else:
                print("  ⚠️ no data")
        else:
            print(f"  ⚠️ {plant_type} does not support hourly history")
    except Exception as e:
        print(f"❌ [Hourly] error: {e}")
        import traceback
        traceback.print_exc()

    # 2. Daily data
    try:
        print(f"\n⏳ [Daily] collecting daily data...")
        if hasattr(crawler_module, 'fetch_history_daily'):
            daily_data = crawler_module.fetch_history_daily(plant_config, start_date, end_date)
            if daily_data:
                print(f"  ✅ collected {len(daily_data)} daily rows")
                save_history(daily_data, 'daily')
                print(f"  ✅ stored in the DB")
            else:
                print("  ⚠️ no data")
        else:
            print(f"  ⚠️ {plant_type} does not support daily history")
    except Exception as e:
        print(f"❌ [Daily] error: {e}")
        import traceback
        traceback.print_exc()

    print(f"\n✅ [{plant_name}] all tasks complete\n")

def main():
    """Entry point"""
    target_plants = ['kremc-05', 'nrems-09']  # Units 5 and 9

    print("\n" + "="*60)
    print("🌞 Starting the February data crawl")
    print(f"targets: Unit 5 (kremc-05), Unit 9 (nrems-09)")
    print("="*60)

    for plant_id in target_plants:
        cfg = get_plant_config(plant_id)
        if cfg:
            fetch_february_data(cfg)
        else:
            print(f"❌ configuration not found: {plant_id}")

    print("\n" + "="*60)
    print("🎉 All tasks complete!")
    print("="*60 + "\n")

if __name__ == "__main__":
    main()

127
scripts_archive/fill_today_feb.py Normal file

@@ -0,0 +1,127 @@
"""
Backfill crawl for the hour slots missing on February 27
"""
import sys
import os
import importlib
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()

if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from config import get_all_plants
from database import save_history, get_supabase_client

def get_plant_config(target_id):
    plants = get_all_plants()
    for p in plants:
        if p.get('id') == target_id:
            return p
    return None

def fill_today(plant_config):
    plant_id = plant_config['id']
    plant_type = plant_config['type']
    plant_name = plant_config['name']

    print(f"\n{'='*60}")
    print(f"🚀 [{plant_name}] backfilling today's data ({plant_id})")
    print(f"{'='*60}")

    # Import the crawler module dynamically
    try:
        crawler_module = importlib.import_module(f"crawlers.{plant_type}")
    except ImportError:
        print(f"❌ crawler module not found: crawlers/{plant_type}.py")
        return

    today = datetime.now().strftime("%Y-%m-%d")

    # 1. Check which hour slots already exist in the DB
    client = get_supabase_client()
    if client:
        result = client.table("solar_logs") \
            .select("created_at") \
            .eq("plant_id", plant_id) \
            .gte("created_at", f"{today}T00:00:00+09:00") \
            .lt("created_at", f"{today}T23:59:59+09:00") \
            .execute()
        existing_hours = set()
        for rec in result.data:
            hour = rec['created_at'][:13]  # e.g. 2026-02-27T00
            existing_hours.add(hour)
        print(f"  hour slots already in the DB: {len(existing_hours)}")
        print(f"  {sorted(existing_hours)[:5]}... (sample)")

    # 2. Crawl the hourly data
    try:
        print(f"\n⏳ [Hourly] collecting today's hourly data...")
        if hasattr(crawler_module, 'fetch_history_hourly'):
            hourly_data = crawler_module.fetch_history_hourly(plant_config, today, today)
            if hourly_data:
                print(f"  ✅ collected {len(hourly_data)} hourly rows")
                save_history(hourly_data, 'hourly')
                print(f"  ✅ stored in the DB")
            else:
                print("  ⚠️ no data")
        else:
            print(f"  ⚠️ {plant_type} does not support hourly history")
    except Exception as e:
        print(f"❌ [Hourly] error: {e}")
        import traceback
        traceback.print_exc()

    # 3. Update the daily data as well
    try:
        print(f"\n⏳ [Daily] updating today's daily data...")
        if hasattr(crawler_module, 'fetch_history_daily'):
            daily_data = crawler_module.fetch_history_daily(plant_config, today, today)
            if daily_data:
                print(f"  ✅ collected {len(daily_data)} daily rows")
                save_history(daily_data, 'daily')
                print(f"  ✅ stored in the DB")
            else:
                print("  ⚠️ no data")
        else:
            print(f"  ⚠️ {plant_type} does not support daily history")
    except Exception as e:
        print(f"❌ [Daily] error: {e}")
        import traceback
        traceback.print_exc()

    print(f"\n✅ [{plant_name}] done\n")

def main():
    target_plants = ['kremc-05', 'nrems-09']
    print("\n" + "="*60)
    print("🌞 Backfill crawl for today's data")
    print("="*60)
    for plant_id in target_plants:
        cfg = get_plant_config(plant_id)
        if cfg:
            fill_today(cfg)
        else:
            print(f"❌ configuration not found: {plant_id}")
    print("\n" + "="*60)
    print("🎉 All tasks complete!")
    print("="*60 + "\n")

if __name__ == "__main__":
    main()

74
scripts_archive/test_api.py Normal file

@@ -0,0 +1,74 @@
"""
API call test - check the Feb 25 hourly data for Units 5 and 9
"""
import requests
import json

def test_api(plant_id, plant_name, date):
    url = f"https://solorpower.dadot.net/plants/{plant_id}/stats/today?date={date}"
    print(f"\n{'='*70}")
    print(f"🔍 [{plant_name}] API call: {date}")
    print(f"{'='*70}")
    print(f"URL: {url}\n")
    try:
        response = requests.get(url, timeout=10)
        print(f"Status Code: {response.status_code}")
        if response.status_code == 200:
            data = response.json()
            print(f"Status: {data.get('status')}")
            print(f"Plant ID: {data.get('plant_id')}")
            print(f"Date: {data.get('date')}")
            print(f"Count: {data.get('count')}\n")
            hourly_data = data.get('data', [])
            # Print only the hour slots that have data
            has_data_count = 0
            print("Hourly data (slots with data only):")
            for item in hourly_data:
                if item.get('has_data'):
                    has_data_count += 1
                    print(f"  {item['label']:>4}: current_kw={item['current_kw']:>8.2f}, today_kwh={item['today_kwh']:>8.2f}")
            if has_data_count == 0:
                print("  ❌ no hour slot has data!")
                # Dump the whole response
                print("\nFull response:")
                print(json.dumps(data, indent=2, ensure_ascii=False))
            else:
                print(f"\n✅ {has_data_count} hour slots have data")
        else:
            print(f"❌ API call failed")
            print(response.text)
    except Exception as e:
        print(f"❌ error: {e}")
        import traceback
        traceback.print_exc()

def main():
    print("\n" + "="*70)
    print("🌐 API call test")
    print("="*70)
    # Check February 25
    test_api("kremc-05", "Unit 5", "2026-02-25")
    test_api("nrems-09", "Unit 9", "2026-02-25")
    # Also check February 1
    test_api("kremc-05", "Unit 5", "2026-02-01")
    test_api("nrems-09", "Unit 9", "2026-02-01")
    print("\n" + "="*70)
    print("Test complete")
    print("="*70 + "\n")

if __name__ == "__main__":
    main()

95
scripts_archive/update_current_kw.py Normal file

@@ -0,0 +1,95 @@
"""
Update current_kw for the February rows
For historical rows, set current_kw = today_kwh (hourly generation)
"""
import sys
import os
from datetime import datetime, timedelta
from dotenv import load_dotenv

load_dotenv()

if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from database import get_supabase_client

def update_current_kw(plant_id, plant_name):
    """Set current_kw to today_kwh for the February rows"""
    print(f"\n{'='*70}")
    print(f"🔧 [{plant_name}] updating current_kw...")
    print(f"{'='*70}")

    client = get_supabase_client()
    if not client:
        return

    # February 1 through 27
    start = datetime(2026, 2, 1)
    end = datetime(2026, 2, 27)

    total_updated = 0
    current = start

    while current <= end:
        date_str = current.strftime("%Y-%m-%d")

        # Fetch all hourly rows for this date
        result = client.table("solar_logs") \
            .select("id, current_kw, today_kwh") \
            .eq("plant_id", plant_id) \
            .gte("created_at", f"{date_str}T00:00:00") \
            .lt("created_at", f"{(current + timedelta(days=1)).strftime('%Y-%m-%d')}T00:00:00") \
            .execute()

        if not result.data:
            current += timedelta(days=1)
            continue

        # Update only rows where current_kw is 0 and today_kwh is not
        updated_count = 0
        for record in result.data:
            if record['current_kw'] == 0 and record['today_kwh'] != 0:
                try:
                    # Set current_kw to today_kwh
                    client.table("solar_logs") \
                        .update({"current_kw": record['today_kwh']}) \
                        .eq("id", record['id']) \
                        .execute()
                    updated_count += 1
                except Exception as e:
                    print(f"  ⚠️ update failed (id: {record['id']}): {e}")

        if updated_count > 0:
            print(f"  ✅ {date_str}: {updated_count} rows updated")
            total_updated += updated_count

        current += timedelta(days=1)

    print(f"\n✅ [{plant_name}] updated {total_updated} rows in total")

def main():
    plants = [
        ('kremc-05', 'Unit 5'),
        ('nrems-09', 'Unit 9')
    ]
    print("\n" + "="*70)
    print("🔧 Starting the February current_kw update")
    print("="*70)
    for plant_id, plant_name in plants:
        update_current_kw(plant_id, plant_name)
    print("\n" + "="*70)
    print("🎉 current_kw update complete!")
    print("="*70 + "\n")

if __name__ == "__main__":
    main()

81
scripts_archive/verify_feb_final.py Normal file

@@ -0,0 +1,81 @@
"""
Final verification of the February data
"""
import sys
import os
from datetime import datetime
from dotenv import load_dotenv

load_dotenv()

if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from database import get_supabase_client

def final_check():
    client = get_supabase_client()
    if not client:
        print("❌ Supabase connection failed")
        return

    print("\n" + "="*70)
    print("📊 February data: final verification results")
    print("="*70)

    plants = [
        ('kremc-05', 'Unit 5'),
        ('nrems-09', 'Unit 9')
    ]

    for plant_id, plant_name in plants:
        print(f"\n🏭 [{plant_name}] ({plant_id})")
        print("-" * 70)

        # Hourly data
        hourly = client.table("solar_logs") \
            .select("*", count='exact') \
            .eq("plant_id", plant_id) \
            .gte("created_at", "2026-02-01T00:00:00+09:00") \
            .lte("created_at", "2026-02-27T23:59:59+09:00") \
            .execute()
        hourly_count = hourly.count if hasattr(hourly, 'count') else len(hourly.data)

        # Daily data
        daily = client.table("daily_stats") \
            .select("*", count='exact') \
            .eq("plant_id", plant_id) \
            .gte("date", "2026-02-01") \
            .lte("date", "2026-02-27") \
            .execute()
        daily_count = daily.count if hasattr(daily, 'count') else len(daily.data)
        total_gen = sum(r.get('total_generation', 0) for r in daily.data)
        avg_gen = total_gen / daily_count if daily_count > 0 else 0

        # Monthly stats
        monthly = client.table("monthly_stats") \
            .select("*") \
            .eq("plant_id", plant_id) \
            .eq("month", "2026-02") \
            .execute()
        monthly_gen = monthly.data[0].get('total_generation', 0) if monthly.data else 0

        print(f"  ✅ hourly rows (Hourly): {hourly_count}")
        print(f"  ✅ daily rows (Daily):   {daily_count}")
        print(f"  📈 February total generation: {total_gen:,.2f} kWh")
        print(f"  📈 daily average: {avg_gen:,.2f} kWh/day")
        print(f"  📊 monthly stats: {monthly_gen:,.2f} kWh")

    print("\n" + "="*70)
    print("✅ All data stored correctly in the Supabase DB!")
    print("="*70 + "\n")

if __name__ == "__main__":
    final_check()

180
scripts_archive/verify_february_data.py Normal file

@@ -0,0 +1,180 @@
"""
February data verification script
Checks that the February data for Unit 5 (kremc-05) and Unit 9 (nrems-09) was stored correctly in the DB
"""
import sys
import os
from datetime import datetime
from dotenv import load_dotenv

# Load .env
load_dotenv()

# Work around Windows encoding issues
if sys.platform.startswith('win'):
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')

# Add the project root to the path
current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(current_dir)

from database import get_supabase_client

def verify_data(plant_id, plant_name):
    """Verify one plant's February data"""
    print(f"\n{'='*70}")
    print(f"🔍 [{plant_name}] verifying February data...")
    print(f"{'='*70}")

    client = get_supabase_client()
    if client is None:
        print("❌ Supabase connection failed")
        return

    # February date range
    now = datetime.now()
    year = now.year
    start_date = f"{year}-02-01"
    if now.month == 2:
        end_date = now.strftime("%Y-%m-%d")
    else:
        # Last day of February
        if year % 4 == 0 and (year % 100 != 0 or year % 400 == 0):
            end_date = f"{year}-02-29"
        else:
            end_date = f"{year}-02-28"
    month_str = f"{year}-02"

    try:
        # 1. Hourly data (solar_logs)
        print(f"\n📊 [Hourly] hourly data (solar_logs)")
        print(f"   query range: {start_date} ~ {end_date}")
        hourly_result = client.table("solar_logs") \
            .select("*", count='exact') \
            .eq("plant_id", plant_id) \
            .gte("created_at", f"{start_date}T00:00:00+09:00") \
            .lte("created_at", f"{end_date}T23:59:59+09:00") \
            .order("created_at", desc=False) \
            .execute()
        hourly_count = hourly_result.count if hasattr(hourly_result, 'count') else len(hourly_result.data)

        if hourly_count > 0:
            print(f"  ✅ found {hourly_count} hourly rows")
            # Count rows per date
            dates = {}
            total_kwh = 0
            for record in hourly_result.data:
                date_str = record['created_at'][:10]
                dates[date_str] = dates.get(date_str, 0) + 1
                total_kwh += record.get('today_kwh', 0)
            print(f"  📅 data covers {len(dates)} days")
            # Show the first and last 3 days as samples
            sorted_dates = sorted(dates.keys())
            print(f"\n  [sample - first 3 days]")
            for d in sorted_dates[:3]:
                print(f"    {d}: {dates[d]} rows")
            if len(sorted_dates) > 6:
                print(f"    ... ({len(sorted_dates) - 6} days omitted) ...")
            print(f"\n  [sample - last 3 days]")
            for d in sorted_dates[-3:]:
                print(f"    {d}: {dates[d]} rows")
            print(f"\n  💡 average generation per row: {total_kwh / len(hourly_result.data):.2f} kWh/hour")
        else:
            print(f"  ⚠️ no hourly data!")

        # 2. Daily data (daily_stats)
        print(f"\n📊 [Daily] daily data (daily_stats)")
        print(f"   query range: {start_date} ~ {end_date}")
        daily_result = client.table("daily_stats") \
            .select("*", count='exact') \
            .eq("plant_id", plant_id) \
            .gte("date", start_date) \
            .lte("date", end_date) \
            .order("date", desc=False) \
            .execute()
        daily_count = daily_result.count if hasattr(daily_result, 'count') else len(daily_result.data)

        if daily_count > 0:
            print(f"  ✅ found {daily_count} daily rows")
            total_generation = sum(r.get('total_generation', 0) for r in daily_result.data)
            avg_generation = total_generation / daily_count if daily_count > 0 else 0
            print(f"  📈 February total generation: {total_generation:.2f} kWh")
            print(f"  📈 daily average: {avg_generation:.2f} kWh")
            # Show the first and last 5 days as samples
            print(f"\n  [sample - first 5 days]")
            for record in daily_result.data[:5]:
                print(f"    {record['date']}: {record.get('total_generation', 0):.2f} kWh")
            if len(daily_result.data) > 10:
                print(f"    ... ({len(daily_result.data) - 10} days omitted) ...")
            print(f"\n  [sample - last 5 days]")
            for record in daily_result.data[-5:]:
                print(f"    {record['date']}: {record.get('total_generation', 0):.2f} kWh")
        else:
            print(f"  ⚠️ no daily data!")

        # 3. Monthly data (monthly_stats)
        print(f"\n📊 [Monthly] February monthly data (monthly_stats)")
        print(f"   query month: {month_str}")
        monthly_result = client.table("monthly_stats") \
            .select("*") \
            .eq("plant_id", plant_id) \
            .eq("month", month_str) \
            .execute()

        if monthly_result.data:
            record = monthly_result.data[0]
            print(f"  ✅ February monthly stats found")
            print(f"  📈 total generation: {record.get('total_generation', 0):.2f} kWh")
            print(f"  🕐 updated: {record.get('updated_at', 'N/A')}")
        else:
            print(f"  ⚠️ no February monthly data!")

        print(f"\n✅ [{plant_name}] verification complete\n")
    except Exception as e:
        print(f"❌ error during verification: {e}")
        import traceback
        traceback.print_exc()

def main():
    """Entry point"""
    plants = [
        ('kremc-05', 'Unit 5'),
        ('nrems-09', 'Unit 9')
    ]
    print("\n" + "="*70)
    print("🔍 Starting February data verification")
    print("="*70)
    for plant_id, plant_name in plants:
        verify_data(plant_id, plant_name)
    print("="*70)
    print("🎉 All verification complete!")
    print("="*70 + "\n")

if __name__ == "__main__":
    main()

118
tests/fill_all_today.py Normal file

@@ -0,0 +1,118 @@
import sys
import os
import importlib
from datetime import datetime, timezone, timedelta
# Add parent directory to path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from dotenv import load_dotenv
load_dotenv(os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), '.env'))
from database import get_supabase_client, save_history
from config import get_all_plants
def cleanup_history_today(plant_id, today_str):
"""
Cleans up 'History' status records for the target date to avoid duplicates.
"""
client = get_supabase_client()
if not client:
return
# Delete records with status='History' created within the target date range
    # created_at is timestamptz, so the range boundaries need care.
    # save_history (database.py) sets created_at to the data's own timestamp for
    # hourly history (records.append({..., 'created_at': final_created_at, ...}),
    # where final_created_at comes from the data timestamp), so deleting the
    # range [today 00:00:00, today 23:59:59] targets exactly today's history rows.
start_ts = f"{today_str}T00:00:00"
end_ts = f"{today_str}T23:59:59"
try:
# We also filter by status='History' to avoid deleting real-time crawled logs (if any exist)
# Real-time logs usually have status='Normal' or 'Abnormal' or empty.
# History fetch sets status='History'.
res = client.table('solar_logs').delete() \
.eq('plant_id', plant_id) \
.eq('status', 'History') \
.gte('created_at', start_ts) \
.lte('created_at', end_ts) \
.execute()
if res.data:
print(f" 🧹 Cleaned up {len(res.data)} old history records for {today_str}.")
except Exception as e:
print(f" ⚠️ Cleanup failed: {e}")
def fill_all_today():
plants = get_all_plants()
now_kst = datetime.now(timezone(timedelta(hours=9)))
today_str = now_kst.strftime("%Y-%m-%d")
print(f"🚀 Starting Manual Data Fetch for TODAY: {today_str}")
print("=" * 60)
for plant in plants:
plant_id = plant['id']
plant_name = plant['name']
plant_type = plant['type']
# Skip unknown or unsupported types
if plant_type == 'unknown':
continue
print(f"\nProcessing [{plant_type.upper()}] {plant_name} ({plant_id})...")
try:
# Dynamic import
module = importlib.import_module(f"crawlers.{plant_type}")
# 1. Hourly Data
if hasattr(module, 'fetch_history_hourly'):
print(" ⏳ Fetching Hourly Data...")
# Cleanup previous 'History' data for today to prevent dups
cleanup_history_today(plant_id, today_str)
try:
# fetch_history_hourly(config, start_date, end_date)
data = module.fetch_history_hourly(plant, today_str, today_str)
if data:
# save_history handles 'hourly' -> inserts into solar_logs
save_history(data, 'hourly')
else:
print(" ⚠️ No Hourly data found.")
except Exception as e:
print(f" ❌ Hourly Fetch Error: {e}")
else:
print(" No fetch_history_hourly method.")
# 2. Daily Data (Optional, as it might not be ready yet)
if hasattr(module, 'fetch_history_daily'):
print(" ⏳ Fetching Daily Data...")
try:
# fetch_history_daily(config, start_date, end_date)
data = module.fetch_history_daily(plant, today_str, today_str)
if data:
# save_history handles 'daily' -> upserts daily_stats & updates monthly
save_history(data, 'daily')
else:
print(" ⚠️ No Daily data found (Site might not list today yet).")
except Exception as e:
print(f" ❌ Daily Fetch Error: {e}")
else:
print(" No fetch_history_daily method.")
except ImportError:
print(f" ❌ Module 'crawlers.{plant_type}' not found.")
except Exception as e:
print(f" ❌ Error processing plant: {e}")
print("\n" + "=" * 60)
print("All tasks completed.")
if __name__ == "__main__":
fill_all_today()

66
tools/check_db.py Normal file

@@ -0,0 +1,66 @@
import sys
from pathlib import Path
# Add parent directory to sys.path to allow importing from root
sys.path.append(str(Path(__file__).parent.parent))
from dotenv import load_dotenv
# Load environment variables (must run before importing database)
load_dotenv()
from database import get_supabase_client
from datetime import datetime, timedelta
def check_db_data():
client = get_supabase_client()
if not client:
print("❌ Supabase connection failed")
return
    # Check data from yesterday 18:00 to 20:00 (KST).
    # created_at is stored in UTC, so the KST window must be shifted back
    # 9 hours: KST 18:00-20:00 corresponds to UTC 09:00-11:00 (the end
    # below is padded to 11:15). Query with plain timestamp strings.
start_time = "2026-02-12 09:00:00"
end_time = "2026-02-12 11:15:00"
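    # A programmatic alternative (sketch) to the hardcoded strings above:
    # derive the UTC strings from a KST window instead.
    #   from datetime import timezone
    #   KST = timezone(timedelta(hours=9))
    #   start_time = (datetime(2026, 2, 12, 18, 0, tzinfo=KST)
    #                 .astimezone(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"))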
print(f"🔍 Checking DB data from {start_time} to {end_time} (UTC)...")
try:
response = client.table("solar_logs").select("*") \
.gte("created_at", start_time) \
.lte("created_at", end_time) \
.order("created_at") \
.execute()
data = response.data
if not data:
print("⚠️ No data found in this range.")
return
print(f"✅ Found {len(data)} records.\n")
# Group by timestamp to see snapshot completeness
timestamps = {}
for item in data:
ts = item['created_at']
if ts not in timestamps:
timestamps[ts] = []
timestamps[ts].append(item)
for ts in sorted(timestamps.keys()):
items = timestamps[ts]
print(f"{ts} - {len(items)} plants")
for item in items:
print(f" - {item['plant_id']}: {item['current_kw']} kW / {item['today_kwh']} kWh")
print("-" * 50)
except Exception as e:
print(f"❌ Error querying DB: {e}")
if __name__ == "__main__":
check_db_data()

142
tools/recover_from_log.py Normal file

@@ -0,0 +1,142 @@
import sys
from pathlib import Path
# Add parent directory to sys.path to allow importing from root
sys.path.append(str(Path(__file__).parent.parent))
import os
import re
from datetime import datetime, timedelta
from dotenv import load_dotenv
# Load environment variables
load_dotenv()
from database import get_supabase_client, save_history
PLANT_MAP = {
"태양과바람 1호기": "nrems-01",
"태양과바람 2호기": "nrems-02",
"태양과바람 3호기": "nrems-03",
"태양과바람 4호기": "nrems-04",
"태양과바람 5호기": "kremc-05",
"태양과바람 6호기": "sunwms-06",
"태양과바람 8호기": "hyundai-08",
"태양과바람 9호기": "nrems-09",
"태양과바람 10호기": "cmsolar-10"
}
def clean_and_recover(log_path, start_time_str, end_time_str):
"""
    1. Deletes ALL hourly records in the target period (simpler and safer than
       picking out only the bad rows where current_kw was wrongly stored as
       equal to generation_kwh).
    2. Parses the log and re-inserts the data.
"""
print(f"🧹 Cleaning DB data from {start_time_str} to {end_time_str}...")
    # Convert the local (KST) times to a UTC range for the deletion query:
    # save_history sends timezone-aware timestamps (+09:00) and Supabase
    # stores them as UTC, so the deletion range must be expressed in UTC.
# 1. Delete existing records in the range
client = get_supabase_client()
if not client:
return
# KST to UTC conversion for query
# 2026-02-12 17:00:00 KST -> 08:00 UTC
# 2026-02-13 10:00:00 KST -> 01:00 UTC (next day)
try:
start_dt = datetime.strptime(start_time_str, "%Y-%m-%d %H:%M:%S")
end_dt = datetime.strptime(end_time_str, "%Y-%m-%d %H:%M:%S")
        # UTC = KST - 9 hours
start_utc = (start_dt - timedelta(hours=9)).isoformat()
end_utc = (end_dt - timedelta(hours=9)).isoformat()
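        # Tz-aware equivalent (sketch; fine here because +09:00 is fixed, no DST;
        # note it yields an offset-aware ISO string, unlike the naive one above):
        #   from datetime import timezone
        #   KST = timezone(timedelta(hours=9))
        #   start_utc = start_dt.replace(tzinfo=KST).astimezone(timezone.utc).isoformat()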
print(f" Deleting range (UTC): {start_utc} ~ {end_utc}")
# Delete solar_logs
res = client.table("solar_logs").delete() \
.gte("created_at", start_utc) \
.lte("created_at", end_utc) \
.execute()
print(f"✅ Deleted {len(res.data) if res.data else '0'} records.")
except Exception as e:
print(f"❌ Deletion failed: {e}")
        # Proceed with the insert anyway; duplicates may occur if the delete failed.
print(f"📂 Parsing log: {log_path}")
start_pattern = re.compile(r"통합 관제 시스템.*\((\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2})\)")
table_pattern = re.compile(r"(태양과바람 \d+호기)\s+\|\s+([\d.]+)\s+\|\s+([\d.]+)\s+\|")
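    # Illustrative line shapes the two patterns are assumed to match
    # (the exact cron.log layout is an assumption, not verified here):
    #   "통합 관제 시스템 현황 (2026-02-12 17:00:03)"
    #   "태양과바람 1호기   |   12.3   |   45.6   | ..."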
current_timestamp = None
recovered_data = []
try:
with open(log_path, 'r', encoding='utf-8') as f:
for line in f:
start_match = start_pattern.search(line)
if start_match:
ts_str = start_match.group(1)
ts_dt = datetime.strptime(ts_str, "%Y-%m-%d %H:%M:%S")
if start_dt <= ts_dt <= end_dt:
current_timestamp = ts_str
else:
current_timestamp = None
continue
if current_timestamp:
table_match = table_pattern.search(line)
if table_match:
plant_name = table_match.group(1).strip()
kw = float(table_match.group(2))
kwh = float(table_match.group(3))
plant_id = PLANT_MAP.get(plant_name)
if plant_id:
recovered_data.append({
'plant_id': plant_id,
'timestamp': current_timestamp,
'current_kw': kw, # Now database.py handles 0.0 correctly
'generation_kwh': kwh
})
except Exception as e:
print(f"❌ Error parsing log: {e}")
return
print(f"✅ Found {len(recovered_data)} points to restore.")
if not recovered_data:
return
chunk_size = 100
total_saved = 0
for i in range(0, len(recovered_data), chunk_size):
chunk = recovered_data[i:i + chunk_size]
if save_history(chunk, 'hourly'):
total_saved += len(chunk)
else:
print("❌ Insert failed")
print(f"🎉 Recovery finished. {total_saved} records inserted.")
# 2. Daily stats update (optional, but safe to do)
    # ... (omitted for brevity; hourly is the critical data)
if __name__ == "__main__":
log_file = r"d:\dev\etc\SolorPower\crawler\log\cron.log"
# Target period: Yesterday 17:00 ~ Today 10:00
start = "2026-02-12 17:00:00"
end = "2026-02-13 10:00:00"
clean_and_recover(log_file, start, end)

502
venv_win/Scripts/Activate.ps1 Normal file

@@ -0,0 +1,502 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
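# For reference, example lines Get-PyVenvConfig accepts (a sketch based on the
# parsing rules above; surrounding quotes on a value are stripped):
#   home = C:\Users\haneu\AppData\Local\Programs\Python\Python311
#   prompt = 'venv_win'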
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
# SIG # Begin signature block
#   (Authenticode signature omitted: base64-encoded DigiCert-issued
#    code-signing and timestamping certificates that ship with the
#    stock Activate.ps1)
# SIG # End signature block

69
venv_win/Scripts/activate Normal file

@@ -0,0 +1,69 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="D:\dev\etc\SolorPower\crawler\venv_win"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/Scripts:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1="(venv_win) ${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT="(venv_win) "
export VIRTUAL_ENV_PROMPT
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi

34
venv_win/Scripts/activate.bat Normal file

@@ -0,0 +1,34 @@
@echo off
rem This file is UTF-8 encoded, so we need to update the current code page while executing it
for /f "tokens=2 delims=:." %%a in ('"%SystemRoot%\System32\chcp.com"') do (
set _OLD_CODEPAGE=%%a
)
if defined _OLD_CODEPAGE (
"%SystemRoot%\System32\chcp.com" 65001 > nul
)
set VIRTUAL_ENV=D:\dev\etc\SolorPower\crawler\venv_win
if not defined PROMPT set PROMPT=$P$G
if defined _OLD_VIRTUAL_PROMPT set PROMPT=%_OLD_VIRTUAL_PROMPT%
if defined _OLD_VIRTUAL_PYTHONHOME set PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%
set _OLD_VIRTUAL_PROMPT=%PROMPT%
set PROMPT=(venv_win) %PROMPT%
if defined PYTHONHOME set _OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%
set PYTHONHOME=
if defined _OLD_VIRTUAL_PATH set PATH=%_OLD_VIRTUAL_PATH%
if not defined _OLD_VIRTUAL_PATH set _OLD_VIRTUAL_PATH=%PATH%
set PATH=%VIRTUAL_ENV%\Scripts;%PATH%
set VIRTUAL_ENV_PROMPT=(venv_win)
:END
if defined _OLD_CODEPAGE (
"%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul
set _OLD_CODEPAGE=
)

22
venv_win/Scripts/deactivate.bat Normal file

@@ -0,0 +1,22 @@
@echo off
if defined _OLD_VIRTUAL_PROMPT (
set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
)
set _OLD_VIRTUAL_PROMPT=
if defined _OLD_VIRTUAL_PYTHONHOME (
set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%"
set _OLD_VIRTUAL_PYTHONHOME=
)
if defined _OLD_VIRTUAL_PATH (
set "PATH=%_OLD_VIRTUAL_PATH%"
)
set _OLD_VIRTUAL_PATH=
set VIRTUAL_ENV=
set VIRTUAL_ENV_PROMPT=
:END

BIN
venv_win/Scripts/dotenv.exe Normal file

Binary file not shown.

BIN
venv_win/Scripts/httpx.exe Normal file

Binary file not shown.

BIN
venv_win/Scripts/pip.exe Normal file

Binary file not shown.

BIN
venv_win/Scripts/pip3.exe Normal file

Binary file not shown.

BIN
venv_win/Scripts/python.exe Normal file

Binary file not shown.

5
venv_win/pyvenv.cfg Normal file

@@ -0,0 +1,5 @@
home = C:\Users\haneu\AppData\Local\Programs\Python\Python311
include-system-site-packages = false
version = 3.11.5
executable = C:\Users\haneu\AppData\Local\Programs\Python\Python311\python.exe
command = C:\Users\haneu\AppData\Local\Programs\Python\Python311\python.exe -m venv D:\dev\etc\SolorPower\crawler\venv_win