diff --git a/main.py b/main.py
index 9d93d8f..fd13b99 100644
--- a/main.py
+++ b/main.py
@@ -18,17 +18,21 @@ from fastapi import Query
 import asyncio
 import datetime
 from valveBackend import ValveBackend
+import csv
+from collections import deque
 
 if platform.system() in ["Darwin"]: # macOS or Windows
     from MockCAN import CANBackend
+    logging.basicConfig(level=logging.INFO)
+
 else:
     from classCAN import CANBackend # Your real backend
+    logging.basicConfig(level=logging.ERROR)
 
 app = FastAPI()
 app.add_middleware(SessionMiddleware, secret_key="your_super_secret_key")
 router = APIRouter()
 templates = Jinja2Templates(directory="templates")
-logging.basicConfig(level=logging.INFO)
 can_backend = CANBackend()
 valve_backend = ValveBackend(eds_file="/home/hmi/Desktop/HMI/eds_file/inletvalveboard.eds")
 
@@ -39,6 +43,16 @@ app.mount("/static", StaticFiles(directory="static"), name="static")
 
 # Global object to store the latest data
 latest_data: Dict[str, Any] = {"PU_1": None, "PU_2": None, "PU_3": None}
 
+# RECORDER
+recording_flag = False
+recording_task = None
+recording_writer = None
+recording_file = None
+write_buffer = deque()
+flush_interval = 1.0 # flush every 1 second
+last_flush_time = datetime.datetime.now()
+
+
 def format_data(data):
     return {
@@ -206,7 +220,7 @@ async def update_latest_data():
     while True:
         for pu in [1, 2, 3]:
             data = can_backend.get_latest_data(pu_number=pu)
-            logging.info(f"[MONITOR BUFFER] PU{pu}: {data}")
+            logging.info(f"[MONITOR BUFFER] PU{pu}: ") # {data}
             latest_data[f"PU_{pu}"] = format_data(data)
         await asyncio.sleep(0.2) # Update every 100ms
 
@@ -238,6 +252,71 @@ def feedvalve_control(MV01_opening: int = Query(...)):
     logging.info(f"Feed valve opening to {MV01_opening}")
     return {"status": "ok"}
 
+#LOCAL RECORDER
+@app.post("/start_recording")
+async def start_recording():
+    global recording_flag, recording_task, recording_file, recording_writer
+
+    if recording_flag:
+        raise HTTPException(status_code=400, detail="Already recording.")
+
+    now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
+    filename = f"recording_{now}.csv"
+    os.makedirs("recordings", exist_ok=True)
+    filepath = os.path.join("recordings", filename)
+
+    recording_file = open(filepath, "w", newline="")
+    fieldnames = ["timestamp", "pu"] + list(format_data({}).keys())
+    recording_writer = csv.DictWriter(recording_file, fieldnames=fieldnames)
+    recording_writer.writeheader()
+
+    recording_flag = True
+    recording_task = asyncio.create_task(record_data_loop())
+    logging.info(f"[RECORDING STARTED] File: {filepath}")
+    return {"status": "recording started", "file": filename}
+
+
+@app.post("/stop_recording")
+async def stop_recording():
+    global recording_flag, recording_task, recording_file
+
+    if not recording_flag:
+        raise HTTPException(status_code=400, detail="Not recording.")
+
+    recording_flag = False
+    if recording_task:
+        await recording_task
+        recording_task = None
+
+    if recording_file:
+        recording_file.close()
+        recording_file = None
+
+    logging.info("[RECORDING STOPPED]")
+    return {"status": "recording stopped"}
+
+
+
+async def record_data_loop():
+    global recording_writer, recording_file, write_buffer, last_flush_time
+
+    while recording_flag:
+        timestamp = datetime.datetime.now().isoformat()
+        for pu, data in latest_data.items():
+            if data:
+                row = {
+                    "timestamp": timestamp,
+                    "pu": pu,
+                    **data
+                }
+                recording_writer.writerow(row)
+
+        # Flush every flush_interval seconds
+        if (datetime.datetime.now() - last_flush_time).total_seconds() >= flush_interval:
+            recording_file.flush()
+            last_flush_time = datetime.datetime.now()
+
+        await asyncio.sleep(0.1) # 10 Hz
 
 app.include_router(router)
 
 if __name__ == "__main__":
diff --git a/templates/control.html b/templates/control.html
index e329d85..84f224a 100644
--- a/templates/control.html
+++ b/templates/control.html
@@ -240,11 +240,16 @@
             Monitor PU 3
+
+
+
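Side note on the recorder loop added to main.py above: the patch introduces `write_buffer = deque()` and imports `deque`, but `record_data_loop()` never uses the buffer; every row goes straight to `recording_writer.writerow()` on each 0.1 s tick. Below is a minimal sketch of how the deque could batch rows and drain them once per `flush_interval`, reusing the globals this change defines. The function name `record_data_loop_buffered` is only an illustrative placeholder, not code from this patch.

async def record_data_loop_buffered():
    # Sketch only: queue rows in write_buffer and drain them once per
    # flush_interval, instead of calling writerow() on every loop tick.
    global last_flush_time

    while recording_flag:
        timestamp = datetime.datetime.now().isoformat()
        for pu, data in latest_data.items():
            if data:
                # Queue the row in memory rather than writing it immediately
                write_buffer.append({"timestamp": timestamp, "pu": pu, **data})

        # Drain the deque to the CSV file once per flush_interval seconds
        if (datetime.datetime.now() - last_flush_time).total_seconds() >= flush_interval:
            while write_buffer:
                recording_writer.writerow(write_buffer.popleft())
            recording_file.flush()
            last_flush_time = datetime.datetime.now()

        await asyncio.sleep(0.1)  # same 10 Hz sampling rate as the patch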
@@ -327,100 +332,126 @@