Compare commits: asyncio-in...main (103 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | a141f56ee5 |  |
|  | 7fd287fd06 |  |
|  | 516c4e9517 |  |
|  | 8924d68958 |  |
|  | 41323eb016 |  |
|  | e5c4f8a80b |  |
|  | fcab8ac473 |  |
|  | c8d892ced3 |  |
|  | cf82c3f6ec |  |
|  | 11e8fa6d37 |  |
|  | 5df448841c |  |
|  | dd6cc73cf0 |  |
|  | 1c603a5cb1 |  |
|  | 186fdbb952 |  |
|  | 09909ec041 |  |
|  | 1dc82c238e |  |
|  | 79c5a8e941 |  |
|  | 28ee78d055 |  |
|  | ead7e3b647 |  |
|  | 3cfa5ef80e |  |
|  | 81983707c8 |  |
|  | 73b4eaf861 |  |
|  | d56b3614c4 |  |
|  | 72bf6351a6 |  |
|  | f2b8f54b8e |  |
|  | 6683bd16a5 |  |
|  | 2a745d035f |  |
|  | 279a65cdb7 |  |
|  | 148111c627 |  |
|  | cfcd7e3436 |  |
|  | 3d590d3c37 |  |
|  | 018a3757c3 |  |
|  | 2ef90283d6 |  |
|  | 18c34a3334 |  |
|  | cae91b5f05 |  |
|  | 350125b659 |  |
|  | 25b3a114bf |  |
|  | f9422fd210 |  |
|  | 868aa81c78 |  |
|  | 370b7797b9 |  |
|  | eddedf1b43 |  |
|  | ba5e38144e |  |
|  | daa92510d3 |  |
|  | d9b59c73b8 |  |
|  | c7c850129c |  |
|  | d68b170ccb |  |
|  | 9dabcc81d0 |  |
|  | 4ee303f854 |  |
|  | 19b235bfb6 |  |
|  | 115ea2768e |  |
|  | 2446fb7b59 |  |
|  | 41c8d49d31 |  |
|  | e8755bd1de |  |
|  | f808b88f93 |  |
|  | 117265586c |  |
|  | ce8ff0a7bf |  |
|  | 5bc81789e5 |  |
|  | 5ac459a0d5 |  |
|  | 365162bd65 |  |
|  | 158daccb3e |  |
|  | ded0565b10 |  |
|  | 72deb7646e |  |
|  | 985280cfe9 |  |
|  | f6180386d1 |  |
|  | 34f0dda210 |  |
|  | 743aeee130 |  |
|  | 9ad18a17c8 |  |
|  | 400fe40bcd |  |
|  | 7edb759bd9 |  |
|  | 5b11f8006c |  |
|  | 6aeb2f9d3e |  |
|  | 7d2e11a4ce |  |
|  | 7984e11514 |  |
|  | 7a68c14813 |  |
|  | 5f03efb2cb |  |
|  | 6d29fa0059 |  |
|  | efd44dbf3e |  |
|  | 79b9f2d95d |  |
|  | 81353c8b1f |  |
|  | 9b0daf2d06 |  |
|  | 7b6f4ffe78 |  |
|  | 45f0c11196 |  |
|  | c25a387e8b |  |
|  | 9e79f343a5 |  |
|  | d311af6da4 |  |
|  | aef1d1cdfa |  |
|  | d7e5d1e34e |  |
|  | d53170fbb0 |  |
|  | 6c26d9d6a2 |  |
|  | e8479bd8f1 |  |
|  | 9e5e4c2a70 |  |
|  | ef91ff4426 |  |
|  | 1aee1c012f |  |
|  | 6587851267 |  |
|  | 09f9e8feb2 |  |
|  | 9102812a6f |  |
|  | 9443f5e598 |  |
|  | 8f87e6890b |  |
|  | 19aa551af1 |  |
|  | bb39cede93 |  |
|  | fe4a478ced |  |
|  | feb48c6a19 |  |
|  | 3db1f96489 |  |
.gitignore — 7 lines changed (vendored)

```diff
@@ -1,6 +1,3 @@
 __pycache__/main.cpython-311.pyc
-recordings/recording_20250716_145535.csv
-recordings/recording_20250716_143101.csv
-recordings/recording_20250716_143537.csv
-recordings/recording_20250716_150700.csv
-recordings/recording_20250716_181008.csv
+recordings/*
+credentials.json
```
.vscode/settings.json — 5 lines changed (vendored, new file)

```diff
@@ -0,0 +1,5 @@
+{
+    "python.analysis.extraPaths": [
+        "./serial"
+    ]
+}
```
MockCAN.py — 12 lines changed

```diff
@@ -7,7 +7,7 @@ class CANBackend:
     def __init__(self, eds_file=None):
         self.connected = False
 
-    def connect(self, node_id: int, eds_path: str) -> bool:
+    def connect(self) -> bool:
         # Placeholder for connection logic
         self.connected = True
         return True
@@ -23,7 +23,7 @@ class CANBackend:
         # Placeholder for thermal loop cleaning
         pass
 
-    def send_state_command(self, state: str, pu_number: int, ploop_setpoint: float):
+    def send_state_command(self, state: str, pu_number: int, ploop_setpoint: float, qperm_setpoint: float):
         # Placeholder for sending mode command
         PUs_states[pu_number-1] = {"PU_MODE": state, "ploop_setpoint": ploop_setpoint}
@@ -40,7 +40,9 @@ class CANBackend:
             "PS2": 6.2,
             "PS3": 6.2,
             "PS4": 6.2,
-            "Cond": 1 * np.random.random(),
+            "Conductivity_Feed": 1 * np.random.random(),
+            "Conductivity_Permeate": 1 * np.random.random(),
+            "Conductivity_Product": 1 * np.random.random(),
             "MV02": round(100 * np.random.random(), 2),
             "MV02_sp": round(100 * np.random.random(), 2),
             "MV03": round(100 * np.random.random(), 2),
@@ -64,7 +66,9 @@ class CANBackend:
             "PS2": round(10 * np.random.random(), 2),
             "PS3": round(10 * np.random.random(), 2),
             "PS4": round(10 * np.random.random(), 2),
-            "Cond": 1 * np.random.random(),
+            "Conductivity_Feed": 1 * np.random.random(),
+            "Conductivity_Permeate": 1 * np.random.random(),
+            "Conductivity_Product": 1 * np.random.random(),
             "MV02": round(100 * np.random.random(), 2),
             "MV02_sp": round(100 * np.random.random(), 2),
             "MV03": round(100 * np.random.random(), 2),
```
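For reference, a minimal sketch of how a caller exercises the mock after this change. This is hypothetical code, not part of the diff; it assumes MockCAN also defines the module-level `PUs_states` list that `send_state_command` mutates:

```python
# Hypothetical smoke test for the mock backend (not part of the diff).
from MockCAN import CANBackend

backend = CANBackend()        # eds_file stays optional for the mock
assert backend.connect()      # new signature: no node_id / eds_path

# send_state_command now also carries the permeate-flow setpoint
backend.send_state_command("PRODUCTION", pu_number=1,
                           ploop_setpoint=2.5, qperm_setpoint=1200.0)
```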
analysis/read_recording.py — 153 lines (new file)

```python
# Cell 1: import libraries
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

# Configuration for readable plots
sns.set(style="whitegrid")
plt.rcParams['figure.figsize'] = (14, 6)

# Load the data
df = pd.read_csv("../recordings/recording_20250718_112807.csv", parse_dates=["timestamp"])

df_PatientSkid = df[df['pu'] == 'PatientSkid'].copy()

# Final cell: multi-PU display, one subplot per quantity
import matplotlib.dates as mdates

reference_lines = {
    'Qperm': 1200,
    'Pdilute': 2.5
}

quantities = ['Qperm', 'Qdilute', 'Qdrain', 'Pro', 'Pdilute', 'MV07_sp']
n_quantities = len(quantities)
pus_all = ['PU_1', 'PU_2', 'PU_3']

fig, axes = plt.subplots(n_quantities, 1, figsize=(14, 3 * n_quantities), sharex=True)
fig.suptitle("Evolution of quantities per PU", fontsize=16)

for i, quantity in enumerate(quantities):
    ax = axes[i]
    for pu in pus_all:
        df_pu = df[df['pu'] == pu]
        if quantity in df_pu.columns:
            ax.plot(df_pu['timestamp'], df_pu[quantity], label=pu)
    if quantity in reference_lines:
        ax.axhline(reference_lines[quantity], linestyle='--', color='red')
    if quantity == 'Qdilute':
        ax.plot(df_PatientSkid['timestamp'], df_PatientSkid['QSkid'], label='QSkid')
    ax.set_ylabel(quantity)
    ax.grid(True)
    ax.legend(loc='upper right')
    if i == n_quantities - 1:
        ax.set_xlabel("Timestamp")
    else:
        ax.set_xlabel("")
    ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S"))

plt.tight_layout(rect=[0, 0, 1, 0.96])
plt.show()

# Initial analysis for PU_1
df_pu_1 = df[df['pu'] == 'PU_1'].copy()
df_pu_1.sort_values('timestamp', inplace=True)
df_pu_1['delta_t'] = df_pu_1['timestamp'].diff().dt.total_seconds()
df_pu_1 = df_pu_1.iloc[1:]  # drop the first NaN value

plt.figure('Time between messages', figsize=(10, 4))
sns.histplot(df_pu_1['delta_t'], bins=10, stat='probability')
plt.title("Time between messages for PU_1")
plt.xlabel("Δt (seconds)")
plt.ylabel("Probability")
plt.grid(True)
plt.tight_layout()
# plt.show()

print("Average time is ", df_pu_1['delta_t'].mean())


def plot_pu_data(pu_name):
    # Filtering
    df_pu = df[df['pu'] == pu_name].copy()
    df_pu['timestamp'] = pd.to_datetime(df_pu['timestamp'], errors='coerce')
    df_pu = df_pu.dropna(subset=['timestamp'])

    # --------- Plot 1: flow rates ---------
    flow_cols = ['Qperm', 'Qdilute', 'Qdrain', 'Qrecirc']
    available_flows = [col for col in flow_cols if col in df_pu.columns]
    if available_flows:
        fig, ax = plt.subplots(figsize=(10, 4))
        for col in available_flows:
            ax.plot(df_pu['timestamp'], df_pu[col], label=col)
        ax.plot(df_PatientSkid['timestamp'], df_PatientSkid['QSkid'], label='QSkid')
        ax.set_title(f'{pu_name} - Flow Rates')
        ax.set_xlabel("Timestamp")
        ax.set_ylabel("Flow (L/min)")
        ax.legend(loc='upper right')
        ax.grid(True)
        fig.tight_layout()
        # plt.show()

    # --------- Plot 2: pressures ---------
    pressure_cols = ['Pro', 'Pdilute', 'Pretentate']
    available_pressures = [col for col in pressure_cols if col in df_pu.columns]
    if available_pressures:
        fig, ax = plt.subplots(figsize=(10, 4))
        for col in available_pressures:
            ax.plot(df_pu['timestamp'], df_pu[col], label=col)
        ax.set_title(f'{pu_name} - Pressures')
        ax.set_xlabel("Timestamp")
        ax.set_ylabel("Pressure (bar)")
        ax.legend(loc='upper right')
        ax.grid(True)
        fig.tight_layout()
        # plt.show()

    # --------- Plot 3: motor valve positions ---------
    mv_indices = range(2, 9)  # MV02 to MV08
    fig, axes = plt.subplots(3, 3, figsize=(15, 10), sharex=True)
    fig.suptitle(f'{pu_name} - Motor Valve Positions vs Setpoints', fontsize=16)

    plot_index = 0
    for mv in mv_indices:
        mv_real = f"MV0{mv}"
        mv_sp = f"MV0{mv}_sp"
        row, col = divmod(plot_index, 3)
        ax = axes[row, col]

        if mv_real in df_pu.columns and mv_sp in df_pu.columns:
            ax.plot(df_pu['timestamp'], df_pu[mv_real], label='Actual', color='blue')
            ax.plot(df_pu['timestamp'], df_pu[mv_sp], label='Setpoint', linestyle='--', color='orange')
            ax.set_title(f"{mv_real}")
            ax.set_ylabel("Position (%)")
            ax.grid(True)
            if row == 2:
                ax.set_xlabel("Timestamp")
        else:
            ax.set_visible(False)

        plot_index += 1

    # Hide the unused subplots
    while plot_index < 9:
        row, col = divmod(plot_index, 3)
        axes[row, col].set_visible(False)
        plot_index += 1

    handles, labels = axes[0][0].get_legend_handles_labels()
    fig.legend(handles, labels, loc='upper right')
    fig.tight_layout(rect=[0, 0, 1, 0.96])
    # plt.show()


# Final cell: display for every PU
pus = df['pu'].dropna().unique()
print("Available PUs:", pus)
pus = ['PU_1']
for pu in pus:
    print(f"\n--- Data for {pu} ---\n")
    plot_pu_data(pu)
    plt.show()
```
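The script assumes a long-format CSV: one row per sample, tagged by a `pu` column, with a unit's measurement columns left blank where they do not apply. A minimal sketch of a frame in that shape, with hypothetical values (the real files live under `recordings/`):

```python
import pandas as pd

# Hypothetical two-row sample in the shape read_recording.py expects.
sample = pd.DataFrame({
    "timestamp": pd.to_datetime(["2025-07-18T11:28:07", "2025-07-18T11:28:08"]),
    "pu": ["PU_1", "PatientSkid"],
    "Qperm": [1180.0, None], "Qdilute": [950.0, None],
    "Pro": [6.1, None], "Pdilute": [2.4, None],
    "QSkid": [None, 12.5],      # only the PatientSkid rows carry QSkid
})
sample.to_csv("recording_sample.csv", index=False)
```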
classCAN.py — 221 lines (deleted)

```python
import threading
import canopen
import time
import os


class CANBackend:
    def __init__(self, eds_file=None):
        self.network = None
        self.nodes = {}  # {1: RemoteNode(0x02), 2: RemoteNode(0x03), ...}
        self.connected = False
        self.lock = threading.Lock()
        self.polling_thread = None
        self.polling_active = False
        self.latest_data = {
            1: {},  # PU1
            2: {},  # PU2
            3: {}   # PU3
        }
        if eds_file is None:
            self.eds_path = os.path.join(os.path.dirname(__file__), "eds_file", "processBoard_0.eds")
        else:
            self.eds_path = eds_file

    def connect(self):
        try:
            self.network = canopen.Network()
            self.network.connect(channel='can0', bustype='socketcan')

            # PU mapping: PU1->0x02, PU2->0x04, PU3->0x127
            node_map = {
                1: 0x02,
                2: 0x04,
                3: 0x127,
            }

            for pu_number, node_id in node_map.items():
                node = canopen.RemoteNode(node_id, self.eds_path)
                self.network.add_node(node)
                self.nodes[pu_number] = node

            self.connected = True
            self._start_sdo_polling()
            return True

        except Exception as e:
            print(f"[CONNECT ERROR] {e}")
            return False

    def shutdown(self):
        self.polling_active = False
        if self.network:
            self.network.disconnect()
        self.nodes.clear()
        self.connected = False

    def _start_sdo_polling(self):
        if self.polling_thread and self.polling_thread.is_alive():
            return
        self.polling_active = True
        self.polling_thread = threading.Thread(target=self._sdo_polling_loop, daemon=True)
        self.polling_thread.start()

    def _sdo_polling_loop(self):
        while self.polling_active:
            with self.lock:
                try:
                    for pu_number, node in self.nodes.items():
                        try:
                            fm1 = node.sdo[0x2004][1].raw
                            fm2 = node.sdo[0x2004][2].raw
                            fm3 = node.sdo[0x2004][3].raw
                            fm4 = node.sdo[0x2004][4].raw

                            ps1 = node.sdo[0x2005][1].raw
                            ps2 = node.sdo[0x2005][2].raw
                            ps3 = node.sdo[0x2005][3].raw
                            ps4 = node.sdo[0x2005][4].raw

                            mv02Cmd = node.sdo[0x2014][1].raw
                            mv03Cmd = node.sdo[0x2012][1].raw
                            mv04Cmd = node.sdo[0x2019][1].raw
                            mv05Cmd = node.sdo[0x2020][1].raw
                            mv06Cmd = node.sdo[0x2021][1].raw
                            mv07Cmd = node.sdo[0x2015][1].raw
                            mv08Cmd = node.sdo[0x2022][1].raw

                            # mv02fb = node.sdo[0x3000][2].raw
                            # mv03fb = node.sdo[0x3000][3].raw
                            # mv04fb = node.sdo[0x3000][4].raw
                            # mv05fb = node.sdo[0x3000][5].raw
                            # mv06fb = node.sdo[0x3000][6].raw
                            # mv07fb = node.sdo[0x3000][7].raw
                            # mv08fb = node.sdo[0x3000][8].raw

                            self.latest_data[pu_number] = {
                                "FM1": (fm1 / 100.0) * 60.0,
                                "FM2": (fm2 / 100.0) * 60.0,
                                "FM3": (fm3 / 100.0) * 60.0,
                                "FM4": (fm4 / 100.0) * 60.0,

                                "PS1": ps1 / 1000.0,
                                "PS2": ps2 / 1000.0,
                                "PS3": ps3 / 1000.0,
                                "PS4": ps4 / 1000.0,

                                "MV02_sp": mv02Cmd / 100.0,
                                "MV03_sp": mv03Cmd / 100.0,
                                "MV04_sp": mv04Cmd / 100.0,
                                "MV05_sp": mv05Cmd / 100.0,
                                "MV06_sp": mv06Cmd / 100.0,
                                "MV07_sp": mv07Cmd / 100.0,
                                "MV08_sp": mv08Cmd / 100.0,

                                # "MV02": mv02fb,
                                # "MV03": mv03fb,
                                # "MV04": mv04fb,
                                # "MV05": mv05fb,
                                # "MV06": mv06fb,
                                # "MV07": mv07fb,
                                # "MV08": mv08fb,
                            }

                            print(f"[PU{pu_number}] FM1: {fm1}, PS1: {ps1}")
                        except Exception as inner_e:
                            print(f"[SDO READ ERROR] PU{pu_number}: {inner_e}")
                except Exception as outer_e:
                    print(f"[SDO POLL ERROR] {outer_e}")

            time.sleep(1.0)

    def get_latest_data(self, pu_number: int):
        with self.lock:
            return self.latest_data.get(pu_number, {}).copy()

    def read_current_state(self, pu_number: int):
        try:
            node = self.nodes.get(pu_number)
            if node is None:
                return "Offline"
            state_raw = node.sdo[0x2000].raw
            return self.decode_state(state_raw)
        except Exception as e:
            print(f"[PU{pu_number} READ ERROR] {e}")
            return "Offline"

    def decode_state(self, state_val: int) -> str:
        state_map = {
            0: "SYSTEM_MODE_INIT",
            1: "SYSTEM_MODE_OFF",
            2: "SYSTEM_MODE_READY",
            3: "SYSTEM_MODE_PRODUCTION",
            4: "SYSTEM_MODE_LOW_LOOP_PRESSURE",
            5: "SYSTEM_MODE_LOOP_CLEANING",
            6: "SYSTEM_MODE_HEATING_RO",
            7: "SYSTEM_MODE_RINSING_RO",
            8: "SYSTEM_MODE_HEATING_EDI",
            9: "SYSTEM_MODE_COOLING_EDI",
            10: "SYSTEM_MODE_RO_FLUSH",
            11: "SYSTEM_MODE_RO_RINSE",
            12: "SYSTEM_MODE_EDI_RINSE",
            15: "SYSTEM_MODE_FAIL_SAFE",
            16: "SYSTEM_MODE_FIRST_FLUSH",
            255: "SYSTEM_MODE_DEFAULT"
        }
        return state_map.get(state_val, f"UNKNOWN({state_val})")

    def send_state_command(self, state: str, pu_number: int, ploop_setpoint: float):
        if not self.connected:
            raise RuntimeError("CAN not connected")

        state_map = {
            "IDLE": 1,
            "PRE-PRODUCTION": 2,
            "PRODUCTION": 3,
            "MAINTENANCE": 8,
            "EMERGENCY_STOP": 9,
            "FIRST_START": 10
        }

        if state not in state_map:
            raise ValueError(f"Invalid state: {state}")

        try:
            node = self.nodes.get(pu_number)
            if node is None:
                raise ValueError(f"PU{pu_number} not connected")

            print(f"[DEBUG] Writing state {state_map[state]} to 0x2024:{pu_number}")
            node.sdo[0x2024][0x01].raw = state_map[state]

            print(f"[DEBUG] Writing ploop_setpoint {ploop_setpoint} to 0x2007")
            node.sdo[0x2007].raw = int(ploop_setpoint * 100)

        except Exception as e:
            print(f"[SDO WRITE ERROR] PU{pu_number}: {e}")
            raise

    def send_thermal_loop_cleaning(self, mode: str, pu_number: int):
        if not self.connected:
            raise RuntimeError("CAN not connected")

        mode_map = {
            "IDLE": 0,
            "ACTIVE": 1
        }

        if mode not in mode_map:
            raise ValueError(f"Invalid thermal loop mode: {mode}")

        try:
            node = self.nodes.get(pu_number)
            if node is None:
                raise ValueError(f"PU{pu_number} not connected")

            print(f"[DEBUG] Sending thermal loop mode {mode} to 0x2024:{pu_number}")
            node.sdo[0x2024][pu_number].raw = mode_map[mode]

        except Exception as e:
            print(f"[THERMAL LOOP ERROR] PU{pu_number}: {e}")
            raise
```
credentials.json — 4 lines (deleted; this matches the credentials.json that the change adds to .gitignore)

```diff
@@ -1,4 +0,0 @@
-{
-    "username": "northstarNehemis",
-    "password": "NehemisNorthStar@2025"
-}
```
eds_file/dockingBoard_0.eds — 6357 lines (new file; diff suppressed because it is too large)
eds_file/inletvalveboard.eds — 385 lines (deleted)

```ini
; EDS file for inletvalveboard - generated by CANopen DeviceDesigner 3.14.2
[FileInfo]
FileName=inletvalveboard.eds
FileVersion=1.0
FileRevision=1.0
EDSVersion=4.0
Description=EDS
CreationTime=12:05PM
CreationDate=04-15-25
ModificationTime=12:05PM
ModificationDate=04-15-25
CreatedBy=Vineeta Gupta
ModifiedBy=Vineeta Gupta

[Comments]
Lines=1
Line1=generated by CANopen DeviceDesigner by emotas

[DeviceInfo]
VendorName=nehemis
VendorNumber=0x319
ProductName=InletValveController
ProductNumber=1234
RevisionNumber=0x1
OrderCode=InletValveController
BaudRate_10=0
BaudRate_20=0
BaudRate_50=0
BaudRate_125=0
BaudRate_250=1
BaudRate_500=0
BaudRate_800=0
BaudRate_1000=0
NrOfRxPDO=0
NrOfTxPDO=1
SimpleBootupSlave=1
SimpleBootupMaster=0
LSS_Supported=0
Granularity=0
DynamicChannelsSupported=0
GroupMessaging=0

[DummyUsage]
Dummy0001=0
Dummy0002=0
Dummy0003=0
Dummy0004=0
Dummy0005=0
Dummy0006=0
Dummy0007=0

[MandatoryObjects]
SupportedObjects=3
1=0x1000
2=0x1001
3=0x1018

[ManufacturerObjects]
SupportedObjects=2
1=0x2001
2=0x3000

[OptionalObjects]
SupportedObjects=13
1=0x1003
2=0x1008
3=0x1014
4=0x1015
5=0x1016
6=0x1017
7=0x1029
8=0x1200
9=0x1800
10=0x1a00
11=0x6000
12=0x6001
13=0x6002

[1000]
ParameterName=Device Type
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0

[1001]
ParameterName=Error Register
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0

[1003]
ParameterName=Predefined Error Field
ObjectType=8
SubNumber=2

[1003sub0]
ParameterName=Number of Errors
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
DefaultValue=0

[1003sub1]
ParameterName=Standard Error Field
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0

[1008]
ParameterName=Manufacturer device name
ObjectType=7
DataType=9
AccessType=const
PDOMapping=0
DefaultValue=emotas Slave 1

[1014]
ParameterName=COB ID EMCY
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=$NODEID+0x80

[1015]
ParameterName=Inhibit Time Emergency
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0
DefaultValue=0x0

[1016]
ParameterName=Consumer Heartbeat Time
ObjectType=8
SubNumber=1

[1016sub0]
ParameterName=Number of entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=0

[1017]
ParameterName=Producer Heartbeat Time
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0

[1018]
ParameterName=Identity Object
ObjectType=9
SubNumber=5

[1018sub0]
ParameterName=Number of entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=4

[1018sub1]
ParameterName=Vendor Id
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0x319

[1018sub2]
ParameterName=Product Code
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=1234

[1018sub3]
ParameterName=Revision number
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0x1

[1018sub4]
ParameterName=Serial number
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0

[1029]
ParameterName=Error behaviour
ObjectType=8
SubNumber=3

[1029sub0]
ParameterName=Nr of Error Classes
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=2

[1029sub1]
ParameterName=Communication Error
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
DefaultValue=1

[1029sub2]
ParameterName=Specific Error Class
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0

[1200]
ParameterName=Server SDO Parameter
ObjectType=9
SubNumber=3

[1200sub0]
ParameterName=Number of entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=2

[1200sub1]
ParameterName=COB ID Client to Server
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=$NODEID+0x600

[1200sub2]
ParameterName=COB ID Server to Client
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=$NODEID+0x580

[1800]
ParameterName=TPDO communication parameter
ObjectType=9
SubNumber=6

[1800sub0]
ParameterName=Highest sub-index supported
ObjectType=7
DataType=5
AccessType=const
PDOMapping=0
DefaultValue=6

[1800sub1]
ParameterName=COB-ID used by TPDO
ObjectType=7
DataType=7
AccessType=rw
PDOMapping=0

[1800sub2]
ParameterName=Transmission type
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0

[1800sub3]
ParameterName=Inhibit time
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0

[1800sub5]
ParameterName=Event timer
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0

[1800sub6]
ParameterName=SYNC start value
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0

[1a00]
ParameterName=Transmit PDO Mapping Parameter
ObjectType=9
SubNumber=2
;;This object contains the mapping for the PDO the device is able to transmit.
;;

[1a00sub0]
ParameterName=Highest sub-index supported
ObjectType=7
DataType=5
AccessType=const
PDOMapping=0
DefaultValue=1

[1a00sub1]
ParameterName=Mapping Entry 1
ObjectType=7
DataType=7
AccessType=const
PDOMapping=0
DefaultValue=0x60010008

[2001]
ParameterName=Manufacturer Object
ObjectType=7
DataType=4
AccessType=rw
PDOMapping=1

[3000]
ParameterName=Managed Array
ObjectType=8
SubNumber=3

[3000sub0]
ParameterName=NUmber of Entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=2

[3000sub1]
ParameterName=Sub 1
ObjectType=7
DataType=3
AccessType=ro
PDOMapping=1

[3000sub2]
ParameterName=sub 2
ObjectType=7
DataType=3
AccessType=rw
PDOMapping=1

[6000]
ParameterName=Position Set Point
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0

[6001]
ParameterName=Position Feedback
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=1

[6002]
ParameterName=Motor Current
ObjectType=7
DataType=8
AccessType=rw
PDOMapping=1
```
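Since EDS files are plain INI text, entries such as the identity object above can be read with the standard library alone. A hypothetical inspection helper, not part of the repo:

```python
# Hypothetical EDS inspection helper (not part of the diff).
import configparser

def read_identity(eds_path: str) -> dict:
    """Return vendor/product/revision info from an EDS object dictionary."""
    # interpolation=None so literal '%' characters in values cannot break parsing
    eds = configparser.ConfigParser(interpolation=None)
    eds.read(eds_path)
    return {
        "vendor_id": eds["1018sub1"]["DefaultValue"],     # e.g. 0x319 (nehemis)
        "product_code": eds["1018sub2"]["DefaultValue"],  # e.g. 1234
        "revision": eds["1018sub3"]["DefaultValue"],      # e.g. 0x1
    }

print(read_identity("eds_file/inletvalveboard.eds"))
```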
BIN hardware/__pycache__/classCAN.cpython-311.pyc (new file) — binary file not shown
BIN hardware/__pycache__/patient_skid.cpython-311.pyc (new file) — binary file not shown
hardware/classCAN.py — 377 lines (new file)

```python
import threading
import canopen
import can
import time
import os
import logging


class CANBackend:
    """
    CANBackend handles CANopen communication with two Process Units (PU1 and PU2).
    It listens for TPDOs, tracks real-time data, and sends SDO control commands
    such as setting system modes and setpoints.
    """

    def __init__(self, eds_file=None):
        """
        Initialize the CAN backend.

        :param eds_file: Optional path to the EDS file to use for the master node.
        """
        self.network = None
        self.master_node = None
        self.master_node_id = 0x16  # Docking board node ID
        self.nodes = {}
        self.connected = False
        self.lock = threading.Lock()
        self.latest_data = {
            0: {},  # Docking parameters
            1: {},  # PU1 data
            2: {},  # PU2 data
        }

        # Default EDS file path
        self.eds_path = eds_file if eds_file else os.path.join(os.path.dirname(__file__), "eds_file", "dockingBoard_0.eds")

    def connect(self):
        """
        Connects to the CAN network and sets up the master node.

        :return: True if successful, False otherwise.
        """
        try:
            self.network = canopen.Network()
            self.network.connect(channel='can0', bustype='socketcan')

            self.master_node = canopen.RemoteNode(self.master_node_id, self.eds_path)
            self.network.add_node(self.master_node)
            self.master_node.nmt.state = 'OPERATIONAL'
            self.nodes[0] = self.master_node

            # Start background listener for TPDOs
            self.listener_active = True
            self.bus = can.interface.Bus(channel='can0', bustype='socketcan')
            self.listener_thread = threading.Thread(target=self._can_listener_loop, daemon=True)
            self.listener_thread.start()

            self.connected = True
            return True

        except Exception as e:
            print(f"[CONNECT ERROR] {e}")
            return False

    def shutdown(self):
        """
        Cleanly shuts down the CAN backend and listener.
        """
        self.listener_active = False
        if self.network:
            self.network.disconnect()
        if hasattr(self, 'bus'):
            self.bus.shutdown()
        self.nodes.clear()
        self.connected = False

    def _can_listener_loop(self):
        """
        Background thread to listen for CAN TPDO messages.
        Updates the internal state for PU1 and PU2 based on COB-ID.
        """
        while self.listener_active:
            msg = self.bus.recv(1.0)
            if msg is None:
                continue

            try:
                cob_id = msg.arbitration_id
                data = msg.data

                with self.lock:
                    # ========== PU1 COB-IDs ==========
                    if cob_id == 0x2A6 and len(data) >= 8:
                        self.latest_data[1].update({
                            "FM1": int.from_bytes(data[0:2], 'little') / 100.0 * 60.0,
                            "FM2": int.from_bytes(data[2:4], 'little') / 100.0 * 60.0,
                            "FM3": int.from_bytes(data[4:6], 'little') / 100.0 * 60.0,
                            "FM4": int.from_bytes(data[6:8], 'little') / 100.0 * 60.0,
                        })

                    elif cob_id == 0x2A7 and len(data) == 6:
                        self.latest_data[1].update({
                            "PS1": int.from_bytes(data[0:2], 'little') / 1000.0,
                            "PS2": int.from_bytes(data[2:4], 'little') / 1000.0,
                            "PS3": int.from_bytes(data[4:6], 'little') / 1000.0,
                        })

                    elif cob_id == 0x2A8 and len(data) >= 8:
                        self.latest_data[1].update({
                            "MV02_sp": int.from_bytes(data[0:2], 'little') / 100.0,
                            "MV03_sp": int.from_bytes(data[2:4], 'little') / 100.0,
                            "MV04_sp": int.from_bytes(data[4:6], 'little') / 100.0,
                            "MV05_sp": int.from_bytes(data[6:8], 'little') / 100.0,
                        })

                    elif cob_id == 0x2A9 and len(data) >= 8:
                        self.latest_data[1].update({
                            "MV06_sp": int.from_bytes(data[0:2], 'little') / 100.0,
                            "MV07_sp": int.from_bytes(data[2:4], 'little') / 100.0,
                            "MV08_sp": int.from_bytes(data[4:6], 'little') / 100.0,
                            "Pump_sp": int.from_bytes(data[6:8], 'little') / 100.0,
                        })

                    elif cob_id == 0x2AA and len(data) >= 7:
                        data = list(data)
                        self.latest_data[1].update({
                            "MV02": 100 * data[0] / 255,
                            "MV03": 100 * data[1] / 255,
                            "MV04": 100 * data[2] / 255,
                            "MV05": 100 * data[3] / 255,
                            "MV06": 100 * data[4] / 255,
                            "MV07": 100 * data[5] / 255,
                            "MV08": 100 * data[6] / 255,
                        })

                    elif cob_id == 0x2AB and len(data) >= 7:
                        self.latest_data[1].update({
                            "PU1_STATE": data[0],
                            "Conductivity_Feed": int.from_bytes(data[1:3], 'little') / 100.0,
                            "Conductivity_Permeate": int.from_bytes(data[3:5], 'little') / 100.0,
                            "Conductivity_Product": int.from_bytes(data[5:7], 'little') / 100.0,
                        })

                    # ========== PU2 COB-IDs ==========
                    elif cob_id == 0x2AD and len(data) >= 8:
                        self.latest_data[2].update({
                            "FM1": int.from_bytes(data[0:2], 'little') / 100.0 * 60.0,
                            "FM2": int.from_bytes(data[2:4], 'little') / 100.0 * 60.0,
                            "FM3": int.from_bytes(data[4:6], 'little') / 100.0 * 60.0,
                            "FM4": int.from_bytes(data[6:8], 'little') / 100.0 * 60.0,
                        })

                    elif cob_id == 0x2AE and len(data) == 6:
                        self.latest_data[2].update({
                            "PS1": int.from_bytes(data[0:2], 'little') / 1000.0,
                            "PS2": int.from_bytes(data[2:4], 'little') / 1000.0,
                            "PS3": int.from_bytes(data[4:6], 'little') / 1000.0,
                        })

                    elif cob_id == 0x2AF and len(data) >= 8:
                        self.latest_data[2].update({
                            "MV02_sp": int.from_bytes(data[0:2], 'little') / 100.0,
                            "MV03_sp": int.from_bytes(data[2:4], 'little') / 100.0,
                            "MV04_sp": int.from_bytes(data[4:6], 'little') / 100.0,
                            "MV05_sp": int.from_bytes(data[6:8], 'little') / 100.0,
                        })

                    elif cob_id == 0x2B0 and len(data) >= 8:
                        self.latest_data[2].update({
                            "MV06_sp": int.from_bytes(data[0:2], 'little') / 100.0,
                            "MV07_sp": int.from_bytes(data[2:4], 'little') / 100.0,
                            "MV08_sp": int.from_bytes(data[4:6], 'little') / 100.0,
                            "Qdrain_sp": int.from_bytes(data[6:8], 'little') / 100.0,
                        })

                    elif cob_id == 0x2B1 and len(data) >= 7:
                        data = list(data)
                        self.latest_data[2].update({
                            "MV02": 100 * data[0] / 255,
                            "MV03": 100 * data[1] / 255,
                            "MV04": 100 * data[2] / 255,
                            "MV05": 100 * data[3] / 255,
                            "MV06": 100 * data[4] / 255,
                            "MV07": 100 * data[5] / 255,
                            "MV08": 100 * data[6] / 255,
                        })

                    # elif cob_id == 0x1B9 and len(data) >= 6:
                    #     self.latest_data[1].update({
                    #         "Conductivity_Feed": int.from_bytes(data[0:2], 'little'),
                    #         "Conductivity_Permeate": int.from_bytes(data[2:4], 'little'),
                    #         "Conductivity_Product": int.from_bytes(data[4:6], 'little'),
                    #     })

                    # elif cob_id == 0x1BA and len(data) >= 6:
                    #     self.latest_data[1].update({
                    #         "Temperature_Feed": int.from_bytes(data[0:2], 'little'),
                    #         "Temperature_Permeate": int.from_bytes(data[2:4], 'little'),
                    #         "Temperature_Product": int.from_bytes(data[4:6], 'little'),
                    #     })

                    # elif cob_id == 0x2B2 and len(data) >= 1:
                    #     self.latest_data[2]["PU2_STATE"] = data[0]

                    elif cob_id == 0x2B2 and len(data) >= 7:
                        self.latest_data[2].update({
                            "PU2_STATE": data[0],
                            "Conductivity_Feed": int.from_bytes(data[1:3], 'little') / 100.0,
                            "Conductivity_Permeate": int.from_bytes(data[3:5], 'little') / 100.0,
                            "Conductivity_Product": int.from_bytes(data[5:7], 'little') / 100.0,
                        })

                    # ========== Docking Parameters ==========
                    elif cob_id == 0x2AC and len(data) >= 8:
                        self.latest_data[0].update({
                            "Ploop_sp": int.from_bytes(data[0:2], 'little') / 1.0,
                            "Pdilute_sp": int.from_bytes(data[2:4], 'little') / 1.0,
                            "Qdrain_sp": int.from_bytes(data[4:6], 'little') / 1.0,
                            "TankLevel": int.from_bytes(data[6:8], 'little') / 1.0,
                        })

                    elif cob_id == 0x2B3 and len(data) >= 8:
                        self.latest_data[0].update({
                            "Inlet_flow": int.from_bytes(data[0:2], 'little') / 10.0,
                            "Outlet_flow": int.from_bytes(data[2:4], 'little') / 10.0,
                            "Pressure_perm": int.from_bytes(data[4:6], 'little') / 1000.0,
                            "Pressure_ro": int.from_bytes(data[6:8], 'little') / 1000.0,
                        })

                    # # ========== PU1 DRIFT CHECK ==========
                    # if cob_id in (0x2A6, 0x2A8):  # FM1 or MV03_sp updates for PU1
                    #     mv03_sp = self.latest_data[1].get("MV03_sp")
                    #     qdrain = self.latest_data[1].get("FM1")
                    #     if mv03_sp is not None and qdrain is not None:
                    #         if mv03_sp <= 0 or qdrain <= 0:
                    #             print(f"🔇 Skipping PU1 drift check (idle) → MV03_sp: {mv03_sp:.2f}, Qdrain: {qdrain:.2f}")
                    #         elif detect_mv03_drift(mv03_sp, qdrain):
                    #             print(f"⚠️ Drift detected on PU1 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")
                    #         else:
                    #             print(f"✅ No drift on PU1 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")

                    # # ========== PU2 DRIFT CHECK ==========
                    # if cob_id in (0x2AD, 0x2AF):  # FM1 or MV03_sp updates for PU2
                    #     mv03_sp = self.latest_data[2].get("MV03_sp")
                    #     qdrain = self.latest_data[2].get("FM1")
                    #     if mv03_sp is not None and qdrain is not None:
                    #         if mv03_sp <= 0 or qdrain <= 0:
                    #             print(f"🔇 Skipping PU2 drift check (idle) → MV03_sp: {mv03_sp:.2f}, Qdrain: {qdrain:.2f}")
                    #         elif detect_mv03_drift(mv03_sp, qdrain):
                    #             print(f"⚠️ Drift detected on PU2 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")
                    #         else:
                    #             print(f"✅ No drift on PU2 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")

            except Exception as e:
                print(f"[TPDO PARSE ERROR] {e}")

    def get_latest_data(self, pu_number: int):
        """
        Retrieve the latest real-time data for the given PU.

        :param pu_number: 1 or 2
        :return: Dictionary of flow, pressure, valve data
        """
        with self.lock:
            return self.latest_data.get(pu_number, {}).copy()

    def read_current_state(self, pu_number: int):
        """
        Get the system mode (decoded string) of the given PU.

        :param pu_number: 1 or 2
        :return: State name or "Offline"
        """
        state_val = self.latest_data.get(pu_number, {}).get(f"PU{pu_number}_STATE")
        return self.decode_state(state_val) if state_val is not None else "Offline"

    def decode_state(self, state_val: int) -> str:
        """
        Convert system state integer to human-readable label.

        :param state_val: Integer value from TPDO
        :return: String state name
        """
        state_map = {
            0: "SYSTEM_MODE_INIT",
            1: "SYSTEM_MODE_OFF",
            2: "SYSTEM_MODE_READY",
            3: "SYSTEM_MODE_PRODUCTION",
            4: "SYSTEM_MODE_LOW_LOOP_PRESSURE",
            5: "SYSTEM_MODE_LOOP_CLEANING",
            6: "SYSTEM_MODE_HEATING_RO",
            7: "SYSTEM_MODE_RINSING_RO",
            8: "SYSTEM_MODE_HEATING_EDI",
            9: "SYSTEM_MODE_COOLING_EDI",
            10: "SYSTEM_MODE_RO_FLUSH",
            11: "SYSTEM_MODE_RO_RINSE",
            12: "SYSTEM_MODE_EDI_RINSE",
            15: "SYSTEM_MODE_FAIL_SAFE",
            16: "SYSTEM_MODE_FIRST_FLUSH",
            255: "SYSTEM_MODE_DEFAULT"
        }
        return state_map.get(state_val, f"UNKNOWN({state_val})")

    def send_state_command(self, state: str, pu_number: int, ploop_setpoint: float, qperm_setpoint: float):  # TODO: use qperm_setpoint
        """
        Send the PU state and pressure loop setpoint to the master node.

        :param state: State string (e.g., "PRODUCTION")
        :param pu_number: PU1 or PU2
        :param ploop_setpoint: Float setpoint in bar (will be scaled)
        """
        if not self.connected:
            raise RuntimeError("CAN not connected")

        state_map = {
            "IDLE": 1,
            "PRE-PRODUCTION": 2,
            "PRODUCTION": 3,
            "MAINTENANCE": 8,
            "EMERGENCY_STOP": 9,
            "FIRST_START": 10
        }

        if state not in state_map:
            raise ValueError(f"Invalid state: {state}")

        try:
            master_node = self.nodes.get(0)
            if master_node is None:
                raise ValueError("Master node not connected")

            state_index = 0x3000
            setpoint_index = 0x3001

            print(f"[DEBUG] Writing state {state_map[state]} to master OD 0x{state_index:04X}:{pu_number:02X}")
            master_node.sdo[state_index][pu_number].raw = state_map[state] & 0xFF

            print(f"[DEBUG] Writing ploop_setpoint {ploop_setpoint} to master OD 0x{setpoint_index:04X}:{pu_number:02X}")
            master_node.sdo[setpoint_index][1].raw = int(ploop_setpoint * 100)

            print(f"[DEBUG] Writing qperm_setpoint {qperm_setpoint} to master OD 0x{setpoint_index:04X}:{pu_number:02X}")
            master_node.sdo[setpoint_index][5].raw = int(qperm_setpoint)

        except Exception as e:
            print(f"[MASTER SDO WRITE ERROR] PU{pu_number}: {e}")
            raise

    def send_thermal_loop_cleaning(self, mode: str, pu_number: int):
        """
        Activate or deactivate thermal loop cleaning mode.

        :param mode: "IDLE" or "ACTIVE"
        :param pu_number: PU1 or PU2
        """
        if not self.connected:
            raise RuntimeError("CAN not connected")

        mode_map = {
            "IDLE": 0,
            "ACTIVE": 1
        }

        if mode not in mode_map:
            raise ValueError(f"Invalid thermal loop mode: {mode}")

        try:
            node = self.nodes.get(pu_number)
            if node is None:
                raise ValueError(f"PU{pu_number} not connected")

            print(f"[DEBUG] Sending thermal loop mode {mode} to 0x2024:{pu_number}")
            node.sdo[0x2024][pu_number].raw = mode_map[mode]

        except Exception as e:
            print(f"[THERMAL LOOP ERROR] PU{pu_number}: {e}")
            raise
```
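The listener's unpacking convention is uniform: each TPDO packs up to four unsigned 16-bit little-endian words, each divided by a per-signal scale factor. A standalone sketch of the same decode for the PU1 flow frame (COB-ID 0x2A6), runnable without a CAN bus; the payload bytes below are made up:

```python
# Minimal decode of one flow-meter TPDO (COB-ID 0x2A6), mirroring the listener.
def decode_flow_frame(data: bytes) -> dict:
    # Each word: raw / 100.0, then x60 — the listener's FM scaling (L/min downstream).
    return {
        f"FM{i + 1}": int.from_bytes(data[2 * i:2 * i + 2], 'little') / 100.0 * 60.0
        for i in range(4)
    }

fake_payload = (150).to_bytes(2, 'little') * 4   # hypothetical: raw count 150 per channel
print(decode_flow_frame(fake_payload))           # {'FM1': 90.0, 'FM2': 90.0, ...}
```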
hardware/eds_file/dockingBoard_0.eds — 6357 lines (new file; diff suppressed because it is too large)
hardware/patient_skid.py — 30 lines (new file)

```python
import httpx
import logging
from fastapi import HTTPException  # used below; missing from the file as committed


def handle_patient_skid_for_idle() -> None:
    """Send the special commands to the patient skid when entering IDLE."""
    try:
        url = "http://192.168.1.28:8000/stop_test"
        response = httpx.get(url, timeout=1.0)
        logging.info(f"Stopping test on Patient Skid: {response.status_code}")

        url = "http://192.168.1.28:8000/close_valves"
        response = httpx.get(url, timeout=1.0)
        logging.info(f"Closing valves on Patient Skid: {response.status_code}")
    except Exception as e:
        logging.error(f"Error handling patient skid for IDLE: {e}")
        raise


def set_patient_skid_users(count: int = 0):
    try:
        url = f"http://192.168.1.28:8000/set_users/{count}"
        response = httpx.get(url, timeout=5.0)

        # Kicks off the defined test; its response is not checked.
        response_2 = httpx.get("http://192.168.1.28:8000/start_defined_test", timeout=5.0)

        if response.status_code == 200:
            return {"status": "success", "detail": response.json()}
        else:
            raise HTTPException(status_code=502, detail=f"Remote server error: {response.text}")
    except httpx.RequestError as e:
        raise HTTPException(status_code=500, detail=f"Request to external server failed: {str(e)}")
```
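A sketch of how a state handler might wire in these helpers — hypothetical caller code; only the two imported names come from the diff:

```python
# Hypothetical caller: park the patient skid whenever a PU drops to IDLE.
from hardware.patient_skid import handle_patient_skid_for_idle, set_patient_skid_users

def on_state_change(new_state: str) -> None:
    if new_state == "IDLE":
        handle_patient_skid_for_idle()    # stop the running test, close valves
    elif new_state == "PRODUCTION":
        set_patient_skid_users(count=3)   # configure and start a defined test
```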
522
main.py
522
main.py
|
|
@ -1,47 +1,44 @@
|
||||||
from fastapi import FastAPI, HTTPException, Query, Form, Depends
|
from fastapi import FastAPI, HTTPException, Form
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
|
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
from fastapi import Request, APIRouter
|
from fastapi import Request, APIRouter
|
||||||
import platform
|
import platform
|
||||||
from fastapi.templating import (
|
from fastapi.templating import Jinja2Templates
|
||||||
Jinja2Templates,
|
|
||||||
) # pip install fastapi uvicorn jinja2 python-multipart passlib
|
|
||||||
from starlette.middleware.sessions import SessionMiddleware
|
from starlette.middleware.sessions import SessionMiddleware
|
||||||
from starlette.exceptions import HTTPException as StarletteHTTPException
|
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||||
from starlette.status import HTTP_302_FOUND
|
from starlette.status import HTTP_302_FOUND
|
||||||
import json
|
import json
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Optional, Dict, Any
|
from typing import Dict, Any
|
||||||
from fastapi import Query
|
from fastapi import Query
|
||||||
import asyncio
|
import asyncio
|
||||||
import datetime
|
import datetime
|
||||||
from valveBackend import ValveBackend
|
|
||||||
import csv
|
import csv
|
||||||
from collections import deque
|
from collections import deque
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import httpx
|
from hardware.patient_skid import handle_patient_skid_for_idle, set_patient_skid_users
|
||||||
|
|
||||||
|
from serial_manager import SerialConfig, SerialStore, SerialReader
|
||||||
|
from protocol_decoder import decode_frames
|
||||||
|
from serial_csv_logger import SerialCsvLogger # <-- CSV logger
|
||||||
|
|
||||||
if platform.system() in ["Darwin"]: # macOS or Windows
|
if platform.system() in ["Darwin"]: # macOS or Windows
|
||||||
from MockCAN import CANBackend
|
from MockCAN import CANBackend
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
else:
|
||||||
|
from hardware.classCAN import CANBackend # Your real backend
|
||||||
logging.basicConfig(level=logging.INFO)
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
else:
|
logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
|
||||||
from classCAN import CANBackend # Your real backend
|
|
||||||
|
|
||||||
logging.basicConfig(level=logging.ERROR)
|
|
||||||
|
|
||||||
app = FastAPI()
|
app = FastAPI()
|
||||||
app.add_middleware(SessionMiddleware, secret_key="your_super_secret_key")
|
app.add_middleware(SessionMiddleware, secret_key="your_super_secret_key")
|
||||||
router = APIRouter()
|
router = APIRouter()
|
||||||
templates = Jinja2Templates(directory="templates")
|
templates = Jinja2Templates(directory="templates")
|
||||||
can_backend = CANBackend()
|
can_backend = CANBackend()
|
||||||
valve_backend = ValveBackend(
|
|
||||||
eds_file="/home/hmi/Desktop/HMI/eds_file/inletvalveboard.eds"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Serve static files (HTML, JS, CSS)
|
# Serve static files (HTML, JS, CSS)
|
||||||
app.mount("/static", StaticFiles(directory="static"), name="static")
|
app.mount("/static", StaticFiles(directory="static"), name="static")
|
||||||
|
|
@ -52,10 +49,29 @@ latest_data: Dict[str, Any] = {
|
||||||
"PU_1": None,
|
"PU_1": None,
|
||||||
"PU_2": None,
|
"PU_2": None,
|
||||||
"PU_3": None,
|
"PU_3": None,
|
||||||
|
"DS": None,
|
||||||
"PatientSkid": {"QSkid": 0.0},
|
"PatientSkid": {"QSkid": 0.0},
|
||||||
}
|
}
|
||||||
|
|
||||||
DEFAULT_FEED_VALVE = 0.0
|
latest_setpoints: Dict[str, Any] = {
|
||||||
|
"PU_1": {"Ploop_sp": 0.0, "Qperm_sp": 0.0},
|
||||||
|
"PU_2": {"Ploop_sp": 0.0, "Qperm_sp": 0.0},
|
||||||
|
"PU_3": {"Ploop_sp": 0.0, "Qperm_sp": 0.0},
|
||||||
|
}
|
||||||
|
|
||||||
|
active_PUs: list[int] = []
|
||||||
|
VALID_STATES = {
|
||||||
|
"IDLE",
|
||||||
|
"PRE-PRODUCTION",
|
||||||
|
"PRODUCTION",
|
||||||
|
"FIRST_START",
|
||||||
|
"THERMALLOOPCLEANING",
|
||||||
|
"DISINFECTION",
|
||||||
|
"SLEEP",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Dictionary to hold running tasks
|
||||||
|
tasks: dict[str, asyncio.Task] = {}
|
||||||
|
|
||||||
# RECORDER
|
# RECORDER
|
||||||
recording_flag = False
|
recording_flag = False
|
||||||
|
|
@ -66,27 +82,31 @@ write_buffer = deque()
|
||||||
flush_interval = 1.0 # flush every 1 second
|
flush_interval = 1.0 # flush every 1 second
|
||||||
last_flush_time = datetime.datetime.now()
|
last_flush_time = datetime.datetime.now()
|
||||||
|
|
||||||
|
# ---- Serial intake globals ----
|
||||||
|
serial_store = SerialStore(capacity=5000)
|
||||||
|
serial_reader: SerialReader | None = None
|
||||||
|
serial_csv: SerialCsvLogger | None = None # <-- added
|
||||||
|
|
||||||
## LOGGING
|
## LOGGING
|
||||||
|
def format_PU_data(data):
|
||||||
|
|
||||||
def format_data(data):
|
|
||||||
return {
|
return {
|
||||||
"timestamp": datetime.datetime.now().isoformat(),
|
"timestamp": datetime.datetime.now().isoformat(),
|
||||||
"Qperm": np.round(data.get("FM2", 0.0), 1),
|
"Qperm": np.round(data.get("FM2", 0.0), 1),
|
||||||
"Qdilute": np.round(data.get("FM1", 0.0), 1),
|
"Qdilute": np.round(data.get("FM1", 0.0), 1),
|
||||||
"Qdrain": np.round(data.get("FM4", 0.0), 1),
|
"Qdrain": np.round(data.get("FM4", 0.0), 1),
|
||||||
"Qrecirc": np.round(data.get("FM3", 0.0), 1),
|
"Qrecirc": np.round(data.get("FM3", 0.0), 1),
|
||||||
"QdrainEDI": np.round(data.get("FM2", 0.0), 1)- np.round(data.get("FM1", 0.0), 1),
|
"QdrainEDI": np.round(data.get("FM2", 0.0), 1) - np.round(data.get("FM1", 0.0), 1),
|
||||||
"Pro": np.round(data.get("PS2", 0.0), 2),
|
"Pro": np.round(data.get("PS2", 0.0), 2),
|
||||||
"Pdilute": np.round(data.get("PS3", 0.0), 2),
|
"Pdilute": np.round(data.get("PS3", 0.0), 2),
|
||||||
"Pretentate": np.round(data.get("PS1", 0.0), 2),
|
"Pretentate": np.round(data.get("PS1", 0.0), 2),
|
||||||
"Conductivity": np.round(data.get("Cond", 0.0), 1),
|
"Cfeed": data.get("Conductivity_Feed", 0.0),
|
||||||
|
"Cperm": data.get("Conductivity_Permeate", 0.0),
|
||||||
|
"Cdilute": data.get("Conductivity_Product", 0.0),
|
||||||
"MV02": np.round(data.get("MV02", 0.0), 1),
|
"MV02": np.round(data.get("MV02", 0.0), 1),
|
||||||
"MV02_sp": np.round(data.get("MV02_sp", 0.0), 1),
|
"MV02_sp": np.round(data.get("MV02_sp", 0.0), 1),
|
||||||
"MV03": np.round(data.get("MV03", 0.0), 1),
|
"MV03": np.round(data.get("MV03", 0.0), 1),
|
||||||
"MV03_sp": np.round(data.get("MV03_sp", 0.0), 1),
|
"MV03_sp": np.round(data.get("MV03_sp", 0.0), 1),
|
||||||
"MV04": np.round(data.get("MV05", 0.0), 1),
|
"MV04": np.round(data.get("MV04", 0.0), 1),
|
||||||
"MV04_sp": np.round(data.get("MV04_sp", 0.0), 1),
|
"MV04_sp": np.round(data.get("MV04_sp", 0.0), 1),
|
||||||
"MV05": np.round(data.get("MV05", 0.0), 1),
|
"MV05": np.round(data.get("MV05", 0.0), 1),
|
||||||
"MV05_sp": np.round(data.get("MV05_sp", 0.0), 1),
|
"MV05_sp": np.round(data.get("MV05_sp", 0.0), 1),
|
||||||
|
|
@ -96,12 +116,34 @@ def format_data(data):
|
||||||
"MV07_sp": np.round(data.get("MV07_sp", 0.0), 1),
|
"MV07_sp": np.round(data.get("MV07_sp", 0.0), 1),
|
||||||
"MV08": np.round(data.get("MV08", 0.0), 1),
|
"MV08": np.round(data.get("MV08", 0.0), 1),
|
||||||
"MV08_sp": np.round(data.get("MV08_sp", 0.0), 1),
|
"MV08_sp": np.round(data.get("MV08_sp", 0.0), 1),
|
||||||
|
"Qdrain_sp" : max(60*np.round(data.get("Qdrain_sp", 0.0), 2),350.0),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
def format_DS_data(data):
|
||||||
|
q_conso = max(np.round(data.get("Inlet_flow", 0.0), 1) - np.round(data.get("Outlet_flow", 0.0), 1),0)
|
||||||
|
return {
|
||||||
|
"timestamp": datetime.datetime.now().isoformat(),
|
||||||
|
"Qconso": q_conso ,
|
||||||
|
"TankLevel": np.round(data.get("TankLevel", 0.0), 2),
|
||||||
|
"Qinlet": np.round(data.get("Inlet_flow", 0.0), 1),
|
||||||
|
"Qoutlet": np.round(data.get("Outlet_flow", 0.0), 1),
|
||||||
|
}
|
||||||
|
|
||||||
|
## Fetch setpoints
|
||||||
|
def update_setpoints(p_loop_setpoint : float, q_perm_setpoint : float, pu : int):
|
||||||
|
global latest_setpoints
|
||||||
|
pu_key = "PU_"+str(pu)
|
||||||
|
latest_setpoints[pu_key]["Ploop_sp"] = p_loop_setpoint
|
||||||
|
latest_setpoints[pu_key]["Qperm_sp"] = q_perm_setpoint
|
||||||
|
|
||||||
|
def format_setpoints(pu: int): # THis is a bit convoluted to pass from an object to another but it works
|
||||||
|
global latest_setpoints, latest_data
|
||||||
|
pu_key = "PU_" + str(pu)
|
||||||
|
latest_data[pu_key]["Ploop_sp"] = latest_setpoints[pu_key]["Ploop_sp"]
|
||||||
|
latest_data[pu_key]["Qperm_sp"] = latest_setpoints[pu_key]["Qperm_sp"]
|
||||||
|
|
||||||
|
|
||||||
# CREDENTIALS


-# Load users from JSON file at startup
CREDENTIAL_PATH = Path("credentials.json")
if CREDENTIAL_PATH.exists():
    with CREDENTIAL_PATH.open("r") as f:

@@ -112,7 +154,6 @@ else:
USERNAME = CREDENTIALS["username"]
PASSWORD = CREDENTIALS["password"]


# ======== LOGIN & SESSION HANDLING ========
def require_login(request: Request):
    user = request.session.get("user")

@@ -121,12 +162,10 @@ def require_login(request: Request):
        raise StarletteHTTPException(status_code=302, detail="Redirect to login")
    return user


@app.get("/", response_class=HTMLResponse)
def login_form(request: Request):
    return templates.TemplateResponse("login.html", {"request": request})


@app.post("/login")
def login(request: Request, username: str = Form(...), password: str = Form(...)):
    if username == USERNAME and password == PASSWORD:

@@ -136,160 +175,215 @@ def login(request: Request, username: str = Form(...), password: str = Form(...)
        "login.html", {"request": request, "error": "Invalid credentials.json"}
    )


@app.get("/logout")
def logout(request: Request):
    request.session.clear()
    return RedirectResponse("/", status_code=HTTP_302_FOUND)
+# ======== PROTECTED INTERFACE / STARTUP-SHUTDOWN ========
+@app.on_event("startup")
+async def startup_event():
+    # ----- CSV logger -----
+    global serial_csv
+    serial_csv = SerialCsvLogger(out_dir="serial_logs", rotate_daily=True)

-# ======== PROTECTED INTERFACE ========
+    # ----- start the serial reader -----
+    global serial_reader
+    cfg = SerialConfig(
+        port=os.getenv("SERIAL_PORT", "/dev/ttyUSB0"),
+        baudrate=int(os.getenv("SERIAL_BAUD", "115200")),
+        csv_log_path=None,  # disable the generic CSV inside reader; use segregated logger instead
+        ring_capacity=int(os.getenv("SERIAL_RING", "5000")),
+    )
+    serial_reader = SerialReader(
+        cfg,
+        serial_store,
+        decoder=decode_frames,
+        on_message=(lambda p: serial_csv.log(p)),  # write CSV per message type
+    )
+    serial_reader.start()

+    # ----- your existing tasks -----
+    asyncio.create_task(update_latest_data())
+    asyncio.create_task(update_latest_flow())


+@app.on_event("shutdown")
+def _serial_stop():
+    if serial_reader:
+        serial_reader.stop()
+    if serial_csv:
+        serial_csv.close()


+# ======== PAGES ========
@app.get("/control", response_class=HTMLResponse)
|
@app.get("/control", response_class=HTMLResponse)
|
||||||
def control_page(request: Request):
|
def control_page(request: Request):
|
||||||
|
can_backend.connect()
|
||||||
if request.session.get("user") != USERNAME:
|
if request.session.get("user") != USERNAME:
|
||||||
return RedirectResponse("/", status_code=HTTP_302_FOUND)
|
return RedirectResponse("/", status_code=HTTP_302_FOUND)
|
||||||
return templates.TemplateResponse("control.html", {"request": request})
|
return templates.TemplateResponse("control.html", {"request": request})
|
||||||
|
|
||||||
|
@app.get("/monitor-DS", response_class=HTMLResponse)
|
||||||
@app.get("/monitor-page", response_class=HTMLResponse)
|
|
||||||
def monitor_page(request: Request):
|
def monitor_page(request: Request):
|
||||||
with open("static/monitor.html") as f:
|
with open("static/monitor_DS.html") as f:
|
||||||
return HTMLResponse(f.read())
|
return HTMLResponse(f.read())
|
||||||
|
|
||||||
@app.get("/multi-monitor-page", response_class=HTMLResponse)
|
@app.get("/monitor-PU", response_class=HTMLResponse)
|
||||||
|
def monitor_page(request: Request):
|
||||||
|
with open("static/monitor_PU.html") as f:
|
||||||
|
return HTMLResponse(f.read())
|
||||||
|
|
||||||
|
@app.get("/multi-monitor-PU", response_class=HTMLResponse)
|
||||||
def monitor_page(request: Request):
|
def monitor_page(request: Request):
|
||||||
with open("static/multi_pu_dashboard.html") as f:
|
with open("static/multi_pu_dashboard.html") as f:
|
||||||
return HTMLResponse(f.read())
|
return HTMLResponse(f.read())
|
||||||
|
|
||||||
+# ======== SERIAL API ========
+@app.get("/serial/messages")
+def serial_messages(n: int = 100):
+    return serial_store.latest(min(max(n, 1), 1000))


+@app.get("/serial/stats")
+def serial_stats():
+    return serial_store.stats()


+@app.get("/serial/snapshot")
+def serial_snapshot():
+    return serial_store.latest_by_id()
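Note: the three read-only serial endpoints above can be polled over plain HTTP for debugging. A minimal sketch using httpx (pinned in requirements.txt below); the base URL is an assumption, use wherever uvicorn is actually bound:

# Sketch: poll the read-only serial intake endpoints (base URL assumed).
import httpx

BASE = "http://127.0.0.1:8000"  # hypothetical host/port

with httpx.Client(base_url=BASE, timeout=5.0) as client:
    stats = client.get("/serial/stats").json()        # frame counters + last error
    snapshot = client.get("/serial/snapshot").json()  # latest parsed frame per msg_id
    recent = client.get("/serial/messages", params={"n": 20}).json()  # last 20 frames
    print(stats, list(snapshot.keys()), len(recent))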
# ======== CAN + BACKEND ROUTES ========


@app.post("/connect_toggle")
def connect_toggle():
-    logging.info("Toggling CAN connection...")
+    logging.info(f"Toggling CAN connection, CAN is {can_backend.connected}")
    if can_backend.connected:
        can_backend.shutdown()
+        logging.info("Shutting down CAN connection...")
        return {"connected": False}
    else:
        success = can_backend.connect()
-        try:
-            valve_backend.connect()
-        except Exception as e:
-            print(f"Connection error : {e}")
        if not success:
            raise HTTPException(status_code=500, detail="Connection failed.")
-        return {"connected": True}
+        return {"connected": can_backend.connected}


+@app.get("/is_connected")
+def is_connected():
+    return {"connected": can_backend.connected}


-@app.post("/command/{state}/pu/{pu_number}")
-def send_command(state: str, pu_number: int, ploop_setpoint: float = Query(...)):
-    global DEFAULT_FEED_VALVE
-    VALID_STATES = {
-        "IDLE",
-        "PRE-PRODUCTION",
-        "PRODUCTION",
-        "FIRST_START",
-        "THERMALLOOPCLEANING",
-        "DISINFECTION",
-        "SLEEP",
-    }
+# PU CONTROL
+def validate_state(state: str) -> str:
+    """Normalize and validate the requested state."""
    state = state.upper()
    if state not in VALID_STATES:
        raise HTTPException(status_code=400, detail=f"Invalid state '{state}'")
+    return state


+def expand_pu_number(pu_number: int) -> list[int]:
+    """Temporary rule: if PU = 3 → run on [1, 2]."""
+    return [pu_number] if pu_number != 3 else [1, 2]


+def send_command_to_pu(
+    pu: int, state: str, ploop_setpoint: float, qperm_setpoint: float
+) -> dict:
+    """Send a state command + update setpoints for one PU."""
+    state = validate_state(state)
+    if state == "IDLE":
+        handle_patient_skid_for_idle()
+    update_setpoints(ploop_setpoint, qperm_setpoint, pu)
+    can_backend.send_state_command(state, pu, ploop_setpoint, qperm_setpoint)
+    current_state = can_backend.read_current_state(pu)

+    return {
+        "pu": pu,
+        "command": state,
+        "ploop_setpoint": ploop_setpoint,
+        "qperm_setpoint": qperm_setpoint,
+        "current_state": current_state,
+    }


+@app.post("/command/{state}/pu/{pu_number}")
+def send_command_endpoint(
+    state: str,
+    pu_number: int,
+    ploop_setpoint: float = Query(...),
+    qperm_setpoint: float = Query(...),
+):
    logging.info(f"Sending state '{state}' to PU {pu_number}")
-    if state == "PRE-PRODUCTION":
-        valve_backend.send_command(70)
-    elif "IDLE":
-        valve_backend.send_command(DEFAULT_FEED_VALVE)
+    pus = expand_pu_number(pu_number)
    try:
-        can_backend.send_state_command(state, pu_number, ploop_setpoint)
-        current_state = can_backend.read_current_state(pu_number)
-        return {
-            "status": "success",
-            "command": state,
-            "pu": pu_number,
-            "ploop_setpoint": ploop_setpoint,
-            "current_state": current_state,
-        }
+        results = []
+        for pu in pus:
+            result = send_command_to_pu(pu, state, ploop_setpoint, qperm_setpoint)
+            results.append(result)
+        return {"status": "success", "results": results}
    except Exception as e:
+        logging.error(str(e))
        raise HTTPException(status_code=500, detail=str(e))
+## MONITORING
@app.get("/api/pu_status")
def get_pu_status():
+    global active_PUs, latest_setpoints
    states = {
        "PU1": can_backend.read_current_state(1),
        "PU2": can_backend.read_current_state(2),
        "PU3": can_backend.read_current_state(3),
    }
-    logging.info(f"[PU STATUS] {states}")
+    logging.debug(f"[PU STATUS] {states}")

+    if states["PU1"] == "SYSTEM_MODE_READY":
+        send_command_to_pu(pu=1, state="PRODUCTION", ploop_setpoint=latest_setpoints["PU_1"]["Ploop_sp"], qperm_setpoint=latest_setpoints["PU_1"]["Qperm_sp"])
+    if states["PU2"] == "SYSTEM_MODE_READY":
+        send_command_to_pu(pu=2, state="PRODUCTION", ploop_setpoint=latest_setpoints["PU_2"]["Ploop_sp"], qperm_setpoint=latest_setpoints["PU_2"]["Qperm_sp"])
+    if states["PU3"] == "SYSTEM_MODE_READY":
+        send_command_to_pu(pu=3, state="PRODUCTION", ploop_setpoint=latest_setpoints["PU_3"]["Ploop_sp"], qperm_setpoint=latest_setpoints["PU_3"]["Qperm_sp"])

+    active_PUs = [
+        index + 1
+        for index, (pu, status) in enumerate(states.items())
+        if status != "Offline"
+    ]
+    logging.debug(f"[ACTIVE PU] {active_PUs}")

    return JSONResponse(content=states)
async def update_latest_data():
+    global active_PUs
    while True:
-        for pu in [
-            1,
-            2,
-        ]:  # TODO: REPLACE THIS WITH CONNECTED PUs, IS GET PU STATUS SLOW?
+        # DS
+        data = can_backend.get_latest_data(pu_number=0)
+        latest_data["DS"] = format_DS_data(data)

+        # PUs
+        for pu in active_PUs:
            data = can_backend.get_latest_data(pu_number=pu)
-            latest_data[f"PU_{pu}"] = format_data(data)
-            current_data = latest_data[f"PU_{pu}"]
-            logging.debug(f"[MONITOR BUFFER] PU{pu}: {current_data}")
+            latest_data[f"PU_{pu}"] = format_PU_data(data)
+            format_setpoints(pu)
-        # logging.info(f"[MONITOR BUFFER] latest_data: {latest_data}")
+        logging.debug(f"[MONITOR DS BUFFER] latest_data: {latest_data}")
        await asyncio.sleep(0.05)
@app.get("/monitor")
|
@app.get("/monitor")
|
||||||
async def get_monitor_data(pu_number: Optional[float] = Query(None)):
|
async def get_monitor_data():
|
||||||
print(f"pu_number is {pu_number}")
|
return latest_data
|
||||||
if pu_number is not None:
|
|
||||||
return latest_data.get(f"PU_{pu_number}", {})
|
|
||||||
else:
|
|
||||||
# print(latest_data)
|
|
||||||
return latest_data
|
|
||||||
|
|
||||||
|
|
||||||
@app.on_event("startup")
|
|
||||||
async def startup_event():
|
|
||||||
asyncio.create_task(update_latest_data())
|
|
||||||
asyncio.create_task(update_latest_flow())
|
|
||||||
|
|
||||||
|
|
||||||
@app.get("/can_status")
|
|
||||||
def can_status():
|
|
||||||
"""Return current CAN connection status."""
|
|
||||||
return {"connected": can_backend.connected}
|
|
||||||
|
|
||||||
|
|
||||||
@app.post("/command/feed_valve")
|
|
||||||
def feedvalve_control(MV01_opening: int = Query(...)):
|
|
||||||
"""Control MV01 feed valve"""
|
|
||||||
global DEFAULT_FEED_VALVE
|
|
||||||
DEFAULT_FEED_VALVE = MV01_opening
|
|
||||||
valve_backend.send_command(MV01_opening)
|
|
||||||
logging.info(f"Feed valve opening to {MV01_opening}")
|
|
||||||
return {"status": "ok"}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# LOCAL RECORDER
-@app.post("/start_recording")
-async def start_recording():
+# --- internal helpers (not endpoints) ---
+async def start_recording_internal():
    global recording_flag, recording_task, recording_file, recording_writer

    if recording_flag:
-        raise HTTPException(status_code=400, detail="Already recording.")
+        logging.warning("Recording already in progress.")
+        return None

    now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"recording_{now}.csv"

@@ -297,22 +391,29 @@ async def start_recording():
    filepath = os.path.join("recordings", filename)

    recording_file = open(filepath, "w", newline="")
-    fieldnames = ["timestamp", "pu", "QSkid"] + list(format_data({}).keys())
+    fieldnames_common = ["timestamp", "pu", "QSkid"]
+    fieldnames_DS = list(format_DS_data({}).keys())
+    fieldnames_DS.pop(0)
+    fieldnames_PUs = list(format_PU_data({}).keys())
+    fieldnames_PUs.pop(0)

+    fieldnames = fieldnames_common + fieldnames_DS + fieldnames_PUs + ["Qperm_sp", "Ploop_sp"]

    recording_writer = csv.DictWriter(recording_file, fieldnames=fieldnames)
    recording_writer.writeheader()

    recording_flag = True
    recording_task = asyncio.create_task(record_data_loop())
    logging.info(f"[RECORDING STARTED] File: {filepath}")
-    return {"status": "recording started", "file": filename}
+    return filename


-@app.post("/stop_recording")
-async def stop_recording():
+async def stop_recording_internal():
    global recording_flag, recording_task, recording_file

    if not recording_flag:
-        raise HTTPException(status_code=400, detail="Not recording.")
+        logging.warning("No active recording to stop.")
+        return False

    recording_flag = False
    if recording_task:

@@ -324,8 +425,29 @@ async def stop_recording():
    recording_file = None

    logging.info("[RECORDING STOPPED]")
+    return True


+# --- API endpoints ---
+@app.post("/start_recording")
+async def start_recording():
+    filename = await start_recording_internal()
+    if not filename:
+        raise HTTPException(status_code=400, detail="Already recording.")
+    return {"status": "recording started", "file": filename}


+@app.post("/stop_recording")
+async def stop_recording():
+    success = await stop_recording_internal()
+    if not success:
+        raise HTTPException(status_code=400, detail="Not recording.")
    return {"status": "recording stopped"}


+@app.get("/is_recording")
+async def is_recording():
+    """Return True if recording is on, False otherwise."""
+    return JSONResponse(content={"recording": recording_flag})
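With the recorder split into internal helpers plus thin endpoints, the same start/stop logic can be driven either in-process (as the auto tests below do) or over HTTP. A rough sketch of the HTTP side, assuming a local uvicorn instance (host and port are assumptions):

# Sketch: drive the recorder over HTTP (httpx is already a dependency).
import httpx

BASE = "http://127.0.0.1:8000"  # hypothetical host/port

r = httpx.post(f"{BASE}/start_recording")
print(r.json())  # {"status": "recording started", "file": "recording_....csv"}, or 400 if already running

assert httpx.get(f"{BASE}/is_recording").json()["recording"] is True

r = httpx.post(f"{BASE}/stop_recording")
print(r.json())  # {"status": "recording stopped"}, or 400 if nothing was running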
async def record_data_loop():
    global recording_writer, recording_file, write_buffer, last_flush_time

@@ -334,107 +456,151 @@ async def record_data_loop():
        timestamp = datetime.datetime.now().isoformat()
        for pu, data in latest_data.items():
            if data:
-                row = {
-                    "timestamp": timestamp,
-                    "pu": pu,
-                    **data
-                }
+                row = {"timestamp": timestamp, "pu": pu, **data}
                recording_writer.writerow(row)

        # Flush every flush_interval seconds
-        if (
-            datetime.datetime.now() - last_flush_time
-        ).total_seconds() >= flush_interval:
+        if (datetime.datetime.now() - last_flush_time).total_seconds() >= flush_interval:
            recording_file.flush()
            last_flush_time = datetime.datetime.now()

        await asyncio.sleep(0.05)  # 10 Hz
## AUTOMATIC TESTING
-async def send_command_with_delay(state: str, pu: int, delay_s: int = 0, ploop_setpoint: float = 0.0):
+async def send_command_with_delay(
+    state: str,
+    pu: int,
+    delay_s: int = 0,
+    ploop_setpoint: float = 2.5,
+    qperm_setpoint: float = 1200.0,
+):
    await asyncio.sleep(delay_s)
    logging.info(f"[AUTO TEST] Sending {state} to PU{pu} after {delay_s}s")
-    can_backend.send_state_command(state, pu, ploop_setpoint)
+    try:
+        result = send_command_to_pu(pu, state, ploop_setpoint, qperm_setpoint)
+    except Exception as e:
+        logging.error(f"[AUTO TEST] Failed to send {state} to PU{pu}: {e}")
+        return {"status": "error", "detail": str(e)}


async def set_patients_with_delay(count: int, delay_s: int):
    await asyncio.sleep(delay_s)
    logging.info(f"[AUTO TEST] Sending {count} patients to patient skid after {delay_s}s")
    set_patient_skid_users(count)
@router.post("/test/auto/1")
|
|
||||||
async def auto_test_pu1(ploop_setpoint: float = Query(0.0)):
|
|
||||||
pu = 1
|
|
||||||
logging.info("[AUTO TEST] Starting automatic test for 1 PU")
|
|
||||||
asyncio.create_task(run_auto_test_pu1(pu, ploop_setpoint))
|
|
||||||
return {"status": "started", "pu": pu}
|
|
||||||
|
|
||||||
@router.post("/test/auto/2")
|
@router.post("/test/auto/{pu_number}")
|
||||||
async def auto_test_pu2(ploop_setpoint: float = Query(0.0)):
|
async def auto_test(pu_number: int ):
|
||||||
logging.info("[AUTO TEST] Starting automatic test for 2 PUs")
|
"""
|
||||||
asyncio.create_task(run_auto_test_pu2(ploop_setpoint))
|
Start automatic test for PU1 or PU2.
|
||||||
return {"status": "started", "pu": [1, 2]}
|
"""
|
||||||
|
global tasks
|
||||||
|
|
||||||
async def run_auto_test_pu1(pu: int, ploop_setpoint: float):
|
logging.info(f"[AUTO TEST] Starting automatic test for PU{pu_number}")
|
||||||
await send_command_with_delay("PRE-PRODUCTION", pu, delay_s=0, ploop_setpoint=ploop_setpoint)
|
|
||||||
await send_command_with_delay("PRODUCTION", pu, delay_s=180, ploop_setpoint=ploop_setpoint)
|
|
||||||
await set_patients_with_delay(5, delay_s=60)
|
|
||||||
await set_patients_with_delay(10, delay_s=60)
|
|
||||||
await send_command_with_delay("IDLE", pu, delay_s=60, ploop_setpoint=ploop_setpoint)
|
|
||||||
logging.info("[AUTO TEST] Finished PU1 test")
|
|
||||||
|
|
||||||
async def run_auto_test_pu2(ploop_setpoint: float):
|
key = f"pu{pu_number}"
|
||||||
# Step 1: Run PU1 test
|
if key in tasks and not tasks[key].done():
|
||||||
await run_auto_test_pu1(1, ploop_setpoint)
|
tasks[key].cancel()
|
||||||
|
logging.info(f"[AUTO TEST] PU{pu_number} Cancelled")
|
||||||
|
|
||||||
# Step 2: PU2 sequence
|
await start_recording_internal()
|
||||||
await send_command_with_delay("PRE-PRODUCTION", 2, delay_s=0, ploop_setpoint=ploop_setpoint)
|
logging.info("[AUTO TEST] Recorder started")
|
||||||
await send_command_with_delay("PRODUCTION", 2, delay_s=180, ploop_setpoint=ploop_setpoint)
|
if pu_number == 1:
|
||||||
await set_patients_with_delay(15, delay_s=60)
|
task = asyncio.create_task(run_auto_test_1())
|
||||||
await set_patients_with_delay(0, delay_s=60)
|
result = {"status": "started", "pu": 1}
|
||||||
await send_command_with_delay("IDLE", 2, delay_s=60, ploop_setpoint=ploop_setpoint)
|
elif pu_number == 2:
|
||||||
await send_command_with_delay("IDLE", 1, delay_s=60, ploop_setpoint=ploop_setpoint)
|
task = asyncio.create_task(run_auto_test_2())
|
||||||
logging.info("[AUTO TEST] Finished PU1 + PU2 test")
|
result = {"status": "started", "pu": [2]}
|
||||||
|
elif pu_number == 3:
|
||||||
|
task = asyncio.create_task(run_auto_test_3())
|
||||||
|
result = {"status": "started", "pu": [2]}
|
||||||
|
else:
|
||||||
|
return {"status": "error", "message": "Invalid PU number"}
|
||||||
|
|
||||||
|
tasks[key] = task
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
+@router.post("/test/auto/stop/{pu}")
+async def stop_auto_test(pu: int):
+    global tasks
+    key = f"pu{pu}"
+    logging.info(f"[AUTO TEST] Stopping {pu}")

+    await stop_recording_internal()
+    logging.info("[AUTO TEST] Recorder stopped")
+    if key in tasks and not tasks[key].done():
+        tasks[key].cancel()
+        await send_command_with_delay("IDLE", pu=pu, delay_s=0)
+        logging.info(f"[AUTO TEST] Test of {key} cancelled and PU stopped")
+        return {"status": "stopped", "pu": pu}

+    logging.info(f"[AUTO TEST] Stopping {pu}: no test running")
+    return {"status": "no task running", "pu": pu}
+async def run_auto_test_1(pu: int = 1):
+    try:
+        await send_command_with_delay("PRE-PRODUCTION", pu=pu, delay_s=0, ploop_setpoint=2.5, qperm_setpoint=1200.0)
+        await asyncio.sleep(180)  # Starting time of the machine
+        await set_patients_with_delay(5, delay_s=10)
+        await set_patients_with_delay(10, delay_s=20)
+        await set_patients_with_delay(0, delay_s=20)
+        await send_command_with_delay("IDLE", pu=pu, delay_s=20, ploop_setpoint=2.5, qperm_setpoint=1200.0)
+        logging.info("[AUTO TEST] Finished PU1 test")
+        await stop_recording_internal()
+        logging.info("[AUTO TEST] Recorder stopped")
+    except asyncio.CancelledError:
+        logging.info("[AUTO TEST] PU 1 task cancelled")
+        raise
+async def run_auto_test_2():
+    try:
+        await send_command_with_delay("PRE-PRODUCTION", pu=1, delay_s=0, ploop_setpoint=2.5, qperm_setpoint=1200.0)
+        await send_command_with_delay("PRE-PRODUCTION", pu=2, delay_s=90, ploop_setpoint=2.5, qperm_setpoint=1200.0)
+        await asyncio.sleep(90)  # Starting time of the machine
+        await set_patients_with_delay(5, delay_s=10)
+        await set_patients_with_delay(10, delay_s=40)

+        await asyncio.sleep(100)
+        await send_command_with_delay("IDLE", pu=1, delay_s=0, ploop_setpoint=2.5, qperm_setpoint=1200.0)
+        await send_command_with_delay("IDLE", pu=2, delay_s=10, ploop_setpoint=2.5, qperm_setpoint=1200.0)

+        logging.info("[AUTO TEST] Finished PU1 + PU2 test")
+    except asyncio.CancelledError:
+        logging.info("[AUTO TEST] PU 2 task cancelled")
+        # optional cleanup
+        raise
+async def run_auto_test_3():
+    try:
+        # Step 1: Run PU1 test
+        # await run_auto_test_1()
+        # TODO : TODO
+        logging.info("[AUTO TEST] Finished PU3 test")
+    except asyncio.CancelledError:
+        logging.info("[AUTO TEST] PU 3 task cancelled")
+        # optional cleanup
+        raise
@router.post("/test/auto/3")
|
|
||||||
async def auto_test_pu3():
|
|
||||||
# Call the function for PU3 auto test
|
|
||||||
logging.info("Start auto test of 3 PU")
|
|
||||||
return {"status": "started", "pu": 3}
|
|
||||||
|
|
||||||
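The pattern the auto tests use above, one asyncio.Task per key, cancel any running instance before starting a new one, and re-raise CancelledError inside the coroutine, is self-contained enough to sketch in isolation. Names here are illustrative, not from the codebase, and the functions must run inside an event loop:

# Sketch of the per-key task management used by the auto tests (illustrative names).
import asyncio, logging

tasks: dict[str, asyncio.Task] = {}

async def long_running(key: str):
    try:
        await asyncio.sleep(3600)  # stands in for the real test sequence
    except asyncio.CancelledError:
        logging.info(f"{key} cancelled")  # cleanup hook before propagating
        raise  # re-raise so the task ends in the CANCELLED state

def start(key: str):
    # Must be called from within a running event loop (e.g., a FastAPI handler).
    if key in tasks and not tasks[key].done():
        tasks[key].cancel()  # supersede any previous run
    tasks[key] = asyncio.create_task(long_running(key))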
# PATIENT SKID HELPERS
async def update_latest_flow():
+    global active_PUs
    async with aiohttp.ClientSession() as session:
        while True:
            try:
                async with session.get("http://192.168.1.28:8000/instant_flow") as resp:
                    data = await resp.json()
-                    latest_flow = int(data["log"]["flow"])
+                    latest_flow = int(data["log"])
                    logging.debug(f"Updated flow: {latest_flow}")
                    latest_data["PatientSkid"]["QSkid"] = latest_flow
            except Exception as e:
                logging.error(f"Error fetching flow: {e}")
            await asyncio.sleep(1.0)


-def set_patient_skid_users(count: int = 1):
-    try:
-        url = f"http://192.168.1.28:8000/set_users/{count}"
-        response = httpx.get(url, timeout=5.0)
-
-        if response.status_code == 200:
-            return {"status": "success", "detail": response.json()}
-        else:
-            raise HTTPException(status_code=502, detail=f"Remote server error: {response.text}")
-    except httpx.RequestError as e:
-        raise HTTPException(status_code=500, detail=f"Request to external server failed: {str(e)}")
app.include_router(router)

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(
        "main:app",
        host="127.0.0.1",
94 protocol_decoder.py Normal file
@@ -0,0 +1,94 @@
from typing import Dict, Any, List, Tuple
import re

RE_PU_VP = re.compile(r'^P(?P<pu>[1-3])VP$')
RE_PU_CO = re.compile(r'^P(?P<pu>[1-3])CO$')
RE_DOCK_VP = re.compile(r'^D0VP$')
RE_DOCK_CO = re.compile(r'^(D0CO|DOCO)$')  # be tolerant

def _to_i(s: str) -> int:
    try: return int(s.strip())
    except: return 0

def _to_pct(s: str) -> int:
    try: return int(s.strip())
    except:
        try: return int(float(s))
        except: return 0

def _to_bool(s: str) -> bool:
    return str(s).strip() in ("1","true","True","TRUE")

def _dock_vp(vals: List[str]) -> Dict[str, Any]:
    names = ["mv01","mv09","mv10","mv11","mmv01","mmv02","mmv03","sv01","sv02","sv03"]
    out: Dict[str, Any] = {}
    for k, v in zip(names, vals):
        out[k] = _to_bool(v) if k.startswith("sv") else _to_pct(v)
    return out

def _dock_co(vals: List[str]) -> Dict[str, Any]:
    out: Dict[str, Any] = {}
    for name, v in zip(["cs01","cs02"], vals):
        q = _to_i(v)  # 0.1 µS
        out[f"{name}_0p1uS"] = q
        out[f"{name}_uS"] = q*0.1
    return out

def _pu_vp(pu: int, vals: List[str]) -> Dict[str, Any]:
    out: Dict[str, Any] = {"pu": pu}
    for k, v in zip(["mv02","mv03","mv04","mv05","mv06","mv07","mv08"], vals):
        out[k] = _to_pct(v)
    return out

def _pu_co(pu: int, vals: List[str]) -> Dict[str, Any]:
    out: Dict[str, Any] = {"pu": pu}
    for name, v in zip(["cs03","cs04","cs05"], vals):
        q = _to_i(v)
        out[f"{name}_0p1uS"] = q
        out[f"{name}_uS"] = q*0.1
    return out

def decode_frames(buffer: bytes) -> Tuple[List[Tuple[bytes, Dict[str, Any]]], bytes, int]:
    msgs: List[Tuple[bytes, Dict[str, Any]]] = []
    errors = 0
    parts = buffer.split(b"\n")
    remaining = parts[-1]

    for line in parts[:-1]:
        raw = line.strip().rstrip(b"\r")
        if not raw: continue
        try:
            t = raw.decode("utf-8")
            fields = [f.strip() for f in t.split(",")]
            if len(fields) < 3: raise ValueError("too few fields")
            version, msg_id, ts_ms = fields[0], fields[1], fields[2]
            data = fields[3:]

            parsed: Dict[str, Any] = {"version":version, "msg_id":msg_id, "ts_ms": int(ts_ms)}

            if RE_DOCK_VP.match(msg_id):
                parsed.update({"src":"dock","type":"valves"})
                parsed.update(_dock_vp(data))
            elif RE_DOCK_CO.match(msg_id):
                parsed.update({"src":"dock","type":"cond"})
                parsed.update(_dock_co(data))
            else:
                m = RE_PU_VP.match(msg_id)
                if m:
                    pu = int(m.group("pu"))
                    parsed.update({"src":"pu","type":"valves","pu":pu})
                    parsed.update(_pu_vp(pu, data))
                else:
                    m = RE_PU_CO.match(msg_id)
                    if m:
                        pu = int(m.group("pu"))
                        parsed.update({"src":"pu","type":"cond","pu":pu})
                        parsed.update(_pu_co(pu, data))
                    else:
                        parsed.update({"src":"unknown","type":"raw","data":data})

            msgs.append((raw, parsed))
        except Exception:
            errors += 1

    return msgs, remaining, errors
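decode_frames is a pure function over a byte buffer, so it can be exercised without a serial port. A minimal sketch with a made-up but format-conforming P1VP line; the field values are invented for illustration:

# Sketch: feed one CSV-style frame through the decoder (sample values invented).
from protocol_decoder import decode_frames

buf = b"1,P1VP,123456,10,20,30,40,50,60,70\npartial"
msgs, remaining, errors = decode_frames(buf)

raw, parsed = msgs[0]
assert parsed["msg_id"] == "P1VP" and parsed["pu"] == 1
assert parsed["mv02"] == 10 and parsed["mv08"] == 70
assert remaining == b"partial" and errors == 0  # incomplete tail is carried over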
requirements.txt
@@ -1,4 +1,14 @@
-fastapi
-uvicorn[standard]
-python-can
-canopen
+aiohttp==3.12.14
+canopen==2.3.0
+fastapi==0.116.1
+httpx==0.28.1
+matplotlib==3.10.5
+numpy==2.3.2
+pandas==2.3.2
+pyserial==3.5
+python_can==4.5.0
+seaborn==0.13.2
+starlette==0.47.2
+uvicorn==0.35.0
+jinja2
+itsdangerous
120 serial_csv_logger.py Normal file
@@ -0,0 +1,120 @@
# serial_csv_logger.py
import os, csv, datetime, json
from typing import Dict, Any, Tuple, Optional

class SerialCsvLogger:
    """
    Writes parsed serial frames to CSV, segregated by message type:
      - D0VP_YYYY-MM-DD.csv (Docking valves)
      - D0CO_YYYY-MM-DD.csv (Docking conductivity)
      - P1VP_YYYY-MM-DD.csv (PU1 valves), P2VP..., P3VP...
      - P1CO_YYYY-MM-DD.csv (PU1 conductivity), etc.
      - Unknown_YYYY-MM-DD.csv (for anything unmatched)
    """
    def __init__(self, out_dir: str = "serial_logs", rotate_daily: bool = True):
        self.out_dir = out_dir
        self.rotate_daily = rotate_daily
        self._writers: Dict[str, Tuple[csv.DictWriter, Any, str]] = {}  # key -> (writer, file, date_str)
        os.makedirs(self.out_dir, exist_ok=True)

    def close(self):
        for _, (_, f, _) in self._writers.items():
            try: f.close()
            except: pass
        self._writers.clear()

    # ---------- public API ----------
    def log(self, parsed: Dict[str, Any]):
        msg_id = parsed.get("msg_id", "Unknown")
        date_str = datetime.date.today().isoformat() if self.rotate_daily else "all"
        key = f"{msg_id}"

        # rotate if day changed
        if key in self._writers and self._writers[key][2] != date_str:
            self._writers[key][1].close()
            del self._writers[key]

        writer, _, _ = self._ensure_writer(key, msg_id, date_str)
        row = self._build_row(msg_id, parsed)
        writer.writerow(row)

    # ---------- internals ----------
    def _ensure_writer(self, key: str, msg_id: str, date_str: str):
        if key in self._writers:
            return self._writers[key]

        fname = f"{msg_id}_{date_str}.csv"
        path = os.path.join(self.out_dir, fname)
        f = open(path, "a", newline="")
        headers = self._headers_for(msg_id)
        writer = csv.DictWriter(f, fieldnames=headers)

        # write header only if file is empty
        if f.tell() == 0:
            writer.writeheader()

        self._writers[key] = (writer, f, date_str)
        return self._writers[key]

    def _headers_for(self, msg_id: str):
        # Common heads
        base = ["ts_iso", "ts_ms", "version", "msg_id"]

        if msg_id == "D0VP":
            return base + ["mv01","mv09","mv10","mv11","mmv01","mmv02","mmv03","sv01","sv02","sv03"]

        if msg_id in ("D0CO", "DOCO"):
            # write both scaled (uS) and raw (0.1 uS) for traceability
            return base + ["cs01_uS","cs01_0p1uS","cs02_uS","cs02_0p1uS"]

        if msg_id.endswith("VP") and len(msg_id) == 4 and msg_id[0] == "P":
            # P1VP / P2VP / P3VP
            return base + ["pu","mv02","mv03","mv04","mv05","mv06","mv07","mv08"]

        if msg_id.endswith("CO") and len(msg_id) == 4 and msg_id[0] == "P":
            # P1CO / P2CO / P3CO
            return base + ["pu","cs03_uS","cs03_0p1uS","cs04_uS","cs04_0p1uS","cs05_uS","cs05_0p1uS"]

        # fallback
        return base + ["payload_json"]

    def _build_row(self, msg_id: str, p: Dict[str, Any]) -> Dict[str, Any]:
        ts_iso = datetime.datetime.fromtimestamp(p.get("ts_ms", 0)/1000.0).isoformat() if "ts_ms" in p else ""
        row = {"ts_iso": ts_iso, "ts_ms": p.get("ts_ms", ""), "version": p.get("version",""), "msg_id": msg_id}

        if msg_id == "D0VP":
            row.update({
                "mv01": p.get("mv01"), "mv09": p.get("mv09"), "mv10": p.get("mv10"), "mv11": p.get("mv11"),
                "mmv01": p.get("mmv01"), "mmv02": p.get("mmv02"), "mmv03": p.get("mmv03"),
                "sv01": p.get("sv01"), "sv02": p.get("sv02"), "sv03": p.get("sv03"),
            })
            return row

        if msg_id in ("D0CO", "DOCO"):
            row.update({
                "cs01_uS": p.get("cs01_uS"), "cs01_0p1uS": p.get("cs01_0p1uS"),
                "cs02_uS": p.get("cs02_uS"), "cs02_0p1uS": p.get("cs02_0p1uS"),
            })
            return row

        if msg_id.endswith("VP") and len(msg_id) == 4 and msg_id[0] == "P":
            row.update({
                "pu": p.get("pu"),
                "mv02": p.get("mv02"), "mv03": p.get("mv03"), "mv04": p.get("mv04"),
                "mv05": p.get("mv05"), "mv06": p.get("mv06"), "mv07": p.get("mv07"), "mv08": p.get("mv08"),
            })
            return row

        if msg_id.endswith("CO") and len(msg_id) == 4 and msg_id[0] == "P":
            row.update({
                "pu": p.get("pu"),
                "cs03_uS": p.get("cs03_uS"), "cs03_0p1uS": p.get("cs03_0p1uS"),
                "cs04_uS": p.get("cs04_uS"), "cs04_0p1uS": p.get("cs04_0p1uS"),
                "cs05_uS": p.get("cs05_uS"), "cs05_0p1uS": p.get("cs05_0p1uS"),
            })
            return row

        # Unknown → keep full payload as JSON for later inspection
        pay = {k:v for k,v in p.items() if k not in ("version","msg_id","ts_ms")}
        row["payload_json"] = json.dumps(pay, separators=(",",":"))
        return row
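Since SerialCsvLogger.log takes the already-parsed dict, it composes directly with decode_frames. A small sketch of the pair, with an invented sample frame:

# Sketch: decode a frame and route it to the segregated CSV files.
from protocol_decoder import decode_frames
from serial_csv_logger import SerialCsvLogger

logger = SerialCsvLogger(out_dir="serial_logs", rotate_daily=True)
msgs, _, _ = decode_frames(b"1,D0CO,1000,125,250\n")  # cs01 = 12.5 uS, cs02 = 25.0 uS
for _, parsed in msgs:
    logger.log(parsed)  # appends to D0CO_YYYY-MM-DD.csv, writing the header once
logger.close()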
234 serial_manager.py Normal file
@@ -0,0 +1,234 @@
# serial_manager.py
import threading
import time
import csv
from collections import deque
from dataclasses import dataclass
from typing import Any, Callable, Deque, Dict, List, Optional, Tuple

import serial  # provided by python3-serial


@dataclass
class SerialConfig:
    """
    Configuration for the read-only serial intake.
    """
    port: str = "/dev/ttyUSB0"
    baudrate: int = 115200
    bytesize: int = serial.EIGHTBITS
    parity: str = serial.PARITY_NONE
    stopbits: int = serial.STOPBITS_ONE
    timeout: float = 0.05
    rtscts: bool = False
    dsrdtr: bool = False
    xonxoff: bool = False
    ring_capacity: int = 5000
    # If set, a single "generic" CSV will be written here (append mode).
    # If you want segregated CSVs per message type, leave this as None and
    # supply an `on_message` callback that writes where you want.
    csv_log_path: Optional[str] = None  # e.g. "/home/pi/hmi/serial_log.csv"


class SerialStore:
    """
    Thread-safe store for recent parsed messages and intake stats.
    Stores parsed dicts as returned by the decoder.
    """
    def __init__(self, capacity: int):
        self._buf: Deque[Dict[str, Any]] = deque(maxlen=capacity)
        self._lock = threading.Lock()
        self._stats = {
            "frames_in": 0,
            "frames_ok": 0,
            "frames_bad": 0,
            "restarts": 0,
            "last_err": "",
        }
        self._latest_by_id: Dict[str, Dict[str, Any]] = {}

    def add(self, msg: Dict[str, Any], ok: bool = True):
        with self._lock:
            self._buf.append(msg)
            self._stats["frames_in"] += 1
            if ok:
                self._stats["frames_ok"] += 1
            else:
                self._stats["frames_bad"] += 1
            mid = msg.get("msg_id")
            if mid:
                self._latest_by_id[mid] = msg

    def latest(self, n: int = 100) -> List[Dict[str, Any]]:
        with self._lock:
            return list(self._buf)[-n:]

    def latest_by_id(self) -> Dict[str, Dict[str, Any]]:
        with self._lock:
            return dict(self._latest_by_id)

    def stats(self) -> Dict[str, Any]:
        with self._lock:
            return dict(self._stats)

    def set_error(self, err: str):
        with self._lock:
            self._stats["last_err"] = err

    def inc_restart(self):
        with self._lock:
            self._stats["restarts"] += 1


class SerialReader:
    """
    Background read-only serial reader.

    Args:
        cfg: SerialConfig
        store: SerialStore
        decoder: function(buffer: bytes) ->
            (messages: List[Tuple[raw_frame: bytes, parsed: Dict]], remaining: bytes, errors: int)
        on_message: optional callback called for each parsed dict (e.g., segregated CSV logger)
    """
    def __init__(
        self,
        cfg: SerialConfig,
        store: SerialStore,
        decoder: Callable[[bytes], Tuple[List[Tuple[bytes, Dict[str, Any]]], bytes, int]],
        on_message: Optional[Callable[[Dict[str, Any]], None]] = None,
    ):
        self.cfg = cfg
        self.store = store
        self.decoder = decoder
        self.on_message = on_message

        self._ser: Optional[serial.Serial] = None
        self._th: Optional[threading.Thread] = None
        self._stop = threading.Event()
        self._buffer = b""

        # Optional generic CSV (single file) if cfg.csv_log_path is set
        self._csv_file = None
        self._csv_writer = None

    # ---------- lifecycle ----------
    def start(self):
        self._stop.clear()
        self._open_serial()
        self._open_csv()
        self._th = threading.Thread(target=self._run, name="SerialReader", daemon=True)
        self._th.start()

    def stop(self):
        self._stop.set()
        if self._th and self._th.is_alive():
            self._th.join(timeout=2.0)
        self._close_serial()
        self._close_csv()

    # ---------- internals ----------
    def _open_serial(self):
        try:
            self._ser = serial.Serial(
                port=self.cfg.port,
                baudrate=self.cfg.baudrate,
                bytesize=self.cfg.bytesize,
                parity=self.cfg.parity,
                stopbits=self.cfg.stopbits,
                timeout=self.cfg.timeout,
                rtscts=self.cfg.rtscts,
                dsrdtr=self.cfg.dsrdtr,
                xonxoff=self.cfg.xonxoff,
            )
        except Exception as e:
            self.store.set_error(f"Open error: {e}")
            self._ser = None

    def _close_serial(self):
        try:
            if self._ser and self._ser.is_open:
                self._ser.close()
        except Exception:
            pass
        self._ser = None

    def _open_csv(self):
        if not self.cfg.csv_log_path:
            return
        try:
            self._csv_file = open(self.cfg.csv_log_path, "a", newline="")
            self._csv_writer = csv.writer(self._csv_file)
            # Write header only if file is empty (avoid duplicates on restart)
            if self._csv_file.tell() == 0:
                self._csv_writer.writerow(["ts_ms", "msg_id", "raw_hex", "parsed"])
                self._csv_file.flush()
        except Exception as e:
            self.store.set_error(f"CSV open error: {e}")
            self._csv_file = None
            self._csv_writer = None

    def _close_csv(self):
        try:
            if self._csv_file:
                self._csv_file.close()
        except Exception:
            pass
        self._csv_file = None
        self._csv_writer = None

    def _log_csv(self, raw: bytes, parsed: Dict[str, Any]):
        """Write to the optional single generic CSV."""
        if not self._csv_writer:
            return
        try:
            self._csv_writer.writerow(
                [parsed.get("ts_ms"), parsed.get("msg_id"), raw.hex(), parsed]
            )
            self._csv_file.flush()
        except Exception as e:
            self.store.set_error(f"CSV write error: {e}")

    def _run(self):
        backoff = 0.5
        while not self._stop.is_set():
            if not self._ser or not self._ser.is_open:
                # reconnect with exponential backoff (capped)
                self._close_serial()
                time.sleep(backoff)
                self.store.inc_restart()
                self._open_serial()
                backoff = min(backoff * 1.5, 5.0)
                continue

            backoff = 0.5

            try:
                data = self._ser.read(4096)  # non-blocking due to timeout
                if data:
                    self._buffer += data
                    frames, remaining, errors = self.decoder(self._buffer)
                    self._buffer = remaining

                    for raw, parsed in frames:
                        # store
                        self.store.add(parsed, ok=True)
                        # optional generic CSV
                        self._log_csv(raw, parsed)
                        # optional segregated sink
                        if self.on_message:
                            try:
                                self.on_message(parsed)
                            except Exception as e:
                                self.store.set_error(f"CSV sink error: {e}")

                    # count decode errors
                    for _ in range(errors):
                        self.store.add({"error": "decode"}, ok=False)
                else:
                    time.sleep(0.01)

            except Exception as e:
                self.store.set_error(f"Read/Decode error: {e}")
                self._close_serial()
                time.sleep(0.5)
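Outside the FastAPI app, the intake stack can be wired standalone; this mirrors the startup_event wiring above and is only a sketch, not a second supported entry point. The port is an assumption:

# Sketch: standalone wiring of the serial intake (mirrors startup_event).
import time
from serial_manager import SerialConfig, SerialStore, SerialReader
from protocol_decoder import decode_frames
from serial_csv_logger import SerialCsvLogger

csv_sink = SerialCsvLogger(out_dir="serial_logs")
store = SerialStore(capacity=5000)
reader = SerialReader(
    SerialConfig(port="/dev/ttyUSB0", baudrate=115200),  # port is an assumption
    store,
    decoder=decode_frames,
    on_message=csv_sink.log,  # segregated per-message-type CSVs
)
reader.start()
try:
    time.sleep(10)
    print(store.stats())  # frames_in / frames_ok / frames_bad / restarts
finally:
    reader.stop()
    csv_sink.close()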
static/monitor.html (deleted)
@@ -1,309 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Live Monitoring Dashboard</title>
    <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
    <style>
        body {
            font-family: Arial, sans-serif;
            margin: 0;
            padding: 20px;
        }
        .plot-container {
            display: flex;
            flex-wrap: wrap;
            justify-content: center;
            gap: 20px;
        }
        .large-plot {
            width: 45%;
            height: 300px;
        }
        .small-plot {
            width: 30%;
            height: 250px;
        }
        h1 {
            text-align: center;
        }
        #recordButton {
            background-color: #ff4444;
            color: white;
            border: none;
            padding: 10px 20px;
            font-size: 16px;
            cursor: pointer;
            border-radius: 5px;
            margin: 10px;
        }
        .status-container {
            background-color: #f0f0f0;
            padding: 10px;
            border-radius: 5px;
            margin: 10px auto;
            text-align: center;
            font-size: 18px;
        }
    </style>
</head>
<body>
    <h1 id="pageTitle">Live Monitoring Dashboard</h1>
    <div class="status-container">
        <p>Current Status: <span id="currentStatus">Loading...</span></p>
    </div>
    <button id="recordButton" onclick="toggleRecording()">Record</button>
    <div class="plot-container">
        <div id="flow-plot-1" class="large-plot"></div>
        <div id="pressure-plot-1" class="large-plot"></div>
        <div id="flow-plot-2" class="large-plot"></div>
        <div id="pressure-plot-2" class="large-plot"></div>
        <div id="MV02_sp-plot" class="small-plot"></div>
        <div id="MV03_sp-plot" class="small-plot"></div>
        <div id="MV04_sp-05-plot" class="small-plot"></div>
        <div id="MV06_sp-plot" class="small-plot"></div>
        <div id="MV07_sp-plot" class="small-plot"></div>
        <div id="MV08_sp-plot" class="small-plot"></div>
    </div>
    <script>
        // Extract PU number from URL
        const urlParams = new URLSearchParams(window.location.search);
        const puNumber = urlParams.get('pu_number') || '1'; // Default to PU 1 if not specified
        document.getElementById('pageTitle').textContent = `Live Monitoring Dashboard - PU ${puNumber}`;

        let isRecording = false;
        let recordedData = [];
        let recordingInterval;
        let csvFileName = '';

        async function toggleRecording() {
            const recordButton = document.getElementById('recordButton');
            if (!isRecording) {
                isRecording = true;
                recordButton.style.backgroundColor = '#ff0000';
                recordButton.textContent = 'Stop Recording';
                recordedData = [];
                csvFileName = `monitoring_data_PU${puNumber}_${new Date().toISOString().replace(/[:.]/g, '-')}.csv`;
                startRecording();
            } else {
                isRecording = false;
                recordButton.style.backgroundColor = '#ff4444';
                recordButton.textContent = 'Record';
                stopRecording();
            }
        }

        function startRecording() {
            recordingInterval = setInterval(async () => {
                const response = await fetch('/monitor');
                if (!response.ok) {
                    console.error(`HTTP error! status: ${response.status}`);
                    return;
                }
                const allData = await response.json();
                const puData = allData[`PU_${puNumber}`];
                const SkidData = allData[`PatientSkid`];
                recordedData.push({
                    timestamp: new Date().toISOString(),
                    Qperm: puData.Qperm,
                    Qdilute: puData.Qdilute,
                    Qdrain: puData.Qdrain,
                    Qrecirc: puData.Qrecirc,
                    QdrainEDI: puData.QdrainEDI,
                    Pro: puData.Pro,
                    Pdilute: puData.Pdilute,
                    Pretentate: puData.Pretentate,
                    MV02_sp: puData.MV02_sp,
                    MV03_sp: puData.MV03_sp,
                    MV04_sp: puData.MV04_sp,
                    MV05_sp: puData.MV05_sp,
                    MV06_sp: puData.MV06_sp,
                    MV07_sp: puData.MV07_sp,
                    MV08_sp: puData.MV08_sp,
                    QSkid: SkidData.QSkid,
                });
            }, 100);
        }

        async function stopRecording() {
            clearInterval(recordingInterval);
            if (recordedData.length > 0) {
                const csvContent = "data:text/csv;charset=utf-8," +
                    "Timestamp,Qperm,Qdilute,Qdrain,Qrecirc,QdrainEDI,Pro,Pdilute,Pretentate,MV02_sp,MV03_sp,MV04_sp,MV05_sp,MV06_sp,MV07_sp,MV08_sp,QSkid\n" +
                    recordedData.map(row =>
                        `${row.timestamp},${row.Qperm},${row.Qdilute},${row.Qdrain},${row.Qrecirc},${row.QdrainEDI},${row.Pro},${row.Pdilute},${row.Pretentate},${row.MV02_sp},${row.MV03_sp},${row.MV04_sp},${row.MV05_sp},${row.MV06_sp},${row.MV07_sp},${row.MV08_sp},${row.QSkid}`
                    ).join("\n");
                const encodedUri = encodeURI(csvContent);
                const link = document.createElement("a");
                link.setAttribute("href", encodedUri);
                link.setAttribute("download", csvFileName);
                document.body.appendChild(link);
                link.click();
            }
        }

        window.onbeforeunload = function() {
            if (isRecording) {
                stopRecording();
            }
        };

        const maxPoints = 100;

        function getLastMinuteRange() {
            const now = new Date();
            const oneMinuteAgo = new Date(now.getTime() - 60 * 1000);
            return [oneMinuteAgo, now];
        }

        async function updatePlots() {
            try {
                const response = await fetch('/monitor');
                if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
                const allData = await response.json();
                const puData = allData[`PU_${puNumber}`];
                const SkidData = allData[`PatientSkid`];

                const timestamp = new Date(puData.timestamp);
                Plotly.extendTraces('flow-plot-1', {
                    x: [[timestamp], [timestamp]],
                    y: [[puData.Qperm], [puData.Qdilute]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('flow-plot-2', {
                    x: [[timestamp], [timestamp], [timestamp], [timestamp]],
                    y: [[puData.Qdrain], [puData.Qrecirc], [SkidData.QSkid], [puData.QdrainEDI]]
                }, [0, 1, 2, 3], maxPoints);

                Plotly.extendTraces('pressure-plot-1', {
                    x: [[timestamp], [timestamp]],
                    y: [[puData.Pro], [puData.Pretentate]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('pressure-plot-2', {
                    x: [[timestamp]],
                    y: [[puData.Pdilute]]
                }, [0], maxPoints);

                Plotly.extendTraces('MV02_sp-plot', { x: [[timestamp]], y: [[puData.MV02_sp]] }, [0], maxPoints);
                Plotly.extendTraces('MV03_sp-plot', { x: [[timestamp]], y: [[puData.MV03_sp]] }, [0], maxPoints);
                Plotly.extendTraces('MV04_sp-05-plot', {
                    x: [[timestamp], [timestamp]],
                    y: [[puData.MV04_sp], [puData.MV05_sp]]
                }, [0, 1], maxPoints);
                Plotly.extendTraces('MV06_sp-plot', { x: [[timestamp]], y: [[puData.MV06_sp]] }, [0], maxPoints);
                Plotly.extendTraces('MV07_sp-plot', { x: [[timestamp]], y: [[puData.MV07_sp]] }, [0], maxPoints);
                Plotly.extendTraces('MV08_sp-plot', { x: [[timestamp]], y: [[puData.MV08_sp]] }, [0], maxPoints);

                const range = getLastMinuteRange();
                const plotIds = ['flow-plot-1', 'flow-plot-2', 'pressure-plot-1', 'pressure-plot-2', 'MV02_sp-plot', 'MV03_sp-plot', 'MV04_sp-05-plot', 'MV06_sp-plot', 'MV07_sp-plot', 'MV08_sp-plot'];
                // plotIds.forEach(id => {
                //     Plotly.relayout(id, { 'xaxis.range': range });
                // });
            } catch (error) {
                console.error("Error updating plots:", error);
            }
        }

        async function fetchPUStatus() {
            try {
                const response = await fetch("/api/pu_status");
                if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
                const data = await response.json();
                const status = data[`PU${puNumber}`] || "Unknown";
                document.getElementById("currentStatus").textContent = status;
            } catch (error) {
                console.error("Error fetching PU status:", error);
                document.getElementById("currentStatus").textContent = "Error fetching status";
            }
        }

        function initPlots() {
            const time0 = [new Date()];

            Plotly.newPlot('flow-plot-1', [
                { x: time0, y: [0], name: 'Qperm', mode: 'lines', line: { color: 'blue' } },
                { x: time0, y: [0], name: 'Qdilute', mode: 'lines', line: { color: 'green' } }
            ], {
                title: 'Qperm and Qdilute Flow Rates Over Time',
                xaxis: { title: 'Time', type: 'date' },
                yaxis: { title: 'Flow (L/h)' }
            });

            Plotly.newPlot('flow-plot-2', [
                { x: time0, y: [0], name: 'Qdrain', mode: 'lines', line: { color: 'red' } },
                { x: time0, y: [0], name: 'Qrecirc', mode: 'lines', line: { color: 'orange' } },
                { x: time0, y: [0], name: 'QSkid', mode: 'lines', line: { color: 'green' } },
                { x: time0, y: [0], name: 'QdrainEDI', mode: 'lines', line: { color: 'blue' } }
            ], {
                title: 'Qdrain, Qrecirc, Qskid and QdrainEDI Flow Rates Over Time',
                xaxis: { title: 'Time', type: 'date' },
                yaxis: { title: 'Flow (L/h)' }
            });

            Plotly.newPlot('pressure-plot-1', [
                { x: time0, y: [0], name: 'Pro', mode: 'lines', line: { color: 'purple' } },
                { x: time0, y: [0], name: 'Pretentate', mode: 'lines', line: { color: 'gray' } }
            ], {
                title: 'Pro and Pretentate Pressure Over Time',
                xaxis: { title: 'Time', type: 'date' },
                yaxis: { title: 'Pressure (bar)' }
            });

            Plotly.newPlot('pressure-plot-2', [
                { x: time0, y: [0], name: 'Pdilute', mode: 'lines', line: { color: 'teal' } }
            ], {
                title: 'Pdilute Pressure Over Time',
                xaxis: { title: 'Time', type: 'date' },
                yaxis: { title: 'Pressure (bar)' }
            });

            Plotly.newPlot('MV02_sp-plot', [{
                x: time0, y: [0], name: 'MV02_sp', mode: 'lines'
            }], {
                title: 'MV02_sp (%)', yaxis: { }, xaxis: { type: 'date' }
            });

            Plotly.newPlot('MV03_sp-plot', [{
                x: time0, y: [0], name: 'MV03_sp', mode: 'lines'
            }], {
                title: 'MV03_sp (%)', yaxis: { }, xaxis: { type: 'date' }
            });

            Plotly.newPlot('MV04_sp-05-plot', [
                { x: time0, y: [0], name: 'MV04_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV05_sp', mode: 'lines' }
            ], {
                title: 'MV04_sp + MV05_sp (%)', yaxis: { range: [0, 100] }, xaxis: { type: 'date' }
            });

            Plotly.newPlot('MV06_sp-plot', [{
                x: time0, y: [0], name: 'MV06_sp', mode: 'lines'
            }], {
                title: 'MV06_sp (%)', yaxis: { }, xaxis: { type: 'date' }
            });

            Plotly.newPlot('MV07_sp-plot', [{
                x: time0, y: [0], name: 'MV07_sp', mode: 'lines'
            }], {
                title: 'MV07_sp (%)', yaxis: { }, xaxis: { type: 'date' }
            });

            Plotly.newPlot('MV08_sp-plot', [{
                x: time0, y: [0], name: 'MV08_sp', mode: 'lines'
            }], {
                title: 'MV08_sp (%)', yaxis: { range: [0, 100] }, xaxis: { type: 'date' }
            });

            setInterval(updatePlots, 500);
        }

        window.onload = function() {
            initPlots();
            fetchPUStatus();
            setInterval(fetchPUStatus, 5000); // Update status every 5 seconds
        };
    </script>
</body>
</html>
static/monitor_DS.html (new file, 119 lines)
@@ -0,0 +1,119 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Live Monitoring Dashboard</title>
    <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
    <style>
        body {
            font-family: Arial, sans-serif;
            margin: 0;
            padding: 20px;
        }
        .plot-container {
            display: flex;
            flex-wrap: wrap;
            justify-content: center;
            gap: 20px;
        }
        .large-plot {
            width: 45%;
            height: 300px;
        }
        .small-plot {
            width: 30%;
            height: 250px;
        }
        h1 {
            text-align: center;
        }
        .status-container {
            background-color: #f0f0f0;
            padding: 10px;
            border-radius: 5px;
            margin: 10px auto;
            text-align: center;
            font-size: 18px;
        }
    </style>
</head>
<body>
    <h1 id="pageTitle">Live Monitoring Dashboard - DS</h1>
    <div class="status-container">
        <p>Current Status: <span id="currentStatus">Loading...</span></p>
    </div>
    <div class="plot-container">
        <div id="tank-level-plot" class="large-plot"></div>
        <div id="flow-plot" class="large-plot"></div>
    </div>
    <script>
        const maxPoints = 50;

        async function updatePlots() {
            try {
                const response = await fetch('/monitor');
                if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
                const allData = await response.json();
                const dsData = allData['DS'];
                const t = new Date(dsData.timestamp);

                Plotly.extendTraces('tank-level-plot', {
                    x: [[t]],
                    y: [[dsData.TankLevel]]
                }, [0], maxPoints);

                Plotly.extendTraces('flow-plot', {
                    x: [[t], [t], [t]],
                    y: [[dsData.Qconso], [dsData.Qinlet], [dsData.Qoutlet]]
                }, [0, 1, 2], maxPoints);
            } catch (e) {
                console.error("Error updating plots:", e);
            }
        }

        async function fetchDSStatus() {
            try {
                const res = await fetch("/api/ds_status");
                const data = await res.json();
                const status = data['DS'] || "Unknown";
                document.getElementById("currentStatus").textContent = status;
            } catch (e) {
                console.error("Error fetching DS status:", e);
                document.getElementById("currentStatus").textContent = "Error fetching status";
            }
        }

        function initPlots() {
            const time0 = [new Date()];

            Plotly.newPlot('tank-level-plot', [
                { x: time0, y: [0], name: 'Tank Level', mode: 'lines' }
            ], {
                title: 'Tank Level',
                xaxis: { type: 'date' },
                yaxis: { title: 'Level' }
            });

            Plotly.newPlot('flow-plot', [
                { x: time0, y: [0], name: 'Qconso', mode: 'lines' },
                { x: time0, y: [0], name: 'Qinlet', mode: 'lines' },
                { x: time0, y: [0], name: 'Qoutlet', mode: 'lines' }
            ], {
                title: 'Flow Measurements',
                xaxis: { type: 'date' },
                yaxis: { title: 'Flow (L/h)' }
            });

            setInterval(updatePlots, 500);
        }

        window.onload = function () {
            initPlots();
            fetchDSStatus();
            setInterval(fetchDSStatus, 5000);
        };
    </script>
</body>
</html>
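Reading aid: both monitor pages poll the same /monitor endpoint and index the returned JSON by unit name. As a hedged sketch only, here is the payload shape this DS page assumes; the key names come from the fetch handler above, while the Python form and all values are illustrative, not taken from the repo.

    # Hypothetical sketch of the /monitor JSON contract read by monitor_DS.html.
    # Key names are taken from the page's JavaScript; the values are made up.
    monitor_payload = {
        "DS": {
            "timestamp": "2025-08-06T15:59:08",  # parsed client-side with new Date(...)
            "TankLevel": 42.0,                   # tank-level-plot, trace 0
            "Qconso": 110.0,                     # flow-plot trace 0 (L/h)
            "Qinlet": 130.0,                     # flow-plot trace 1 (L/h)
            "Qoutlet": 20.0,                     # flow-plot trace 2 (L/h)
        },
        # The PU pages read further top-level keys: "PU_1".."PU_3", "PatientSkid", "DS".
    }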
static/monitor_PU.html (new file, 280 lines)
@@ -0,0 +1,280 @@
<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Live Monitoring Dashboard</title>
    <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
    <style>
        body {
            font-family: Arial, sans-serif;
            margin: 0;
            padding: 20px;
        }

        .plot-container {
            display: flex;
            flex-wrap: wrap;
            justify-content: center;
            gap: 20px;
        }

        .large-plot {
            width: 45%;
            height: 300px;
        }

        .small-plot {
            width: 30%;
            height: 250px;
        }

        h1 {
            text-align: center;
        }

        .status-container {
            background-color: #f0f0f0;
            padding: 10px;
            border-radius: 5px;
            margin: 10px auto;
            text-align: center;
            font-size: 18px;
        }
    </style>
</head>

<body>
    <h1 id="pageTitle">Live Monitoring Dashboard</h1>
    <div class="status-container">
        <p>Current Status: <span id="currentStatus">Loading...</span></p>
    </div>

    <div class="plot-container">
        <div id="flow-plot-1" class="large-plot"></div>
        <div id="pressure-plot-1" class="large-plot"></div>
        <div id="flow-plot-2" class="large-plot"></div>
        <div id="pressure-plot-2" class="large-plot"></div>
        <div id="conductivity-plot" class="large-plot"></div>
        <div id="MV07-plot" class="small-plot"></div>
        <div id="MV02-plot" class="small-plot"></div>
        <div id="MV03-plot" class="small-plot"></div>
        <div id="MV04_sp-05-plot" class="small-plot"></div>
        <div id="MV06-plot" class="small-plot"></div>
        <div id="MV08-plot" class="small-plot"></div>
        <div id="pump-plot" class="small-plot"></div>
    </div>

    <script>
        const urlParams = new URLSearchParams(window.location.search);
        const puNumber = urlParams.get('pu_number') || '1';
        document.getElementById('pageTitle').textContent = `Live Monitoring Dashboard - PU ${puNumber}`;

        const maxPoints = 50;

        async function updatePlots() {
            try {
                const response = await fetch('/monitor');
                if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
                const allData = await response.json();
                const puData = allData[`PU_${puNumber}`];
                const SkidData = allData[`PatientSkid`];
                const DSData = allData[`DS`];
                const t = new Date(puData.timestamp);

                Plotly.extendTraces('flow-plot-1',
                    { x: [[t], [t], [t]], y: [[puData.Qperm], [puData.Qdilute], [puData.Qperm_sp]] },
                    [0, 1, 2],
                    maxPoints
                );

                Plotly.extendTraces('flow-plot-2', {
                    x: [[t], [t], [t], [t], [t], [t]],
                    y: [[puData.Qdrain], [puData.Qrecirc], [SkidData.QSkid], [puData.QdrainEDI], [DSData.Qconso], [puData.Qdrain_sp]]
                }, [0, 1, 2, 3, 4, 5], maxPoints);

                Plotly.extendTraces('pressure-plot-1',
                    { x: [[t], [t]], y: [[puData.Pro], [puData.Pretentate]] },
                    [0, 1],
                    maxPoints
                );

                Plotly.extendTraces('pressure-plot-2', {
                    x: [[t], [t]],
                    y: [[puData.Pdilute], [puData.Ploop_sp]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('conductivity-plot', {
                    x: [[t], [t], [t]],
                    y: [[puData.Cfeed], [puData.Cperm], [puData.Cdilute]]
                }, [0, 1, 2], maxPoints);

                Plotly.extendTraces('MV07-plot', {
                    x: [[t], [t]],
                    y: [[puData.MV07_sp], [puData.MV07]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('MV02-plot', {
                    x: [[t], [t]],
                    y: [[puData.MV02_sp], [puData.MV02]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('MV03-plot', {
                    x: [[t], [t]],
                    y: [[puData.MV03_sp], [puData.MV03]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('MV04_sp-05-plot', {
                    x: [[t], [t], [t], [t]],
                    y: [[puData.MV04_sp], [puData.MV04], [puData.MV05_sp], [puData.MV05]]
                }, [0, 1, 2, 3], maxPoints);

                Plotly.extendTraces('MV06-plot', {
                    x: [[t], [t]],
                    y: [[puData.MV06_sp], [puData.MV06]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('MV08-plot', {
                    x: [[t], [t]],
                    y: [[puData.MV08_sp], [puData.MV08]]
                }, [0, 1], maxPoints);

                Plotly.extendTraces('pump-plot', {
                    x: [[t]],
                    y: [[puData.Pump_sp]]
                }, [0], maxPoints);
            } catch (e) {
                console.error("Error updating plots:", e);
            }
        }

        async function fetchPUStatus() {
            try {
                const res = await fetch("/api/pu_status");
                const data = await res.json();
                const status = data[`PU${puNumber}`] || "Unknown";
                document.getElementById("currentStatus").textContent = status;
            } catch (e) {
                console.error("Error fetching PU status:", e);
                document.getElementById("currentStatus").textContent = "Error fetching status";
            }
        }

        function initPlots() {
            const time0 = [new Date()];

            Plotly.newPlot('flow-plot-1', [
                { x: time0, y: [0], name: 'Qperm', mode: 'lines' },
                { x: time0, y: [0], name: 'Qdilute', mode: 'lines' },
                { x: time0, y: [0], name: 'Qperm_sp', mode: 'lines', line: { dash: 'dash', color: 'red' } }
            ], {
                title: 'Qperm and Qdilute',
                xaxis: { type: 'date' },
                yaxis: { title: 'Flow (L/h)' }
            });

            Plotly.newPlot('flow-plot-2', [
                { x: time0, y: [0], name: 'Qdrain', mode: 'lines' },
                { x: time0, y: [0], name: 'Qrecirc', mode: 'lines' },
                { x: time0, y: [0], name: 'QSkid', mode: 'lines' },
                { x: time0, y: [0], name: 'QdrainEDI', mode: 'lines' },
                { x: time0, y: [0], name: 'Qconso', mode: 'lines' },
                { x: time0, y: [0], name: 'Qdrain_sp', mode: 'lines' }
            ], {
                title: 'Other Flows', xaxis: { type: 'date' }, yaxis: { title: 'Flow (L/h)' }
            });

            Plotly.newPlot('pressure-plot-1', [
                { x: time0, y: [0], name: 'Pro', mode: 'lines' },
                { x: time0, y: [0], name: 'Pretentate', mode: 'lines' }
            ], {
                title: 'Pro and Pretentate',
                xaxis: { type: 'date' },
                yaxis: { title: 'Pressure (bar)' }
            });

            Plotly.newPlot('pressure-plot-2', [
                { x: time0, y: [0], name: 'Pdilute', mode: 'lines' },
                { x: time0, y: [0], name: 'Ploop_sp', mode: 'lines', line: { dash: 'dash', color: 'red' } }
            ], {
                title: 'Pdilute Pressure', xaxis: { type: 'date' }, yaxis: { title: 'Pressure (bar)' }
            });

            Plotly.newPlot('conductivity-plot', [
                { x: time0, y: [0], name: 'Cfeed', mode: 'lines' },
                { x: time0, y: [0], name: 'Cperm', mode: 'lines' },
                { x: time0, y: [0], name: 'Cdilute', mode: 'lines' }
            ], {
                title: 'Conductivity Measurements',
                xaxis: { type: 'date' },
                yaxis: { title: 'Conductivity (µS/cm)' }
            });

            Plotly.newPlot('MV02-plot', [
                { x: time0, y: [0], name: 'MV02_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV02', mode: 'lines' }
            ], {
                title: 'MV02: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
            });

            Plotly.newPlot('MV03-plot', [
                { x: time0, y: [0], name: 'MV03_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV03', mode: 'lines' }
            ], {
                title: 'MV03: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
            });

            Plotly.newPlot('MV04_sp-05-plot', [
                { x: time0, y: [0], name: 'MV04_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV04', mode: 'lines' },
                { x: time0, y: [0], name: 'MV05_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV05', mode: 'lines' }
            ], {
                title: 'MV04 & MV05: Setpoints and Actuals', xaxis: { type: 'date' }, yaxis: { range: [0, 100] }
            });

            Plotly.newPlot('MV06-plot', [
                { x: time0, y: [0], name: 'MV06_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV06', mode: 'lines' }
            ], {
                title: 'MV06: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
            });

            Plotly.newPlot('MV07-plot', [
                { x: time0, y: [0], name: 'MV07_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV07', mode: 'lines' }
            ], {
                title: 'MV07: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
            });

            Plotly.newPlot('MV08-plot', [
                { x: time0, y: [0], name: 'MV08_sp', mode: 'lines' },
                { x: time0, y: [0], name: 'MV08', mode: 'lines' }
            ], {
                title: 'MV08: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: { range: [0, 100] }
            });

            Plotly.newPlot('pump-plot', [
                { x: time0, y: [0], name: 'Pump_sp', mode: 'lines' }
            ], {
                title: 'Pump: Setpoint', xaxis: { type: 'date' }, yaxis: { range: [0, 100] }
            });

            setInterval(updatePlots, 500);
        }

        window.onload = function () {
            initPlots();
            fetchPUStatus();
            setInterval(fetchPUStatus, 1000);
        };
    </script>
</body>

</html>
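One detail worth flagging: the live-data payload keys units as PU_1, PU_2, ... while the status lookup uses un-underscored keys (data[`PU${puNumber}`]). As a hedged sketch only, here is the shape of the status responses both pages assume; the key names come from the handlers above and the status strings are purely illustrative.

    # Hypothetical sketch of the status endpoints polled every few seconds.
    pu_status_payload = {"PU1": "Production", "PU2": "Standby", "PU3": "Cleaning"}  # GET /api/pu_status
    ds_status_payload = {"DS": "Filling"}                                          # GET /api/ds_status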
@@ -14,10 +14,27 @@
     h1 {
         text-align: center;
     }
+    .status-container {
+        display: flex;
+        justify-content: center;
+        gap: 20px;
+        margin: 10px 0;
+        padding: 10px;
+        background-color: #f5f5f5;
+        border-radius: 8px;
+    }
+    .status-box {
+        padding: 8px 15px;
+        border-radius: 5px;
+        background-color: #e0e0e0;
+        font-weight: bold;
+        min-width: 100px;
+        text-align: center;
+    }
     .plot-container {
         display: flex;
         flex-direction: column;
-        gap: 10px;
+        gap: 5px;
         align-items: center;
     }
     .plot {
@@ -28,74 +45,82 @@
 </head>
 <body>
 <h1>Multi-PU Monitoring Dashboard</h1>
+
+<!-- Statuses for each PU -->
+<div class="status-container" id="statusContainer">
+    <div id="PU1-status" class="status-box">PU1: Loading...</div>
+    <div id="PU2-status" class="status-box">PU2: Loading...</div>
+    <div id="PU3-status" class="status-box">PU3: Loading...</div>
+</div>
+
 <div class="plot-container">
     <div id="Qperm-plot" class="plot"></div>
     <div id="Pdilute-plot" class="plot"></div>
     <div id="Pro-plot" class="plot"></div>
     <div id="Qdilute-plot" class="plot"></div>
     <div id="Qdrain-plot" class="plot"></div>
+    <div id="Cdilute-plot" class="plot"></div>
 </div>

 <script>
 const time0 = [new Date()];
 const zero = [0];
-const maxPoints = 200;
+const maxPoints = 100;
 const puList = ['PU_1', 'PU_2', 'PU_3'];
+const windowMs = 30 * 1000; // 30 seconds

 const plots = [
-    { id: 'Qperm-plot', quantity: 'Qperm', title: 'Qperm per PU', ref: 1200 },
+    { id: 'Qperm-plot', quantity: 'Qperm', title: 'Qperm per PU', refKey: 'Qperm_sp' },
     { id: 'Qdilute-plot', quantity: 'Qdilute', title: 'Qdilute per PU' },
     { id: 'Qdrain-plot', quantity: 'Qdrain', title: 'Qdrain per PU' },
     { id: 'Pro-plot', quantity: 'Pro', title: 'Pro per PU' },
-    { id: 'Pdilute-plot', quantity: 'Pdilute', title: 'Pdilute per PU', ref: 2.5 },
+    { id: 'Pdilute-plot', quantity: 'Pdilute', title: 'Pdilute per PU', refKey: 'Ploop_sp' },
+    { id: 'Cdilute-plot', quantity: 'Cdilute', title: 'Cdilute per PU' },
 ];

-function makeTraces(quantity) {
-    return puList.map((pu, i) => ({
-        x: time0.slice(),
-        y: zero.slice(),
-        name: pu,
-        mode: 'lines',
-        line: { width: 2 },
-        legendgroup: pu
-    }));
-}
+const plotTraceMap = {}; // track trace indices per plot

 function initAllPlots() {
     plots.forEach(plot => {
         const data = makeTraces(plot.quantity);
-        const layout = {
-            title: plot.title,
-            xaxis: { title: 'Time', type: 'date' },
-            yaxis: { title: plot.id.includes('P') ? 'Pressure (bar)' : 'Flow (L/h)' },
-        };
+        plotTraceMap[plot.id] = { pu: [0,1,2], extra: {} }; // base 3 PUs

-        // Add ref line if present
-        if (plot.ref !== undefined) {
+        if (plot.refKey) {
             data.push({
-                x: [time0[0], time0[0]],
-                y: [plot.ref, plot.ref],
+                x: time0.slice(),
+                y: [0],
                 mode: 'lines',
                 line: { dash: 'dash', color: 'red' },
-                name: `Ref ${plot.ref}`,
-                showlegend: true
+                name: `${plot.refKey} (PU2)`,
             });
+            plotTraceMap[plot.id].extra.ref = data.length - 1;
         }

-        // Add QSkid trace only for Qperm plot
-        if (plot.id === 'Qperm-plot') {
-            data.push({
-                x: time0.slice(),
-                y: zero.slice(),
-                name: 'QSkid',
-                mode: 'lines',
-                line: { color: 'black', width: 2, dash: 'dot' },
-                legendgroup: 'PatientSkid'
-            });
-        }
-
-        Plotly.newPlot(plot.id, data, layout);
+        if (plot.id === 'Qperm-plot') {
+            data.push({ x: time0.slice(), y: zero.slice(), name: 'QSkid', mode: 'lines' });
+            plotTraceMap[plot.id].extra.qSkid = data.length - 1;
+
+            data.push({ x: time0.slice(), y: zero.slice(), name: 'Qconso', mode: 'lines' });
+            plotTraceMap[plot.id].extra.qConso = data.length - 1;
+        }
+
+        if (plot.id === 'Qdrain-plot') {
+            data.push({ x: time0.slice(), y: zero.slice(), name: 'QSkid', mode: 'lines' });
+            plotTraceMap[plot.id].extra.qSkid = data.length - 1;
+
+            data.push({ x: time0.slice(), y: zero.slice(), name: 'Qconso', mode: 'lines' });
+            plotTraceMap[plot.id].extra.qConso = data.length - 1;
+        }
+
+        Plotly.newPlot(plot.id, data, {
+            title: plot.title,
+            xaxis: { type: 'date' },
+            yaxis: { title: plot.id.includes('P') ? 'Pressure (bar)' : 'Flow (L/h)' }
+        });
     });
 }

 async function updateAllPlots() {
     try {
         const res = await fetch('/monitor');
@@ -103,14 +128,15 @@ async function updateAllPlots() {
         const allData = await res.json();
         const timestamp = new Date();

-        // SkidData is only fetched once
         const SkidData = allData["PatientSkid"] || {};
+        const DSData = allData["DS"] || {};
+        const pu2Data = allData["PU_2"] || {}; // take ref values from PU_2

         plots.forEach(plot => {
             const xUpdates = [];
             const yUpdates = [];

+            // Extend PU1, PU2, PU3 values
             puList.forEach(pu => {
                 const puData = allData[pu] || {};
                 const value = puData[plot.quantity];
@@ -120,30 +146,78 @@ async function updateAllPlots() {
             Plotly.extendTraces(plot.id, { x: xUpdates, y: yUpdates }, puList.map((_, i) => i), maxPoints);

-            if (plot.ref !== undefined) {
-                Plotly.extendTraces(plot.id, {
-                    x: [[timestamp]],
-                    y: [[plot.ref]]
-                }, [puList.length], maxPoints); // the ref line is always the last trace
+            // Update PU2 reference line dynamically
+            Plotly.extendTraces(plot.id,
+                { x: xUpdates, y: yUpdates },
+                plotTraceMap[plot.id].pu,
+                maxPoints
+            );
+
+            if (plot.refKey) {
+                const refVal = pu2Data[plot.refKey];
+                Plotly.extendTraces(plot.id,
+                    { x: [[timestamp]], y: [[refVal ?? null]] },
+                    [plotTraceMap[plot.id].extra.ref],
+                    maxPoints
+                );
             }

-            // Extend PatientSkid.QSkid only for Qperm plot
             if (plot.id === 'Qperm-plot') {
                 const qSkid = SkidData["QSkid"];
-                const skidX = [[timestamp]];
-                const skidY = [[qSkid !== undefined ? qSkid : null]];
-                const qSkidTraceIndex = puList.length + (plot.ref !== undefined ? 1 : 0); // last trace index
-                Plotly.extendTraces(plot.id, { x: skidX, y: skidY }, [qSkidTraceIndex], maxPoints);
+                const qConso = DSData["Qconso"];
+                Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qSkid ?? null]] }, [plotTraceMap[plot.id].extra.qSkid], maxPoints);
+                Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qConso ?? null]] }, [plotTraceMap[plot.id].extra.qConso], maxPoints);
             }
+
+            if (plot.id === 'Qdrain-plot') {
+                const qSkid = SkidData["QSkid"];
+                const qConso = DSData["Qconso"];
+                Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qSkid ?? null]] }, [plotTraceMap[plot.id].extra.qSkid], maxPoints);
+                Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qConso ?? null]] }, [plotTraceMap[plot.id].extra.qConso], maxPoints);
+            }
+
+            // Sliding window (30s)
+            const layoutUpdate = {
+                'xaxis.range': [new Date(timestamp - windowMs), timestamp]
+            };
+            Plotly.relayout(plot.id, layoutUpdate);
         });
     } catch (err) {
         console.error("Failed to update plots:", err);
     }
 }

+function makeTraces(quantity) {
+    return puList.map((pu) => ({
+        x: time0.slice(),
+        y: zero.slice(),
+        name: pu,
+        mode: 'lines',
+        line: { width: 2 },
+        legendgroup: pu
+    }));
+}
+
+async function updateStatuses() {
+    try {
+        const res = await fetch("/api/pu_status");
+        const statuses = await res.json();
+        puList.forEach((pu, i) => {
+            const el = document.getElementById(`PU${i+1}-status`);
+            el.textContent = `${pu}: ${statuses[`PU${i+1}`] || "Unknown"}`;
+        });
+    } catch (err) {
+        console.error("Error fetching PU status:", err);
+    }
+}
+
 initAllPlots();
 setInterval(updateAllPlots, 1000);
+setInterval(updateStatuses, 1000);
 </script>

 </body>
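Design note on the hunks above: the old code derived extra trace indices positionally (puList.length + (plot.ref !== undefined ? 1 : 0)), which breaks as soon as a plot gains or loses an optional trace. The refactor instead records each extra trace's index in plotTraceMap at the moment the trace is pushed in initAllPlots, so every later Plotly.extendTraces call targets the right trace regardless of how many optional traces a given plot carries; the fixed ref: 1200 / ref: 2.5 constants likewise become live refKey setpoints read from the PU_2 payload. One caveat visible in the diff: the new updateAllPlots extends the PU traces twice per tick, once via the untouched puList.map call and once via plotTraceMap[plot.id].pu with the same data.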
(File diff suppressed because it is too large.)
@@ -1,12 +0,0 @@
-#!/bin/bash
-set -e
-
-echo "[UPDATE] Pulling latest code..."
-cd /home/hmi/Desktop/HMI || exit 1
-git reset --hard HEAD
-git pull origin main
-
-echo "[RESTART] Restarting HMI service..."
-sudo /bin/systemctl restart hmi.service
-
-echo "[DONE] HMI updated."
utils/analyze_from_csv.m (new file, 139 lines)
@@ -0,0 +1,139 @@
%% Cell 1: Load the data
filename = 'recording_20250806_155908.csv';
opts = detectImportOptions(filename);
opts = setvaropts(opts, 'timestamp', 'Type', 'datetime');
df = readtable(filename, opts);

df_PatientSkid = df(strcmp(df.pu, 'PatientSkid'), :);

%% Cell 2: Multi-PU display per quantity
reference_lines = struct('Qperm', 1200, 'Pdilute', 2.5);
quantities = {'Qperm', 'Qdilute', 'Qdrain', 'Pro', 'Pdilute', 'MV07_sp'};
n_quantities = numel(quantities);
pus_all = {'PU_1', 'PU_2', 'PU_3'};

figure('Name', 'Evolution of quantities per PU', 'Position', [100 100 1400 300*n_quantities]);
tiledlayout(n_quantities, 1)

for i = 1:n_quantities
    quantity = quantities{i};
    nexttile
    hold on
    for j = 1:length(pus_all)
        pu = pus_all{j};
        df_pu = df(strcmp(df.pu, pu), :);
        if any(strcmp(df_pu.Properties.VariableNames, quantity))
            plot(df_pu.timestamp, df_pu.(quantity), 'DisplayName', pu, 'LineWidth', 1.5);
        end
    end

    % Reference lines
    if isfield(reference_lines, quantity)
        yline(reference_lines.(quantity), '--r');
    end

    if strcmp(quantity, 'Qdilute') && ismember('QSkid', df_PatientSkid.Properties.VariableNames)
        plot(df_PatientSkid.timestamp, df_PatientSkid.QSkid, 'DisplayName', 'QSkid', 'LineWidth', 1.5);
    end

    ylabel(quantity)
    grid on
    legend('Location', 'northeast')
    if i == n_quantities
        xlabel('Timestamp')
    end
end

sgtitle('Evolution of quantities per PU')

%% Initial analysis for PU_1
df_pu_1 = df(strcmp(df.pu, 'PU_1'), :);
delta_t = seconds(diff(df_pu_1.timestamp));

figure('Name', 'Time between messages', 'Position', [100 100 1000 400])
histogram(delta_t, 10, 'Normalization', 'probability')
title("Time between messages for PU\_1")
xlabel("Δt (seconds)")
ylabel("Probability")
grid on

fprintf("Average time is %.3f seconds\n", mean(delta_t));

%% Display for all PUs
pus = unique(df.pu);
disp("Available PUs:")
disp(pus)

pus = {'PU_2'}; % Change here if needed
for i = 1:length(pus)
    pu = pus{i};
    fprintf('\n--- Data for %s ---\n', pu)
    plot_pu_data(df, df_PatientSkid, pu);
end

%% PU display function (plot_pu_data)
function plot_pu_data(df, df_PatientSkid, pu_name)
    df_pu = df(strcmp(df.pu, pu_name), :);

    % --------- Plot 1: Flow rates ---------
    flow_cols = {'Qperm', 'Qdilute', 'Qdrain', 'Qrecirc'};
    available_flows = intersect(flow_cols, df_pu.Properties.VariableNames);

    if ~isempty(available_flows)
        figure('Name', [pu_name ' - Flow rates'])
        hold on
        for i = 1:length(available_flows)
            plot(df_pu.timestamp, df_pu.(available_flows{i}), 'DisplayName', available_flows{i}, 'LineWidth', 1.5);
        end
        if ismember('QSkid', df_PatientSkid.Properties.VariableNames)
            plot(df_PatientSkid.timestamp, df_PatientSkid.QSkid, 'DisplayName', 'QSkid', 'LineWidth', 1.5);
        end
        title([pu_name ' - Flow Rates'])
        xlabel("Timestamp")
        ylabel("Flow (L/min)")
        legend('Location', 'northeast')
        grid on
    end

    % --------- Plot 2: Pressures ---------
    pressure_cols = {'Pro', 'Pdilute', 'Pretentate'};
    available_pressures = intersect(pressure_cols, df_pu.Properties.VariableNames);

    if ~isempty(available_pressures)
        figure('Name', [pu_name ' - Pressures'])
        hold on
        for i = 1:length(available_pressures)
            plot(df_pu.timestamp, df_pu.(available_pressures{i}), 'DisplayName', available_pressures{i}, 'LineWidth', 1.5);
        end
        title([pu_name ' - Pressures'])
        xlabel("Timestamp")
        ylabel("Pressure (bar)")
        legend('Location', 'northeast')
        grid on
    end

    % --------- Plot 3: Motorized valves ---------
    figure('Name', [pu_name ' - Motor Valve Positions'], 'Position', [100 100 1500 800])
    tiledlayout(3, 3)
    idx = 1;
    for mv = 2:8
        mv_real = sprintf('MV0%d', mv);
        mv_sp = sprintf('MV0%d_sp', mv);
        nexttile
        if ismember(mv_real, df_pu.Properties.VariableNames) && ...
                ismember(mv_sp, df_pu.Properties.VariableNames)
            plot(df_pu.timestamp, df_pu.(mv_real), 'b', 'DisplayName', 'Actual', 'LineWidth', 1.5)
            hold on
            plot(df_pu.timestamp, df_pu.(mv_sp), '--', 'Color', [1 0.5 0], 'DisplayName', 'Setpoint', 'LineWidth', 1.5)
            title(mv_real)
            ylabel("Position (%)")
            legend
            grid on
        else
            axis off
        end
        idx = idx + 1;
    end
    sgtitle([pu_name ' - Motor Valve Positions vs Setpoints'])
end
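For readers without MATLAB, the loading cell translates directly to pandas. A minimal sketch, assuming the same CSV schema as above (a 'timestamp' column plus a 'pu' column holding names such as 'PU_1' or 'PatientSkid'); the file name is the one used in Cell 1.

    import pandas as pd

    # Pandas equivalent of Cell 1, plus the PU_1 inter-message timing check.
    df = pd.read_csv("recording_20250806_155908.csv", parse_dates=["timestamp"])
    df_patient_skid = df[df["pu"] == "PatientSkid"]

    df_pu_1 = df[df["pu"] == "PU_1"]
    delta_t = df_pu_1["timestamp"].diff().dt.total_seconds().dropna()
    print(f"Average time is {delta_t.mean():.3f} seconds")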
@@ -4,12 +4,14 @@
 PI_USER="hmi"
 PI_HOST="192.168.1.46"
 REMOTE_FOLDER="/home/hmi/Desktop/HMI/recordings"
-LOCAL_FOLDER="/Users/Etienne/GitHub/NorthStar-HMI"
+LOCAL_FOLDER="/Users/Etienne/Library/CloudStorage/OneDrive-nehemis/nehemis - 04 Records/HMI_data"

 echo "Starting folder download from Raspberry Pi"

 # Run scp with sshpass
-scp -r "$PI_USER@$PI_HOST:$REMOTE_FOLDER" "$LOCAL_FOLDER"
+rsync -avz --progress --ignore-existing \
+    "$PI_USER@$PI_HOST:$REMOTE_FOLDER" \
+    "$LOCAL_FOLDER"

 # Check if scp succeeded
 if [ $? -eq 0 ]; then
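On the switch to rsync: -a recurses and preserves file attributes, -z compresses in transit, and --ignore-existing skips any recording already present locally, so repeated pulls transfer only new CSV files instead of re-copying the whole folder as scp -r did. Note that the "# Run scp with sshpass" and "# Check if scp succeeded" comments are leftovers from the previous version of the script.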
@@ -1,30 +0,0 @@
-import canopen
-import os
-
-
-class ValveBackend:
-    def __init__(self, eds_file: str, node_id: int = 0x0F):
-        self.eds_file = eds_file
-        self.node_id = node_id
-        self.network = None
-        self.node = None
-
-    def connect(self):
-        try:
-            self.network = canopen.Network()
-            self.network.connect(channel='can0', bustype='socketcan')
-            self.node = canopen.RemoteNode(self.node_id, self.eds_file)
-            self.network.add_node(self.node)
-            return True
-        except Exception as e:
-            print(f"[VALVE CONNECT ERROR] {e}")
-            return False
-
-    def send_command(self, opening: int):
-        try:
-            if self.node is None:
-                raise RuntimeError("Valve node not initialized")
-            self.node.sdo[0x6000].raw = opening
-            print(f"[VALVE] Opening set to {opening}")
-        except Exception as e:
-            print(f"[VALVE CMD ERROR] {e}")