Compare commits

...

103 Commits

Author SHA1 Message Date
Etienne Chassaing
a141f56ee5 changed name 2025-08-28 14:34:44 +02:00
Etienne Chassaing
7fd287fd06 2 PUs sequence 2025-08-28 14:30:36 +02:00
Etienne Chassaing
516c4e9517 Cleaned automatic tests 2025-08-28 14:28:55 +02:00
Etienne Chassaing
8924d68958 Merge remote-tracking branch 'origin/main' 2025-08-28 14:28:44 +02:00
Etienne Chassaing
41323eb016 Cleaned automatic tests 2025-08-28 14:28:28 +02:00
e5c4f8a80b refactoring 2025-08-28 11:55:14 +02:00
Etienne Chassaing
fcab8ac473 Some refactoring 2025-08-28 11:51:31 +02:00
c8d892ced3 created a separate patient skid file 2025-08-28 11:49:00 +02:00
cf82c3f6ec automatic test working + auto recording + automatic change of record button 2025-08-28 11:45:00 +02:00
11e8fa6d37 delete eds 2025-08-28 10:53:21 +02:00
5df448841c Changes by etienne 2025-08-28 10:53:21 +02:00
Etienne Chassaing
dd6cc73cf0 Tank level in red inf below limit 2025-08-28 10:48:54 +02:00
Etienne Chassaing
1c603a5cb1 Merge remote-tracking branch 'origin/main' 2025-08-27 16:00:40 +02:00
Etienne Chassaing
186fdbb952 debug lines 2025-08-27 16:00:35 +02:00
09909ec041 Minor debug message fix 2025-08-27 16:00:07 +02:00
Etienne Chassaing
1dc82c238e debug lines 2025-08-27 15:50:49 +02:00
Etienne Chassaing
79c5a8e941 updated port 2025-08-27 15:32:39 +02:00
Etienne Chassaing
28ee78d055 send_command_with_delay is now using http request 2025-08-27 15:28:59 +02:00
Etienne Chassaing
ead7e3b647 Qperm setpoint modified 2025-08-27 15:23:52 +02:00
Etienne Chassaing
3cfa5ef80e Qperm setpoint modified 2025-08-27 15:21:37 +02:00
Etienne Chassaing
81983707c8 Changed delay in test 2025-08-27 15:08:05 +02:00
Etienne Chassaing
73b4eaf861 Adds auto test stop 2025-08-27 15:07:17 +02:00
Etienne Chassaing
d56b3614c4 Adds Qperm sp in auto tests 2025-08-27 14:47:46 +02:00
72bf6351a6 Merge branch 'main' of https://git.nehemis.fr/aniketSaha/NorthStar-HMI 2025-08-27 11:49:21 +02:00
f2b8f54b8e Required changes to push 2025-08-27 11:48:59 +02:00
Etienne Chassaing
6683bd16a5 create requirements.txt 2025-08-21 13:46:57 +02:00
Etienne Chassaing
2a745d035f create requirements.txt 2025-08-21 13:34:20 +02:00
Etienne Chassaing
279a65cdb7 create requirements.txt 2025-08-21 13:33:02 +02:00
Etienne Chassaing
148111c627 solved multi dashboard issuse 2025-08-21 09:10:17 +02:00
Etienne Chassaing
cfcd7e3436 Auto update of setpoints 2025-08-20 15:41:38 +02:00
Etienne Chassaing
3d590d3c37 Auto update of button 2025-08-20 15:17:48 +02:00
Etienne Chassaing
018a3757c3 changes Qperm settings 2025-08-20 15:12:01 +02:00
Etienne Chassaing
2ef90283d6 Automatic PROD MODE html update 2025-08-20 15:10:25 +02:00
Etienne Chassaing
18c34a3334 Automatic PROD MODE + changes Q conso location 2025-08-20 14:58:54 +02:00
Etienne Chassaing
cae91b5f05 Automatic PROD MODE + changes Q conso location 2025-08-20 14:52:23 +02:00
Etienne Chassaing
350125b659 Correct Qconso 2025-08-20 14:30:28 +02:00
Etienne Chassaing
25b3a114bf Correct Qconso 2025-08-20 14:29:50 +02:00
Etienne Chassaing
f9422fd210 Reduce max points 2025-08-20 14:25:11 +02:00
Etienne Chassaing
868aa81c78 Correct Qinlet reading 2025-08-20 14:19:40 +02:00
Etienne Chassaing
370b7797b9 Changes Ploop_sp plot location 2025-08-20 14:13:35 +02:00
Etienne Chassaing
eddedf1b43 Changes Ploop_sp plot location 2025-08-20 13:53:43 +02:00
Etienne Chassaing
ba5e38144e Changes Ploop_sp plot location 2025-08-20 13:49:42 +02:00
Etienne Chassaing
daa92510d3 Default sp to zero 2025-08-20 10:57:27 +02:00
Etienne Chassaing
d9b59c73b8 Changed setpoints of all PUs from PU2 sp 2025-08-20 10:56:07 +02:00
Etienne Chassaing
c7c850129c Adds setpoints to the dashboard 2025-08-20 10:53:17 +02:00
d68b170ccb Changes related to dual pu control 2025-08-20 09:25:21 +02:00
Etienne Chassaing
9dabcc81d0 Adds Qperm setpoint 2025-08-18 10:30:41 +02:00
Etienne Chassaing
4ee303f854 Switch pump to Qdrain_sp 2025-08-14 14:22:54 +02:00
Etienne Chassaing
19b235bfb6 Merge remote-tracking branch 'origin/main' 2025-08-14 14:11:53 +02:00
Etienne Chassaing
115ea2768e Adds pump plot 2025-08-14 14:11:47 +02:00
2446fb7b59 Added serial communication and removed obsolete files 2025-08-14 11:27:20 +02:00
41c8d49d31 corrected patient skid 2025-08-12 15:53:41 +02:00
e8755bd1de :wq Merge branch 'main' of https://git.nehemis.fr/aniketSaha/NorthStar-HMI 2025-08-12 15:49:47 +02:00
Etienne Chassaing
f808b88f93 Updates readings from patient Skid 2025-08-12 11:42:04 +02:00
117265586c auto turning off of patient skid tested 2025-08-11 13:16:09 +02:00
Etienne Chassaing
ce8ff0a7bf Adds automatic turning off of patient skid 2025-08-11 13:07:14 +02:00
Etienne Chassaing
5bc81789e5 Updates download_recordings with faster protocol 2025-08-11 11:54:23 +02:00
5ac459a0d5 Added the csv reader 2025-08-07 11:04:06 +02:00
365162bd65 Changes to integrate qconso and fix minor exisiting bugs 2025-08-06 15:29:41 +02:00
Etienne Chassaing
158daccb3e Adds debug options in html 2025-08-06 12:05:36 +02:00
Etienne Chassaing
ded0565b10 Adds debug options in html 2025-08-06 12:02:43 +02:00
Etienne Chassaing
72deb7646e Adds debug options in html 2025-08-06 12:01:39 +02:00
Etienne Chassaing
985280cfe9 Qconso reading updated 2025-08-06 11:56:51 +02:00
Etienne Chassaing
f6180386d1 Merge remote-tracking branch 'origin/main' 2025-08-06 11:53:51 +02:00
Etienne Chassaing
34f0dda210 Qconso computation corrected 2025-08-06 11:53:47 +02:00
743aeee130 Changes for monitor pages fix 2025-08-06 11:52:40 +02:00
Etienne Chassaing
9ad18a17c8 Reformatting 2025-08-06 11:13:29 +02:00
400fe40bcd Changes for integrating the docking parameters 2025-08-06 11:03:15 +02:00
Etienne Chassaing
7edb759bd9 Adds DS data reading and monitor page 2025-08-06 10:49:56 +02:00
Etienne Chassaing
5b11f8006c Merge branch 'main' of https://git.nehemis.fr/aniketSaha/NorthStar-HMI 2025-08-06 10:45:50 +02:00
Etienne Chassaing
6aeb2f9d3e Adds DS data reading and monitor page 2025-08-06 10:45:46 +02:00
7d2e11a4ce Added other tpdo variables 2025-08-06 10:44:35 +02:00
Etienne Chassaing
7984e11514 Adds DS data reading 2025-08-06 10:24:00 +02:00
Etienne Chassaing
7a68c14813 Adds DS data reading 2025-08-06 10:23:18 +02:00
5f03efb2cb Added dictionary for docking parameters 2025-08-06 10:22:37 +02:00
6d29fa0059 Added docking board parameters on hmi backend 2025-08-06 10:13:45 +02:00
efd44dbf3e updates to html page 2025-08-05 13:23:03 +02:00
79b9f2d95d PU1 adress corrected 2025-08-05 11:34:38 +02:00
81353c8b1f conductivity reading update and logging of uvicorn set to warning 2025-08-05 11:08:32 +02:00
Etienne Chassaing
9b0daf2d06 Merge remote-tracking branch 'origin/main' 2025-08-05 09:43:28 +02:00
Etienne Chassaing
7b6f4ffe78 Changed rounding of conductivity 2025-08-05 09:43:19 +02:00
45f0c11196 Changes for auto connect and conductivity 2025-08-04 17:24:09 +02:00
Etienne Chassaing
c25a387e8b Connect button auto switch 2025-08-04 17:01:59 +02:00
Etienne Chassaing
9e79f343a5 Adds auto connect 2025-08-04 16:52:17 +02:00
Etienne Chassaing
d311af6da4 Switch port back to 8080 2025-08-04 16:35:31 +02:00
Etienne Chassaing
aef1d1cdfa Mock can update 2025-08-04 16:34:50 +02:00
Etienne Chassaing
d7e5d1e34e Mock can updat and multi conductivity reading 2025-08-04 16:31:06 +02:00
Etienne Chassaing
d53170fbb0 Adds Conductivity plot on monitor pages 2025-08-04 16:24:24 +02:00
6c26d9d6a2 Fixed valve feedback data discrepancy 2025-08-04 16:24:05 +02:00
e8479bd8f1 :wqMerge branch 'main' of https://git.nehemis.fr/aniketSaha/NorthStar-HMI 2025-08-04 15:59:00 +02:00
Etienne Chassaing
9e5e4c2a70 corrected active_PUs logic 2025-08-04 15:58:25 +02:00
ef91ff4426 Merge branch 'main' of https://git.nehemis.fr/aniketSaha/NorthStar-HMI 2025-08-04 15:49:10 +02:00
1aee1c012f Changes for conductivity 2025-08-04 15:48:59 +02:00
Etienne Chassaing
6587851267 Adds conductivity reading on the main panel 2025-08-04 15:47:58 +02:00
Etienne Chassaing
09f9e8feb2 Adds a state for automatic connected PU detection 2025-08-01 10:25:06 +02:00
9102812a6f Fixed subindex of Ploop 2025-07-31 16:28:10 +02:00
Etienne Chassaing
9443f5e598 feat: puts back the MV0i readings and remove recording button from monitor pages 2025-07-30 14:31:34 +02:00
Etienne Chassaing
8f87e6890b python for reading data from recording 2025-07-30 14:27:00 +02:00
19aa551af1 Removed obsolete file 2025-07-26 16:39:20 +02:00
bb39cede93 Removed credentials file from git 2025-07-26 16:37:39 +02:00
fe4a478ced Updated gitignore 2025-07-26 16:36:57 +02:00
feb48c6a19 Removed feedvalve logic (obsolete) and documented the backend class 2025-07-25 11:59:57 +02:00
3db1f96489 Added tpdo reading mechanism for faster data logging 2025-07-24 15:40:06 +02:00
28 changed files with 15232 additions and 3993 deletions

7
.gitignore vendored

@@ -1,6 +1,3 @@
__pycache__/main.cpython-311.pyc
recordings/recording_20250716_145535.csv
recordings/recording_20250716_143101.csv
recordings/recording_20250716_143537.csv
recordings/recording_20250716_150700.csv
recordings/recording_20250716_181008.csv
recordings/*
credentials.json

5
.vscode/settings.json vendored Normal file

@@ -0,0 +1,5 @@
{
"python.analysis.extraPaths": [
"./serial"
]
}


@@ -7,7 +7,7 @@ class CANBackend:
def __init__(self, eds_file=None):
self.connected = False
def connect(self, node_id: int, eds_path: str) -> bool:
def connect(self) -> bool:
# Placeholder for connection logic
self.connected = True
return True
@@ -23,7 +23,7 @@ class CANBackend:
# Placeholder for thermal loop cleaning
pass
def send_state_command(self, state: str, pu_number : int, ploop_setpoint : float):
def send_state_command(self, state: str, pu_number : int, ploop_setpoint : float, qperm_setpoint : float):
# Placeholder for sending mode command
PUs_states[pu_number-1] = {"PU_MODE": state, "ploop_setpoint":ploop_setpoint}
@@ -40,7 +40,9 @@ class CANBackend:
"PS2": 6.2,
"PS3": 6.2,
"PS4": 6.2,
"Cond": 1* np.random.random(),
"Conductivity_Feed": 1* np.random.random(),
"Conductivity_Permeate": 1 * np.random.random(),
"Conductivity_Product": 1 * np.random.random(),
"MV02": round(100 * np.random.random(), 2),
"MV02_sp": round(100 * np.random.random(), 2),
"MV03": round(100 * np.random.random(), 2),
@@ -64,7 +66,9 @@ class CANBackend:
"PS2": round(10 * np.random.random(), 2),
"PS3": round(10 * np.random.random(), 2),
"PS4": round(10 * np.random.random(), 2),
"Cond": 1* np.random.random(),
"Conductivity_Feed": 1 * np.random.random(),
"Conductivity_Permeate": 1 * np.random.random(),
"Conductivity_Product": 1 * np.random.random(),
"MV02": round(100 * np.random.random(), 2),
"MV02_sp": round(100 * np.random.random(), 2),
"MV03": round(100 * np.random.random(), 2),

153
analysis/read_recording.py Normal file

@@ -0,0 +1,153 @@
# Cell 1: Import libraries
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Configuration for readable plots
sns.set(style="whitegrid")
plt.rcParams['figure.figsize'] = (14, 6)
# Load the data
df = pd.read_csv("../recordings/recording_20250718_112807.csv", parse_dates=["timestamp"])
df_PatientSkid = df[df['pu'] == 'PatientSkid'].copy()
# Final cell: multi-PU plot, one panel per quantity
import matplotlib.dates as mdates
reference_lines = {
'Qperm': 1200,
'Pdilute': 2.5
}
quantities = ['Qperm', 'Qdilute', 'Qdrain', 'Pro', 'Pdilute','MV07_sp']
n_quantities = len(quantities)
pus_all =pus = ['PU_1','PU_2','PU_3']
fig, axes = plt.subplots(n_quantities, 1, figsize=(14, 3 * n_quantities), sharex=True)
fig.suptitle("Evolution of quantities per PU", fontsize=16)
for i, quantity in enumerate(quantities):
ax = axes[i]
for pu in pus_all:
df_pu = df[df['pu'] == pu]
if quantity in df_pu.columns:
ax.plot(df_pu['timestamp'], df_pu[quantity], label=pu)
if quantity in reference_lines:
ax.axhline(reference_lines[quantity], linestyle='--', color='red')
if quantity == 'Qdilute':
ax.plot(df_PatientSkid['timestamp'], df_PatientSkid['QSkid'], label='QSkid')
ax.set_ylabel(quantity)
ax.grid(True)
ax.legend(loc='upper right')
if i == n_quantities - 1:
ax.set_xlabel("Timestamp")
else:
ax.set_xlabel("")
ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S"))
plt.tight_layout(rect=[0, 0, 1, 0.96])
plt.show()
# Initial analysis for PU_1
df_pu_1 = df[df['pu'] == 'PU_1'].copy()
df_pu_1.sort_values('timestamp', inplace=True)
df_pu_1['delta_t'] = df_pu_1['timestamp'].diff().dt.total_seconds()
df_pu_1 = df_pu_1.iloc[1:] # Drop the first value (NaN)
plt.figure('Time between messages',figsize=(10, 4))
sns.histplot(df_pu_1['delta_t'], bins=10,stat='probability')
plt.title("Time between messages for PU_1")
plt.xlabel("Δt (seconds)")
plt.ylabel("Probability")
plt.grid(True)
plt.tight_layout()
# plt.show()
print("Average time is ", df_pu_1['delta_t'].mean())
def plot_pu_data(pu_name):
# Filtering
df_pu = df[df['pu'] == pu_name].copy()
df_pu['timestamp'] = pd.to_datetime(df_pu['timestamp'], errors='coerce')
df_pu = df_pu.dropna(subset=['timestamp'])
# --------- Plot 1: Flow rates ---------
flow_cols = ['Qperm', 'Qdilute', 'Qdrain', 'Qrecirc']
available_flows = [col for col in flow_cols if col in df_pu.columns]
if available_flows:
fig, ax = plt.subplots(figsize=(10, 4))
for col in available_flows:
ax.plot(df_pu['timestamp'], df_pu[col], label=col)
ax.plot(df_PatientSkid['timestamp'], df_PatientSkid['QSkid'],label='QSkid')
ax.set_title(f'{pu_name} - Flow Rates')
ax.set_xlabel("Timestamp")
ax.set_ylabel("Flow (L/min)")
ax.legend(loc='upper right')
ax.grid(True)
fig.tight_layout()
# plt.show()
# --------- Plot 2: Pressures ---------
pressure_cols = ['Pro', 'Pdilute', 'Pretentate']
available_pressures = [col for col in pressure_cols if col in df_pu.columns]
if available_pressures:
fig, ax = plt.subplots(figsize=(10, 4))
for col in available_pressures:
ax.plot(df_pu['timestamp'], df_pu[col], label=col)
ax.set_title(f'{pu_name} - Pressures')
ax.set_xlabel("Timestamp")
ax.set_ylabel("Pressure (bar)")
ax.legend(loc='upper right')
ax.grid(True)
fig.tight_layout()
# plt.show()
# --------- Plot 3: Motor Valve Positions ---------
mv_indices = range(2, 9) # MV02 to MV08
fig, axes = plt.subplots(3, 3, figsize=(15, 10), sharex=True)
fig.suptitle(f'{pu_name} - Motor Valve Positions vs Setpoints', fontsize=16)
plot_index = 0
for mv in mv_indices:
mv_real = f"MV0{mv}"
mv_sp = f"MV0{mv}_sp"
row, col = divmod(plot_index, 3)
ax = axes[row, col]
if mv_real in df_pu.columns and mv_sp in df_pu.columns:
ax.plot(df_pu['timestamp'], df_pu[mv_real], label='Actual', color='blue')
ax.plot(df_pu['timestamp'], df_pu[mv_sp], label='Setpoint', linestyle='--', color='orange')
ax.set_title(f"{mv_real}")
ax.set_ylabel("Position (%)")
ax.grid(True)
if row == 2:
ax.set_xlabel("Timestamp")
else:
ax.set_visible(False)
plot_index += 1
# Hide unused subplots
while plot_index < 9:
row, col = divmod(plot_index, 3)
axes[row, col].set_visible(False)
plot_index += 1
handles, labels = axes[0][0].get_legend_handles_labels()
fig.legend(handles, labels, loc='upper right')
fig.tight_layout(rect=[0, 0, 1, 0.96])
# plt.show()
# Final cell: plots for all PUs
pus = df['pu'].dropna().unique()
print("Available PUs:", pus)
pus = ['PU_1']
for pu in pus:
print(f"\n--- Data for {pu} ---\n")
plot_pu_data(pu)
plt.show()


@@ -1,221 +0,0 @@
import threading
import canopen
import time
import os
class CANBackend:
def __init__(self, eds_file =None):
self.network = None
self.nodes = {} # {1: RemoteNode(0x02), 2: RemoteNode(0x03), ...}
self.connected = False
self.lock = threading.Lock()
self.polling_thread = None
self.polling_active = False
self.latest_data = {
1: {}, # PU1
2: {}, # PU2
3: {} # PU3
}
if eds_file is None:
self.eds_path = os.path.join(os.path.dirname(__file__), "eds_file", "processBoard_0.eds")
else:
self.eds_path = eds_file
def connect(self):
try:
self.network = canopen.Network()
self.network.connect(channel='can0', bustype='socketcan')
# PU mapping: PU1->0x02, PU2->0x04, PU3->0x127
node_map = {
1: 0x02,
2: 0x04,
3: 0x127,
}
for pu_number, node_id in node_map.items():
node = canopen.RemoteNode(node_id, self.eds_path)
self.network.add_node(node)
self.nodes[pu_number] = node
self.connected = True
self._start_sdo_polling()
return True
except Exception as e:
print(f"[CONNECT ERROR] {e}")
return False
def shutdown(self):
self.polling_active = False
if self.network:
self.network.disconnect()
self.nodes.clear()
self.connected = False
def _start_sdo_polling(self):
if self.polling_thread and self.polling_thread.is_alive():
return
self.polling_active = True
self.polling_thread = threading.Thread(target=self._sdo_polling_loop, daemon=True)
self.polling_thread.start()
def _sdo_polling_loop(self):
while self.polling_active:
with self.lock:
try:
for pu_number, node in self.nodes.items():
try:
fm1 = node.sdo[0x2004][1].raw
fm2 = node.sdo[0x2004][2].raw
fm3 = node.sdo[0x2004][3].raw
fm4 = node.sdo[0x2004][4].raw
ps1 = node.sdo[0x2005][1].raw
ps2 = node.sdo[0x2005][2].raw
ps3 = node.sdo[0x2005][3].raw
ps4 = node.sdo[0x2005][4].raw
mv02Cmd = node.sdo[0x2014][1].raw
mv03Cmd = node.sdo[0x2012][1].raw
mv04Cmd = node.sdo[0x2019][1].raw
mv05Cmd = node.sdo[0x2020][1].raw
mv06Cmd = node.sdo[0x2021][1].raw
mv07Cmd = node.sdo[0x2015][1].raw
mv08Cmd = node.sdo[0x2022][1].raw
# mv02fb = node.sdo[0x3000][2].raw
# mv03fb = node.sdo[0x3000][3].raw
# mv04fb = node.sdo[0x3000][4].raw
# mv05fb = node.sdo[0x3000][5].raw
# mv06fb = node.sdo[0x3000][6].raw
# mv07fb = node.sdo[0x3000][7].raw
# mv08fb = node.sdo[0x3000][8].raw
self.latest_data[pu_number] = {
"FM1": (fm1 / 100.0) * 60.0,
"FM2": (fm2 / 100.0) * 60.0,
"FM3": (fm3 / 100.0) * 60.0,
"FM4": (fm4 / 100.0) * 60.0,
"PS1": ps1 / 1000.0,
"PS2": ps2 / 1000.0,
"PS3": ps3 / 1000.0,
"PS4": ps4 / 1000.0,
"MV02_sp" : mv02Cmd / 100.0,
"MV03_sp" : mv03Cmd / 100.0,
"MV04_sp" : mv04Cmd / 100.0,
"MV05_sp" : mv05Cmd / 100.0,
"MV06_sp" : mv06Cmd / 100.0,
"MV07_sp" : mv07Cmd / 100.0,
"MV08_sp" : mv08Cmd / 100.0,
# "MV02" : mv02fb,
# "MV03" : mv03fb,
# "MV04" : mv04fb,
# "MV05" : mv05fb,
# "MV06" : mv06fb,
# "MV07" : mv07fb,
# "MV08" : mv08fb,
}
print(f"[PU{pu_number}] FM1: {fm1}, PS1: {ps1}")
except Exception as inner_e:
print(f"[SDO READ ERROR] PU{pu_number}: {inner_e}")
except Exception as outer_e:
print(f"[SDO POLL ERROR] {outer_e}")
time.sleep(1.0)
def get_latest_data(self, pu_number: int):
with self.lock:
return self.latest_data.get(pu_number, {}).copy()
def read_current_state(self, pu_number: int):
try:
node = self.nodes.get(pu_number)
if node is None:
return "Offline"
state_raw = node.sdo[0x2000].raw
return self.decode_state(state_raw)
except Exception as e:
print(f"[PU{pu_number} READ ERROR] {e}")
return "Offline"
def decode_state(self, state_val: int) -> str:
state_map = {
0: "SYSTEM_MODE_INIT",
1: "SYSTEM_MODE_OFF",
2: "SYSTEM_MODE_READY",
3: "SYSTEM_MODE_PRODUCTION",
4: "SYSTEM_MODE_LOW_LOOP_PRESSURE",
5: "SYSTEM_MODE_LOOP_CLEANING",
6: "SYSTEM_MODE_HEATING_RO",
7: "SYSTEM_MODE_RINSING_RO",
8: "SYSTEM_MODE_HEATING_EDI",
9: "SYSTEM_MODE_COOLING_EDI",
10: "SYSTEM_MODE_RO_FLUSH",
11: "SYSTEM_MODE_RO_RINSE",
12: "SYSTEM_MODE_EDI_RINSE",
15: "SYSTEM_MODE_FAIL_SAFE",
16: "SYSTEM_MODE_FIRST_FLUSH",
255: "SYSTEM_MODE_DEFAULT"
}
return state_map.get(state_val, f"UNKNOWN({state_val})")
def send_state_command(self, state: str, pu_number: int, ploop_setpoint: float):
if not self.connected:
raise RuntimeError("CAN not connected")
state_map = {
"IDLE": 1,
"PRE-PRODUCTION": 2,
"PRODUCTION" : 3,
"MAINTENANCE": 8,
"EMERGENCY_STOP": 9,
"FIRST_START": 10
}
if state not in state_map:
raise ValueError(f"Invalid state: {state}")
try:
node = self.nodes.get(pu_number)
if node is None:
raise ValueError(f"PU{pu_number} not connected")
print(f"[DEBUG] Writing state {state_map[state]} to 0x2024:{pu_number}")
node.sdo[0x2024][0x01].raw = state_map[state]
print(f"[DEBUG] Writing ploop_setpoint {ploop_setpoint} to 0x2007")
node.sdo[0x2007].raw = int(ploop_setpoint * 100)
except Exception as e:
print(f"[SDO WRITE ERROR] PU{pu_number}: {e}")
raise
def send_thermal_loop_cleaning(self, mode: str, pu_number: int):
if not self.connected:
raise RuntimeError("CAN not connected")
mode_map = {
"IDLE": 0,
"ACTIVE": 1
}
if mode not in mode_map:
raise ValueError(f"Invalid thermal loop mode: {mode}")
try:
node = self.nodes.get(pu_number)
if node is None:
raise ValueError(f"PU{pu_number} not connected")
print(f"[DEBUG] Sending thermal loop mode {mode} to 0x2024:{pu_number}")
node.sdo[0x2024][pu_number].raw = mode_map[mode]
except Exception as e:
print(f"[THERMAL LOOP ERROR] PU{pu_number}: {e}")
raise


@@ -1,4 +0,0 @@
{
"username": "northstarNehemis",
"password": "NehemisNorthStar@2025"
}

6357
eds_file/dockingBoard_0.eds Normal file

File diff suppressed because it is too large


@@ -1,385 +0,0 @@
; EDS file for inletvalveboard - generated by CANopen DeviceDesigner 3.14.2
[FileInfo]
FileName=inletvalveboard.eds
FileVersion=1.0
FileRevision=1.0
EDSVersion=4.0
Description=EDS
CreationTime=12:05PM
CreationDate=04-15-25
ModificationTime=12:05PM
ModificationDate=04-15-25
CreatedBy=Vineeta Gupta
ModifiedBy=Vineeta Gupta
[Comments]
Lines=1
Line1=generated by CANopen DeviceDesigner by emotas
[DeviceInfo]
VendorName=nehemis
VendorNumber=0x319
ProductName=InletValveController
ProductNumber=1234
RevisionNumber=0x1
OrderCode=InletValveController
BaudRate_10=0
BaudRate_20=0
BaudRate_50=0
BaudRate_125=0
BaudRate_250=1
BaudRate_500=0
BaudRate_800=0
BaudRate_1000=0
NrOfRxPDO=0
NrOfTxPDO=1
SimpleBootupSlave=1
SimpleBootupMaster=0
LSS_Supported=0
Granularity=0
DynamicChannelsSupported=0
GroupMessaging=0
[DummyUsage]
Dummy0001=0
Dummy0002=0
Dummy0003=0
Dummy0004=0
Dummy0005=0
Dummy0006=0
Dummy0007=0
[MandatoryObjects]
SupportedObjects=3
1=0x1000
2=0x1001
3=0x1018
[ManufacturerObjects]
SupportedObjects=2
1=0x2001
2=0x3000
[OptionalObjects]
SupportedObjects=13
1=0x1003
2=0x1008
3=0x1014
4=0x1015
5=0x1016
6=0x1017
7=0x1029
8=0x1200
9=0x1800
10=0x1a00
11=0x6000
12=0x6001
13=0x6002
[1000]
ParameterName=Device Type
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0
[1001]
ParameterName=Error Register
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
[1003]
ParameterName=Predefined Error Field
ObjectType=8
SubNumber=2
[1003sub0]
ParameterName=Number of Errors
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
DefaultValue=0
[1003sub1]
ParameterName=Standard Error Field
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0
[1008]
ParameterName=Manufacturer device name
ObjectType=7
DataType=9
AccessType=const
PDOMapping=0
DefaultValue=emotas Slave 1
[1014]
ParameterName=COB ID EMCY
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=$NODEID+0x80
[1015]
ParameterName=Inhibit Time Emergency
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0
DefaultValue=0x0
[1016]
ParameterName=Consumer Heartbeat Time
ObjectType=8
SubNumber=1
[1016sub0]
ParameterName=Number of entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=0
[1017]
ParameterName=Producer Heartbeat Time
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0
[1018]
ParameterName=Identity Object
ObjectType=9
SubNumber=5
[1018sub0]
ParameterName=Number of entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=4
[1018sub1]
ParameterName=Vendor Id
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0x319
[1018sub2]
ParameterName=Product Code
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=1234
[1018sub3]
ParameterName=Revision number
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=0x1
[1018sub4]
ParameterName=Serial number
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
[1029]
ParameterName=Error behaviour
ObjectType=8
SubNumber=3
[1029sub0]
ParameterName=Nr of Error Classes
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=2
[1029sub1]
ParameterName=Communication Error
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
DefaultValue=1
[1029sub2]
ParameterName=Specific Error Class
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
[1200]
ParameterName=Server SDO Parameter
ObjectType=9
SubNumber=3
[1200sub0]
ParameterName=Number of entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=2
[1200sub1]
ParameterName=COB ID Client to Server
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=$NODEID+0x600
[1200sub2]
ParameterName=COB ID Server to Client
ObjectType=7
DataType=7
AccessType=ro
PDOMapping=0
DefaultValue=$NODEID+0x580
[1800]
ParameterName=TPDO communication parameter
ObjectType=9
SubNumber=6
[1800sub0]
ParameterName=Highest sub-index supported
ObjectType=7
DataType=5
AccessType=const
PDOMapping=0
DefaultValue=6
[1800sub1]
ParameterName=COB-ID used by TPDO
ObjectType=7
DataType=7
AccessType=rw
PDOMapping=0
[1800sub2]
ParameterName=Transmission type
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
[1800sub3]
ParameterName=Inhibit time
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0
[1800sub5]
ParameterName=Event timer
ObjectType=7
DataType=6
AccessType=rw
PDOMapping=0
[1800sub6]
ParameterName=SYNC start value
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
[1a00]
ParameterName=Transmit PDO Mapping Parameter
ObjectType=9
SubNumber=2
;;This object contains the mapping for the PDO the device is able to transmit.
;;
[1a00sub0]
ParameterName=Highest sub-index supported
ObjectType=7
DataType=5
AccessType=const
PDOMapping=0
DefaultValue=1
[1a00sub1]
ParameterName=Mapping Entry 1
ObjectType=7
DataType=7
AccessType=const
PDOMapping=0
DefaultValue=0x60010008
[2001]
ParameterName=Manufacturer Object
ObjectType=7
DataType=4
AccessType=rw
PDOMapping=1
[3000]
ParameterName=Managed Array
ObjectType=8
SubNumber=3
[3000sub0]
ParameterName=NUmber of Entries
ObjectType=7
DataType=5
AccessType=ro
PDOMapping=0
DefaultValue=2
[3000sub1]
ParameterName=Sub 1
ObjectType=7
DataType=3
AccessType=ro
PDOMapping=1
[3000sub2]
ParameterName=sub 2
ObjectType=7
DataType=3
AccessType=rw
PDOMapping=1
[6000]
ParameterName=Position Set Point
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=0
[6001]
ParameterName=Position Feedback
ObjectType=7
DataType=5
AccessType=rw
PDOMapping=1
[6002]
ParameterName=Motor Current
ObjectType=7
DataType=8
AccessType=rw
PDOMapping=1
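
The deleted EDS above describes the inlet valve controller's object dictionary: position setpoint at 0x6000, position feedback at 0x6001, motor current at 0x6002, with 0x6001 mapped into the single TPDO. As a hedged sketch of how such an object can be written over SDO with the canopen library (the node ID 0x05 and the EDS path are assumptions; the project's ValveBackend implementation is not part of this diff):

# Sketch only: writing the "Position Set Point" object (0x6000) from this EDS
# with the canopen library. Node ID and channel are assumed values.
import canopen

network = canopen.Network()
network.connect(channel='can0', bustype='socketcan')

valve_node = canopen.RemoteNode(0x05, 'eds_file/inletvalveboard.eds')  # assumed node ID
network.add_node(valve_node)

valve_node.sdo[0x6000].raw = 70        # command the valve to 70 % open
feedback = valve_node.sdo[0x6001].raw  # Position Feedback (also mapped to the TPDO)
print(f"commanded 70 %, feedback {feedback}")

network.disconnect()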

File diff suppressed because it is too large

Binary file not shown.

Binary file not shown.

377
hardware/classCAN.py Normal file

@@ -0,0 +1,377 @@
import threading
import canopen
import can
import time
import os
import logging
class CANBackend:
"""
CANBackend handles CANopen communication with two Process Units (PU1 and PU2).
It listens for TPDOs, tracks real-time data, and sends SDO control commands
such as setting system modes and setpoints.
"""
def __init__(self, eds_file=None):
"""
Initialize the CAN backend.
:param eds_file: Optional path to the EDS file to use for the master node.
"""
self.network = None
self.master_node = None
self.master_node_id = 0x16 # Docking board node ID
self.nodes = {}
self.connected = False
self.lock = threading.Lock()
self.latest_data = {
0: {}, #Docking Parameters
1: {}, # PU1 data
2: {}, # PU2 data
}
# Default EDS file path
self.eds_path = eds_file if eds_file else os.path.join(os.path.dirname(__file__), "eds_file", "dockingBoard_0.eds")
def connect(self):
"""
Connects to the CAN network and sets up the master node.
:return: True if successful, False otherwise.
"""
try:
self.network = canopen.Network()
self.network.connect(channel='can0', bustype='socketcan')
self.master_node = canopen.RemoteNode(self.master_node_id, self.eds_path)
self.network.add_node(self.master_node)
self.master_node.nmt.state = 'OPERATIONAL'
self.nodes[0] = self.master_node
# Start background listener for TPDOs
self.listener_active = True
self.bus = can.interface.Bus(channel='can0', bustype='socketcan')
self.listener_thread = threading.Thread(target=self._can_listener_loop, daemon=True)
self.listener_thread.start()
self.connected = True
return True
except Exception as e:
print(f"[CONNECT ERROR] {e}")
return False
def shutdown(self):
"""
Cleanly shuts down the CAN backend and listener.
"""
self.listener_active = False
if self.network:
self.network.disconnect()
if hasattr(self, 'bus'):
self.bus.shutdown()
self.nodes.clear()
self.connected = False
def _can_listener_loop(self):
"""
Background thread to listen for CAN TPDO messages.
Updates the internal state for PU1 and PU2 based on COB-ID.
"""
while self.listener_active:
msg = self.bus.recv(1.0)
if msg is None:
continue
try:
cob_id = msg.arbitration_id
data = msg.data
with self.lock:
# ========== PU1 COB-IDs ==========
if cob_id == 0x2A6 and len(data) >= 8:
self.latest_data[1].update({
"FM1": int.from_bytes(data[0:2], 'little') / 100.0 * 60.0,
"FM2": int.from_bytes(data[2:4], 'little') / 100.0 * 60.0,
"FM3": int.from_bytes(data[4:6], 'little') / 100.0 * 60.0,
"FM4": int.from_bytes(data[6:8], 'little') / 100.0 * 60.0,
})
elif cob_id == 0x2A7 and len(data) == 6:
self.latest_data[1].update({
"PS1": int.from_bytes(data[0:2], 'little') / 1000.0,
"PS2": int.from_bytes(data[2:4], 'little') / 1000.0,
"PS3": int.from_bytes(data[4:6], 'little') / 1000.0,
})
elif cob_id == 0x2A8 and len(data) >= 8:
self.latest_data[1].update({
"MV02_sp": int.from_bytes(data[0:2], 'little') / 100.0,
"MV03_sp": int.from_bytes(data[2:4], 'little') / 100.0,
"MV04_sp": int.from_bytes(data[4:6], 'little') / 100.0,
"MV05_sp": int.from_bytes(data[6:8], 'little') / 100.0,
})
elif cob_id == 0x2A9 and len(data) >= 8:
self.latest_data[1].update({
"MV06_sp": int.from_bytes(data[0:2], 'little') / 100.0,
"MV07_sp": int.from_bytes(data[2:4], 'little') / 100.0,
"MV08_sp": int.from_bytes(data[4:6], 'little') / 100.0,
"Pump_sp": int.from_bytes(data[6:8], 'little') / 100.0,
})
elif cob_id == 0x2AA and len(data) >= 7:
data = list(data)
self.latest_data[1].update({
"MV02": 100 * data[0] / 255,
"MV03": 100 * data[1] / 255,
"MV04": 100 * data[2] / 255,
"MV05": 100 * data[3] / 255,
"MV06": 100 * data[4] / 255,
"MV07": 100 * data[5] / 255,
"MV08": 100 * data[6] / 255,
})
elif cob_id == 0x2AB and len(data) >= 7:
self.latest_data[1].update({
"PU1_STATE" : data[0],
"Conductivity_Feed" : int.from_bytes(data[1:3], 'little') / 100.0,
"Conductivity_Permeate": int.from_bytes(data[3:5], 'little') / 100.0,
"Conductivity_Product" : int.from_bytes(data[5:7], 'little') / 100.0,
})
# ========== PU2 COB-IDs ==========
elif cob_id == 0x2AD and len(data) >= 8:
self.latest_data[2].update({
"FM1": int.from_bytes(data[0:2], 'little') / 100.0 * 60.0,
"FM2": int.from_bytes(data[2:4], 'little') / 100.0 * 60.0,
"FM3": int.from_bytes(data[4:6], 'little') / 100.0 * 60.0,
"FM4": int.from_bytes(data[6:8], 'little') / 100.0 * 60.0,
})
elif cob_id == 0x2AE and len(data) == 6:
self.latest_data[2].update({
"PS1": int.from_bytes(data[0:2], 'little') / 1000.0,
"PS2": int.from_bytes(data[2:4], 'little') / 1000.0,
"PS3": int.from_bytes(data[4:6], 'little') / 1000.0,
})
elif cob_id == 0x2AF and len(data) >= 8:
self.latest_data[2].update({
"MV02_sp": int.from_bytes(data[0:2], 'little') / 100.0,
"MV03_sp": int.from_bytes(data[2:4], 'little') / 100.0,
"MV04_sp": int.from_bytes(data[4:6], 'little') / 100.0,
"MV05_sp": int.from_bytes(data[6:8], 'little') / 100.0,
})
elif cob_id == 0x2B0 and len(data) >= 8:
self.latest_data[2].update({
"MV06_sp": int.from_bytes(data[0:2], 'little') / 100.0,
"MV07_sp": int.from_bytes(data[2:4], 'little') / 100.0,
"MV08_sp": int.from_bytes(data[4:6], 'little') / 100.0,
"Qdrain_sp": int.from_bytes(data[6:8], 'little') / 100.0,
})
elif cob_id == 0x2B1 and len(data) >= 7:
data = list(data)
self.latest_data[2].update({
"MV02": 100 * data[0] / 255,
"MV03": 100 * data[1] / 255,
"MV04": 100 * data[2] / 255,
"MV05": 100 * data[3] / 255,
"MV06": 100 * data[4] / 255,
"MV07": 100 * data[5] / 255,
"MV08": 100 * data[6] / 255,
})
# elif cob_id == 0x1B9 and len(data) >= 6:
# self.latest_data[1].update({
# "Conductivity_Feed": int.from_bytes(data[0:2], 'little'),
# "Conductivity_Permeate": int.from_bytes(data[2:4], 'little'),
# "Conductivity_Product": int.from_bytes(data[4:6], 'little'),
# })
# elif cob_id == 0x1BA and len(data) >= 6:
# self.latest_data[1].update({
# "Temperature_Feed": int.from_bytes(data[0:2], 'little'),
# "Temperature_Permeate": int.from_bytes(data[2:4], 'little'),
# "Temperature_Product": int.from_bytes(data[4:6], 'little'),
# })
# elif cob_id == 0x2B2 and len(data) >= 1:
# self.latest_data[2]["PU2_STATE"] = data[0]
elif cob_id == 0x2B2 and len(data) >= 7:
self.latest_data[2].update({
"PU2_STATE" : data[0],
"Conductivity_Feed" : int.from_bytes(data[1:3], 'little') / 100.0,
"Conductivity_Permeate": int.from_bytes(data[3:5], 'little') / 100.0,
"Conductivity_Product" : int.from_bytes(data[5:7], 'little') / 100.0,
})
# ========== Docking Parameters ==========
elif cob_id == 0x2AC and len(data) >= 8:
self.latest_data[0].update({
"Ploop_sp": int.from_bytes(data[0:2], 'little') / 1.0,
"Pdilute_sp": int.from_bytes(data[2:4], 'little') / 1.0,
"Qdrain_sp": int.from_bytes(data[4:6], 'little') / 1.0,
"TankLevel": int.from_bytes(data[6:8], 'little') / 1.0,
})
elif cob_id == 0x2B3 and len(data) >= 8:
self.latest_data[0].update({
"Inlet_flow": int.from_bytes(data[0:2], 'little') / 10.0,
"Outlet_flow": int.from_bytes(data[2:4], 'little') / 10.0,
"Pressure_perm": int.from_bytes(data[4:6], 'little') / 1000.0,
"Pressure_ro": int.from_bytes(data[6:8], 'little') / 1000.0,
})
# # ========== PU1 DRIFT CHECK ==========
# if cob_id in (0x2A6, 0x2A8): # FM1 or MV03_sp updates for PU1
# mv03_sp = self.latest_data[1].get("MV03_sp")
# qdrain = self.latest_data[1].get("FM1")
# if mv03_sp is not None and qdrain is not None:
# if mv03_sp <= 0 or qdrain <= 0:
# print(f"🔇 Skipping PU1 drift check (idle) → MV03_sp: {mv03_sp:.2f}, Qdrain: {qdrain:.2f}")
# elif detect_mv03_drift(mv03_sp, qdrain):
# print(f"⚠️ Drift detected on PU1 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")
# else:
# print(f"✅ No drift on PU1 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")
# # ========== PU2 DRIFT CHECK ==========
# if cob_id in (0x2AD, 0x2AF): # FM1 or MV03_sp updates for PU2
# mv03_sp = self.latest_data[2].get("MV03_sp")
# qdrain = self.latest_data[2].get("FM1")
# if mv03_sp is not None and qdrain is not None:
# if mv03_sp <= 0 or qdrain <= 0:
# print(f"🔇 Skipping PU2 drift check (idle) → MV03_sp: {mv03_sp:.2f}, Qdrain: {qdrain:.2f}")
# elif detect_mv03_drift(mv03_sp, qdrain):
# print(f"⚠️ Drift detected on PU2 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")
# else:
# print(f"✅ No drift on PU2 → MV03_sp: {mv03_sp:.2f} vs Qdrain: {qdrain:.2f}")
except Exception as e:
print(f"[TPDO PARSE ERROR] {e}")
def get_latest_data(self, pu_number: int):
"""
Retrieve the latest real-time data for the given PU.
:param pu_number: 1 or 2
:return: Dictionary of flow, pressure, valve data
"""
with self.lock:
return self.latest_data.get(pu_number, {}).copy()
def read_current_state(self, pu_number: int):
"""
Get the system mode (decoded string) of the given PU.
:param pu_number: 1 or 2
:return: State name or "Offline"
"""
state_val = self.latest_data.get(pu_number, {}).get(f"PU{pu_number}_STATE")
return self.decode_state(state_val) if state_val is not None else "Offline"
def decode_state(self, state_val: int) -> str:
"""
Convert system state integer to human-readable label.
:param state_val: Integer value from TPDO
:return: String state name
"""
state_map = {
0: "SYSTEM_MODE_INIT",
1: "SYSTEM_MODE_OFF",
2: "SYSTEM_MODE_READY",
3: "SYSTEM_MODE_PRODUCTION",
4: "SYSTEM_MODE_LOW_LOOP_PRESSURE",
5: "SYSTEM_MODE_LOOP_CLEANING",
6: "SYSTEM_MODE_HEATING_RO",
7: "SYSTEM_MODE_RINSING_RO",
8: "SYSTEM_MODE_HEATING_EDI",
9: "SYSTEM_MODE_COOLING_EDI",
10: "SYSTEM_MODE_RO_FLUSH",
11: "SYSTEM_MODE_RO_RINSE",
12: "SYSTEM_MODE_EDI_RINSE",
15: "SYSTEM_MODE_FAIL_SAFE",
16: "SYSTEM_MODE_FIRST_FLUSH",
255: "SYSTEM_MODE_DEFAULT"
}
return state_map.get(state_val, f"UNKNOWN({state_val})")
def send_state_command(self, state: str, pu_number: int, ploop_setpoint: float, qperm_setpoint : float): # TODO : use qperm_setpoint
"""
Send the PU state and pressure loop setpoint to the master node.
:param state: State string (e.g., "PRODUCTION")
:param pu_number: PU1 or PU2
:param ploop_setpoint: Float setpoint in bar (will be scaled)
"""
if not self.connected:
raise RuntimeError("CAN not connected")
state_map = {
"IDLE": 1,
"PRE-PRODUCTION": 2,
"PRODUCTION": 3,
"MAINTENANCE": 8,
"EMERGENCY_STOP": 9,
"FIRST_START": 10
}
if state not in state_map:
raise ValueError(f"Invalid state: {state}")
try:
master_node = self.nodes.get(0)
if master_node is None:
raise ValueError("Master node not connected")
state_index = 0x3000
setpoint_index = 0x3001
print(f"[DEBUG] Writing state {state_map[state]} to master OD 0x{state_index:04X}:{pu_number:02X}")
master_node.sdo[state_index][pu_number].raw = state_map[state] & 0xFF
print(f"[DEBUG] Writing ploop_setpoint {ploop_setpoint} to master OD 0x{setpoint_index:04X}:{pu_number:02X}")
master_node.sdo[setpoint_index][1].raw = int(ploop_setpoint * 100)
print(f"[DEBUG] Writing qperm_setpoint {qperm_setpoint} to master OD 0x{setpoint_index:04X}:{pu_number:02X}")
master_node.sdo[setpoint_index][5].raw = int(qperm_setpoint)
except Exception as e:
print(f"[MASTER SDO WRITE ERROR] PU{pu_number}: {e}")
raise
def send_thermal_loop_cleaning(self, mode: str, pu_number: int):
"""
Activate or deactivate thermal loop cleaning mode.
:param mode: "IDLE" or "ACTIVE"
:param pu_number: PU1 or PU2
"""
if not self.connected:
raise RuntimeError("CAN not connected")
mode_map = {
"IDLE": 0,
"ACTIVE": 1
}
if mode not in mode_map:
raise ValueError(f"Invalid thermal loop mode: {mode}")
try:
node = self.nodes.get(pu_number)
if node is None:
raise ValueError(f"PU{pu_number} not connected")
print(f"[DEBUG] Sending thermal loop mode {mode} to 0x2024:{pu_number}")
node.sdo[0x2024][pu_number].raw = mode_map[mode]
except Exception as e:
print(f"[THERMAL LOOP ERROR] PU{pu_number}: {e}")
raise
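
For reference, the flow-meter frames handled above (COB-ID 0x2A6 for PU1, 0x2AD for PU2) pack four unsigned 16-bit little-endian values that the listener divides by 100 and multiplies by 60. A self-contained sketch of that decode, with a made-up payload:

# Illustration of the 0x2A6 / 0x2AD flow-frame decode used in _can_listener_loop.
# The payload bytes below are made up for the example.
def decode_flow_frame(data: bytes) -> dict:
    if len(data) < 8:
        raise ValueError("flow frame needs 8 bytes (4 x uint16, little-endian)")
    return {
        f"FM{i + 1}": int.from_bytes(data[2 * i:2 * i + 2], 'little') / 100.0 * 60.0
        for i in range(4)
    }

sample = bytes([0xE8, 0x03, 0xD0, 0x07, 0x00, 0x00, 0x64, 0x00])  # raw counts 1000, 2000, 0, 100
print(decode_flow_frame(sample))  # {'FM1': 600.0, 'FM2': 1200.0, 'FM3': 0.0, 'FM4': 60.0}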

File diff suppressed because it is too large

30
hardware/patient_skid.py Normal file

@@ -0,0 +1,30 @@
import httpx
import logging
from fastapi import HTTPException  # needed for the error responses raised in set_patient_skid_users
def handle_patient_skid_for_idle() -> None:
"""Send the special commands to patient skid when entering IDLE."""
try:
url = "http://192.168.1.28:8000/stop_test"
response = httpx.get(url, timeout=1.0)
logging.info(f"Stopping test on Patient Skid: {response.status_code}")
url = "http://192.168.1.28:8000/close_valves"
response = httpx.get(url, timeout=1.0)
logging.info(f"Closing valves on Patient Skid: {response.status_code}")
except Exception as e:
logging.error(f"Error handling patient skid for IDLE: {e}")
raise
def set_patient_skid_users(count: int = 0):
try:
url = f"http://192.168.1.28:8000/set_users/{count}"
response = httpx.get(url, timeout=5.0)
response_2 = httpx.get("http://192.168.1.28:8000/start_defined_test", timeout=5.0)
if response.status_code == 200:
return {"status": "success", "detail": response.json()}
else:
raise HTTPException(status_code=502, detail=f"Remote server error: {response.text}")
except httpx.RequestError as e:
raise HTTPException(status_code=500, detail=f"Request to external server failed: {str(e)}")
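
The helper module above drives the patient skid over plain HTTP GET requests against a hard-coded address (192.168.1.28:8000). A minimal sketch combining the two helpers into a small load ramp, mirroring the auto-test sequence in the main.py diff below; the timings are illustrative.

# Illustrative only: ramp the simulated patient load up, then return the skid to idle.
import time
from hardware.patient_skid import set_patient_skid_users, handle_patient_skid_for_idle

set_patient_skid_users(5)        # 5 simulated patients; also starts the defined test
time.sleep(60)
set_patient_skid_users(10)       # step up to 10 patients
time.sleep(60)
handle_patient_skid_for_idle()   # stop the test and close the skid valves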

510
main.py

@@ -1,47 +1,44 @@
from fastapi import FastAPI, HTTPException, Query, Form, Depends
from fastapi import FastAPI, HTTPException, Form
from fastapi.staticfiles import StaticFiles
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
import logging
import os
from fastapi import Request, APIRouter
import platform
from fastapi.templating import (
Jinja2Templates,
) # pip install fastapi uvicorn jinja2 python-multipart passlib
from fastapi.templating import Jinja2Templates
from starlette.middleware.sessions import SessionMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.status import HTTP_302_FOUND
import json
from pathlib import Path
from typing import Optional, Dict, Any
from typing import Dict, Any
from fastapi import Query
import asyncio
import datetime
from valveBackend import ValveBackend
import csv
from collections import deque
import numpy as np
import aiohttp
import httpx
from hardware.patient_skid import handle_patient_skid_for_idle, set_patient_skid_users
from serial_manager import SerialConfig, SerialStore, SerialReader
from protocol_decoder import decode_frames
from serial_csv_logger import SerialCsvLogger # <-- CSV logger
if platform.system() in ["Darwin"]: # macOS or Windows
from MockCAN import CANBackend
logging.basicConfig(level=logging.INFO)
else:
from hardware.classCAN import CANBackend # Your real backend
logging.basicConfig(level=logging.INFO)
else:
from classCAN import CANBackend # Your real backend
logging.basicConfig(level=logging.ERROR)
logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
app = FastAPI()
app.add_middleware(SessionMiddleware, secret_key="your_super_secret_key")
router = APIRouter()
templates = Jinja2Templates(directory="templates")
can_backend = CANBackend()
valve_backend = ValveBackend(
eds_file="/home/hmi/Desktop/HMI/eds_file/inletvalveboard.eds"
)
# Serve static files (HTML, JS, CSS)
app.mount("/static", StaticFiles(directory="static"), name="static")
@@ -52,10 +49,29 @@ latest_data: Dict[str, Any] = {
"PU_1": None,
"PU_2": None,
"PU_3": None,
"DS": None,
"PatientSkid": {"QSkid": 0.0},
}
DEFAULT_FEED_VALVE = 0.0
latest_setpoints: Dict[str, Any] = {
"PU_1": {"Ploop_sp": 0.0, "Qperm_sp": 0.0},
"PU_2": {"Ploop_sp": 0.0, "Qperm_sp": 0.0},
"PU_3": {"Ploop_sp": 0.0, "Qperm_sp": 0.0},
}
active_PUs: list[int] = []
VALID_STATES = {
"IDLE",
"PRE-PRODUCTION",
"PRODUCTION",
"FIRST_START",
"THERMALLOOPCLEANING",
"DISINFECTION",
"SLEEP",
}
# Dictionary to hold running tasks
tasks: dict[str, asyncio.Task] = {}
# RECORDER
recording_flag = False
@@ -66,27 +82,31 @@ write_buffer = deque()
flush_interval = 1.0 # flush every 1 second
last_flush_time = datetime.datetime.now()
# ---- Serial intake globals ----
serial_store = SerialStore(capacity=5000)
serial_reader: SerialReader | None = None
serial_csv: SerialCsvLogger | None = None # <-- added
## LOGGING
def format_data(data):
def format_PU_data(data):
return {
"timestamp": datetime.datetime.now().isoformat(),
"Qperm": np.round(data.get("FM2", 0.0), 1),
"Qdilute": np.round(data.get("FM1", 0.0), 1),
"Qdrain": np.round(data.get("FM4", 0.0), 1),
"Qrecirc": np.round(data.get("FM3", 0.0), 1),
"QdrainEDI": np.round(data.get("FM2", 0.0), 1)- np.round(data.get("FM1", 0.0), 1),
"QdrainEDI": np.round(data.get("FM2", 0.0), 1) - np.round(data.get("FM1", 0.0), 1),
"Pro": np.round(data.get("PS2", 0.0), 2),
"Pdilute": np.round(data.get("PS3", 0.0), 2),
"Pretentate": np.round(data.get("PS1", 0.0), 2),
"Conductivity": np.round(data.get("Cond", 0.0), 1),
"Cfeed": data.get("Conductivity_Feed", 0.0),
"Cperm": data.get("Conductivity_Permeate", 0.0),
"Cdilute": data.get("Conductivity_Product", 0.0),
"MV02": np.round(data.get("MV02", 0.0), 1),
"MV02_sp": np.round(data.get("MV02_sp", 0.0), 1),
"MV03": np.round(data.get("MV03", 0.0), 1),
"MV03_sp": np.round(data.get("MV03_sp", 0.0), 1),
"MV04": np.round(data.get("MV05", 0.0), 1),
"MV04": np.round(data.get("MV04", 0.0), 1),
"MV04_sp": np.round(data.get("MV04_sp", 0.0), 1),
"MV05": np.round(data.get("MV05", 0.0), 1),
"MV05_sp": np.round(data.get("MV05_sp", 0.0), 1),
@@ -96,12 +116,34 @@ def format_data(data):
"MV07_sp": np.round(data.get("MV07_sp", 0.0), 1),
"MV08": np.round(data.get("MV08", 0.0), 1),
"MV08_sp": np.round(data.get("MV08_sp", 0.0), 1),
"Qdrain_sp" : max(60*np.round(data.get("Qdrain_sp", 0.0), 2),350.0),
}
def format_DS_data(data):
q_conso = max(np.round(data.get("Inlet_flow", 0.0), 1) - np.round(data.get("Outlet_flow", 0.0), 1),0)
return {
"timestamp": datetime.datetime.now().isoformat(),
"Qconso": q_conso ,
"TankLevel": np.round(data.get("TankLevel", 0.0), 2),
"Qinlet": np.round(data.get("Inlet_flow", 0.0), 1),
"Qoutlet": np.round(data.get("Outlet_flow", 0.0), 1),
}
## Fetch setpoints
def update_setpoints(p_loop_setpoint : float, q_perm_setpoint : float, pu : int):
global latest_setpoints
pu_key = "PU_"+str(pu)
latest_setpoints[pu_key]["Ploop_sp"] = p_loop_setpoint
latest_setpoints[pu_key]["Qperm_sp"] = q_perm_setpoint
def format_setpoints(pu: int): # This is a bit convoluted (passing from one object to another) but it works
global latest_setpoints, latest_data
pu_key = "PU_" + str(pu)
latest_data[pu_key]["Ploop_sp"] = latest_setpoints[pu_key]["Ploop_sp"]
latest_data[pu_key]["Qperm_sp"] = latest_setpoints[pu_key]["Qperm_sp"]
# CREDENTIALS
# Load users from JSON file at startup
CREDENTIAL_PATH = Path("credentials.json")
if CREDENTIAL_PATH.exists():
with CREDENTIAL_PATH.open("r") as f:
@@ -112,7 +154,6 @@ else:
USERNAME = CREDENTIALS["username"]
PASSWORD = CREDENTIALS["password"]
# ======== LOGIN & SESSION HANDLING ========
def require_login(request: Request):
user = request.session.get("user")
@@ -121,12 +162,10 @@ def require_login(request: Request):
raise StarletteHTTPException(status_code=302, detail="Redirect to login")
return user
@app.get("/", response_class=HTMLResponse)
def login_form(request: Request):
return templates.TemplateResponse("login.html", {"request": request})
@app.post("/login")
def login(request: Request, username: str = Form(...), password: str = Form(...)):
if username == USERNAME and password == PASSWORD:
@@ -136,160 +175,215 @@ def login(request: Request, username: str = Form(...), password: str = Form(...)
"login.html", {"request": request, "error": "Invalid credentials.json"}
)
@app.get("/logout")
def logout(request: Request):
request.session.clear()
return RedirectResponse("/", status_code=HTTP_302_FOUND)
# ======== PROTECTED INTERFACE / STARTUP-SHUTDOWN ========
@app.on_event("startup")
async def startup_event():
# ----- CSV logger -----
global serial_csv
serial_csv = SerialCsvLogger(out_dir="serial_logs", rotate_daily=True)
# ======== PROTECTED INTERFACE ========
# ----- start the serial reader -----
global serial_reader
cfg = SerialConfig(
port=os.getenv("SERIAL_PORT", "/dev/ttyUSB0"),
baudrate=int(os.getenv("SERIAL_BAUD", "115200")),
csv_log_path=None, # disable the generic CSV inside reader; use segregated logger instead
ring_capacity=int(os.getenv("SERIAL_RING", "5000")),
)
serial_reader = SerialReader(
cfg,
serial_store,
decoder=decode_frames,
on_message=(lambda p: serial_csv.log(p)) # write CSV per message type
)
serial_reader.start()
# ----- your existing tasks -----
asyncio.create_task(update_latest_data())
asyncio.create_task(update_latest_flow())
@app.on_event("shutdown")
def _serial_stop():
if serial_reader:
serial_reader.stop()
if serial_csv:
serial_csv.close()
# ======== PAGES ========
@app.get("/control", response_class=HTMLResponse)
def control_page(request: Request):
can_backend.connect()
if request.session.get("user") != USERNAME:
return RedirectResponse("/", status_code=HTTP_302_FOUND)
return templates.TemplateResponse("control.html", {"request": request})
@app.get("/monitor-page", response_class=HTMLResponse)
@app.get("/monitor-DS", response_class=HTMLResponse)
def monitor_page(request: Request):
with open("static/monitor.html") as f:
with open("static/monitor_DS.html") as f:
return HTMLResponse(f.read())
@app.get("/multi-monitor-page", response_class=HTMLResponse)
@app.get("/monitor-PU", response_class=HTMLResponse)
def monitor_page(request: Request):
with open("static/monitor_PU.html") as f:
return HTMLResponse(f.read())
@app.get("/multi-monitor-PU", response_class=HTMLResponse)
def monitor_page(request: Request):
with open("static/multi_pu_dashboard.html") as f:
return HTMLResponse(f.read())
# ======== SERIAL API ========
@app.get("/serial/messages")
def serial_messages(n: int = 100):
return serial_store.latest(min(max(n, 1), 1000))
@app.get("/serial/stats")
def serial_stats():
return serial_store.stats()
@app.get("/serial/snapshot")
def serial_snapshot():
return serial_store.latest_by_id()
# ======== CAN + BACKEND ROUTES ========
@app.post("/connect_toggle")
def connect_toggle():
logging.info("Toggling CAN connection...")
logging.info(f"Toggling CAN connection, CAN is {can_backend.connected}")
if can_backend.connected:
can_backend.shutdown()
logging.info("Shutting down CAN connection...")
return {"connected": False}
else:
success = can_backend.connect()
try:
valve_backend.connect()
except Exception as e:
print(f"Connection error : {e}")
if not success:
raise HTTPException(status_code=500, detail="Connection failed.")
return {"connected": True}
return {"connected": can_backend.connected}
@app.get("/is_connected")
def is_connected():
return {"connected": can_backend.connected}
@app.post("/command/{state}/pu/{pu_number}")
def send_command(state: str, pu_number: int, ploop_setpoint: float = Query(...)):
global DEFAULT_FEED_VALVE
VALID_STATES = {
"IDLE",
"PRE-PRODUCTION",
"PRODUCTION",
"FIRST_START",
"THERMALLOOPCLEANING",
"DISINFECTION",
"SLEEP",
}
# PU CONTROL
def validate_state(state: str) -> str:
"""Normalize and validate the requested state."""
state = state.upper()
if state not in VALID_STATES:
raise HTTPException(status_code=400, detail=f"Invalid state '{state}'")
return state
logging.info(f"Sending state '{state}' to PU {pu_number}")
if state == "PRE-PRODUCTION":
valve_backend.send_command(70)
elif "IDLE":
valve_backend.send_command(DEFAULT_FEED_VALVE)
def expand_pu_number(pu_number: int) -> list[int]:
"""Temporary rule: if PU = 3 → run on [1, 2]."""
return [pu_number] if pu_number != 3 else [1, 2]
def send_command_to_pu(
pu: int, state: str, ploop_setpoint: float, qperm_setpoint: float
) -> dict:
"""Send a state command + update setpoints for one PU."""
state = validate_state(state)
if state == "IDLE":
handle_patient_skid_for_idle()
update_setpoints(ploop_setpoint, qperm_setpoint, pu)
can_backend.send_state_command(state, pu, ploop_setpoint, qperm_setpoint)
current_state = can_backend.read_current_state(pu)
try:
can_backend.send_state_command(state, pu_number, ploop_setpoint)
current_state = can_backend.read_current_state(pu_number)
return {
"status": "success",
"pu": pu,
"command": state,
"pu": pu_number,
"ploop_setpoint": ploop_setpoint,
"qperm_setpoint": qperm_setpoint,
"current_state": current_state,
}
@app.post("/command/{state}/pu/{pu_number}")
def send_command_endpoint(
state: str,
pu_number: int,
ploop_setpoint: float = Query(...),
qperm_setpoint: float = Query(...),
):
logging.info(f"Sending state '{state}' to PU {pu_number}")
pus = expand_pu_number(pu_number)
try:
results = []
for pu in pus:
result = send_command_to_pu(pu, state, ploop_setpoint, qperm_setpoint)
results.append(result)
return {"status": "success", "results": results}
except Exception as e:
logging.error(str(e))
raise HTTPException(status_code=500, detail=str(e))
## MONITORING
@app.get("/api/pu_status")
def get_pu_status():
global active_PUs, latest_setpoints
states = {
"PU1": can_backend.read_current_state(1),
"PU2": can_backend.read_current_state(2),
"PU3": can_backend.read_current_state(3),
}
logging.info(f"[PU STATUS] {states}")
logging.debug(f"[PU STATUS] {states}")
if states["PU1"] == "SYSTEM_MODE_READY":
send_command_to_pu(state="PRODUCTION", pu_number = 1, ploop_setpoint = latest_setpoints["PU_1"]["Ploop_sp"] , qperm_setpoint=latest_setpoints["PU_1"]["Qperm_sp"])
if states["PU2"] == "SYSTEM_MODE_READY":
send_command_to_pu(state="PRODUCTION", pu_number = 2, ploop_setpoint = latest_setpoints["PU_2"]["Ploop_sp"] , qperm_setpoint=latest_setpoints["PU_2"]["Qperm_sp"])
if states["PU3"] == "SYSTEM_MODE_READY":
send_command_to_pu(state="PRODUCTION", pu_number = 3, ploop_setpoint = latest_setpoints["PU_3"]["Ploop_sp"] , qperm_setpoint=latest_setpoints["PU_3"]["Qperm_sp"])
active_PUs = [
index + 1
for index, (pu, status) in enumerate(states.items())
if status != "Offline"
]
logging.debug(f"[ACTIVE PU] {active_PUs}")
return JSONResponse(content=states)
async def update_latest_data():
global active_PUs
while True:
for pu in [
1,
2,
]: # TODO: REPLACE THIS WITH CONNECTED PUs, IS GET PU STATUS SLOW?
# DS
data = can_backend.get_latest_data(pu_number=0)
latest_data["DS"] = format_DS_data(data)
# PUs
for pu in active_PUs:
data = can_backend.get_latest_data(pu_number=pu)
latest_data[f"PU_{pu}"] = format_data(data)
current_data = latest_data[f"PU_{pu}"]
logging.debug(f"[MONITOR BUFFER] PU{pu}: {current_data}")
# logging.info(f"[MONITOR BUFFER] latest_data: {latest_data}")
latest_data[f"PU_{pu}"] = format_PU_data(data)
format_setpoints(pu)
logging.debug(f"[MONITOR DS BUFFER] latest_data: {latest_data}")
await asyncio.sleep(0.05)
@app.get("/monitor")
async def get_monitor_data(pu_number: Optional[float] = Query(None)):
print(f"pu_number is {pu_number}")
if pu_number is not None:
return latest_data.get(f"PU_{pu_number}", {})
else:
# print(latest_data)
async def get_monitor_data():
return latest_data
@app.on_event("startup")
async def startup_event():
asyncio.create_task(update_latest_data())
asyncio.create_task(update_latest_flow())
@app.get("/can_status")
def can_status():
"""Return current CAN connection status."""
return {"connected": can_backend.connected}
@app.post("/command/feed_valve")
def feedvalve_control(MV01_opening: int = Query(...)):
"""Control MV01 feed valve"""
global DEFAULT_FEED_VALVE
DEFAULT_FEED_VALVE = MV01_opening
valve_backend.send_command(MV01_opening)
logging.info(f"Feed valve opening to {MV01_opening}")
return {"status": "ok"}
# LOCAL RECORDER
@app.post("/start_recording")
async def start_recording():
# --- internal helpers (not endpoints) ---
async def start_recording_internal():
global recording_flag, recording_task, recording_file, recording_writer
if recording_flag:
raise HTTPException(status_code=400, detail="Already recording.")
logging.warning("Recording already in progress.")
return None
now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"recording_{now}.csv"
@@ -297,22 +391,29 @@ async def start_recording():
filepath = os.path.join("recordings", filename)
recording_file = open(filepath, "w", newline="")
fieldnames = ["timestamp", "pu", "QSkid"] + list(format_data({}).keys())
fieldnames_common = ["timestamp", "pu", "QSkid"]
fieldnames_DS = list(format_DS_data({}).keys())
fieldnames_DS.pop(0)
fieldnames_PUs = list(format_PU_data({}).keys())
fieldnames_PUs.pop(0)
fieldnames = fieldnames_common + fieldnames_DS + fieldnames_PUs + ["Qperm_sp", "Ploop_sp"]
recording_writer = csv.DictWriter(recording_file, fieldnames=fieldnames)
recording_writer.writeheader()
recording_flag = True
recording_task = asyncio.create_task(record_data_loop())
logging.info(f"[RECORDING STARTED] File: {filepath}")
return {"status": "recording started", "file": filename}
return filename
@app.post("/stop_recording")
async def stop_recording():
async def stop_recording_internal():
global recording_flag, recording_task, recording_file
if not recording_flag:
raise HTTPException(status_code=400, detail="Not recording.")
logging.warning("No active recording to stop.")
return False
recording_flag = False
if recording_task:
@@ -324,8 +425,29 @@ async def stop_recording():
recording_file = None
logging.info("[RECORDING STOPPED]")
return True
# --- API endpoints ---
@app.post("/start_recording")
async def start_recording():
filename = await start_recording_internal()
if not filename:
raise HTTPException(status_code=400, detail="Already recording.")
return {"status": "recording started", "file": filename}
@app.post("/stop_recording")
async def stop_recording():
success = await stop_recording_internal()
if not success:
raise HTTPException(status_code=400, detail="Not recording.")
return {"status": "recording stopped"}
@app.get("/is_recording")
async def is_recording():
"""Return True if recording is on, False otherwise"""
return JSONResponse(content={"recording": recording_flag})
async def record_data_loop():
global recording_writer, recording_file, write_buffer, last_flush_time
@@ -334,107 +456,151 @@ async def record_data_loop():
timestamp = datetime.datetime.now().isoformat()
for pu, data in latest_data.items():
if data:
row = {
"timestamp": timestamp,
"pu": pu,
**data
}
row = {"timestamp": timestamp, "pu": pu, **data}
recording_writer.writerow(row)
# Flush every flush_interval seconds
if (
datetime.datetime.now() - last_flush_time
).total_seconds() >= flush_interval:
if (datetime.datetime.now() - last_flush_time).total_seconds() >= flush_interval:
recording_file.flush()
last_flush_time = datetime.datetime.now()
await asyncio.sleep(0.05) # 10 Hz
## AUTOMATIC TESTING
async def send_command_with_delay(state: str, pu: int, delay_s: int = 0, ploop_setpoint: float = 0.0):
async def send_command_with_delay(
state: str,
pu: int,
delay_s: int = 0,
ploop_setpoint: float = 2.5,
qperm_setpoint: float = 1200.0,
):
await asyncio.sleep(delay_s)
logging.info(f"[AUTO TEST] Sending {state} to PU{pu} after {delay_s}s")
can_backend.send_state_command(state, pu, ploop_setpoint)
try:
result = send_command_to_pu(pu, state, ploop_setpoint, qperm_setpoint)
except Exception as e:
logging.error(f"[AUTO TEST] Failed to send {state} to PU{pu}: {e}")
return {"status": "error", "detail": str(e)}
async def set_patients_with_delay(count: int, delay_s: int):
await asyncio.sleep(delay_s)
logging.info(f"[AUTO TEST] Sending {count} patients to patient skid after {delay_s}s")
set_patient_skid_users(count)
@router.post("/test/auto/1")
async def auto_test_pu1(ploop_setpoint: float = Query(0.0)):
pu = 1
logging.info("[AUTO TEST] Starting automatic test for 1 PU")
asyncio.create_task(run_auto_test_pu1(pu, ploop_setpoint))
return {"status": "started", "pu": pu}
@router.post("/test/auto/2")
async def auto_test_pu2(ploop_setpoint: float = Query(0.0)):
logging.info("[AUTO TEST] Starting automatic test for 2 PUs")
asyncio.create_task(run_auto_test_pu2(ploop_setpoint))
return {"status": "started", "pu": [1, 2]}
@router.post("/test/auto/{pu_number}")
async def auto_test(pu_number: int):
"""
Start automatic test for PU1 or PU2.
"""
global tasks
async def run_auto_test_pu1(pu: int, ploop_setpoint: float):
await send_command_with_delay("PRE-PRODUCTION", pu, delay_s=0, ploop_setpoint=ploop_setpoint)
await send_command_with_delay("PRODUCTION", pu, delay_s=180, ploop_setpoint=ploop_setpoint)
await set_patients_with_delay(5, delay_s=60)
await set_patients_with_delay(10, delay_s=60)
await send_command_with_delay("IDLE", pu, delay_s=60, ploop_setpoint=ploop_setpoint)
logging.info(f"[AUTO TEST] Starting automatic test for PU{pu_number}")
key = f"pu{pu_number}"
if key in tasks and not tasks[key].done():
tasks[key].cancel()
logging.info(f"[AUTO TEST] PU{pu_number} Cancelled")
await start_recording_internal()
logging.info("[AUTO TEST] Recorder started")
if pu_number == 1:
task = asyncio.create_task(run_auto_test_1())
result = {"status": "started", "pu": 1}
elif pu_number == 2:
task = asyncio.create_task(run_auto_test_2())
result = {"status": "started", "pu": [1, 2]}
elif pu_number == 3:
task = asyncio.create_task(run_auto_test_3())
result = {"status": "started", "pu": 3}
else:
return {"status": "error", "message": "Invalid PU number"}
tasks[key] = task
return result
@router.post("/test/auto/stop/{pu}")
async def stop_auto_test(pu: int):
global tasks
key = f"pu{pu}"
logging.info(f"[AUTO TEST] Stopping {pu}")
await stop_recording_internal()
logging.info("[AUTO TEST] Recorder stopped")
if key in tasks and not tasks[key].done():
tasks[key].cancel()
await send_command_with_delay("IDLE", pu =pu, delay_s=0)
logging.info(f"[AUTO TEST] Test of {key} canceled and PU stopped")
return {"status": "stopped", "pu": pu}
logging.info(f"[AUTO TEST] Stopping {pu} No test Runining")
return {"status": "no task running", "pu": pu}
async def run_auto_test_1(pu: int = 1):
try:
await send_command_with_delay("PRE-PRODUCTION", pu = pu, delay_s=0, ploop_setpoint=2.5, qperm_setpoint=1200.0)
await asyncio.sleep(180) # Starting time of the machine
await set_patients_with_delay(5, delay_s=10)
await set_patients_with_delay(10, delay_s=20)
await set_patients_with_delay(0, delay_s=20)
await send_command_with_delay("IDLE", pu =pu, delay_s=20, ploop_setpoint=2.5, qperm_setpoint=1200.0)
logging.info("[AUTO TEST] Finished PU1 test")
await stop_recording_internal()
logging.info("[AUTO TEST] Recorder stopped")
except asyncio.CancelledError:
logging.info(f"[AUTO TEST] PU 1 task cancelled")
raise
async def run_auto_test_pu2(ploop_setpoint: float):
# Step 1: Run PU1 test
await run_auto_test_pu1(1, ploop_setpoint)
async def run_auto_test_2():
try:
await send_command_with_delay("PRE-PRODUCTION", pu=1, delay_s=0, ploop_setpoint=2.5, qperm_setpoint=1200.0)
await send_command_with_delay("PRE-PRODUCTION", pu=1, delay_s=90, ploop_setpoint=2.5, qperm_setpoint=1200.0)
await asyncio.sleep(90) # Starting time of the machine
await set_patients_with_delay(5, delay_s=10)
await set_patients_with_delay(10, delay_s=40)
await asyncio.sleep(100)
await send_command_with_delay("IDLE", pu=1, delay_s=0, ploop_setpoint=2.5, qperm_setpoint=1200.0)
await send_command_with_delay("IDLE", pu=2, delay_s=10, ploop_setpoint=2.5, qperm_setpoint=1200.0)
# Step 2: PU2 sequence
await send_command_with_delay("PRE-PRODUCTION", 2, delay_s=0, ploop_setpoint=ploop_setpoint)
await send_command_with_delay("PRODUCTION", 2, delay_s=180, ploop_setpoint=ploop_setpoint)
await set_patients_with_delay(15, delay_s=60)
await set_patients_with_delay(0, delay_s=60)
await send_command_with_delay("IDLE", 2, delay_s=60, ploop_setpoint=ploop_setpoint)
await send_command_with_delay("IDLE", 1, delay_s=60, ploop_setpoint=ploop_setpoint)
logging.info("[AUTO TEST] Finished PU1 + PU2 test")
except asyncio.CancelledError:
logging.info(f"[AUTO TEST] PU 2 task cancelled")
# optional cleanup
raise
async def run_auto_test_3():
try:
# Step 1: Run PU1 test
# await run_auto_test_1()
# TODO : TODO
logging.info("[AUTO TEST] Finished PU1 + PU2 test")
except asyncio.CancelledError:
logging.info(f"[AUTO TEST] PU 2 task cancelled")
# optional cleanup
raise
@router.post("/test/auto/3")
async def auto_test_pu3():
# Call the function for PU3 auto test
logging.info("Start auto test of 3 PU")
return {"status": "started", "pu": 3}
# PATIENT SKID HELPERS
async def update_latest_flow():
global active_PUs
async with aiohttp.ClientSession() as session:
while True:
try:
async with session.get("http://192.168.1.28:8000/instant_flow") as resp:
data = await resp.json()
latest_flow = int(data["log"]["flow"])
latest_flow = int(data["log"])
logging.debug(f"Updated flow: {latest_flow}")
latest_data["PatientSkid"]["QSkid"] = latest_flow
except Exception as e:
logging.error(f"Error fetching flow: {e}")
await asyncio.sleep(1.0)
def set_patient_skid_users(count: int = 1):
try:
url = f"http://192.168.1.28:8000/set_users/{count}"
response = httpx.get(url, timeout=5.0)
if response.status_code == 200:
return {"status": "success", "detail": response.json()}
else:
raise HTTPException(status_code=502, detail=f"Remote server error: {response.text}")
except httpx.RequestError as e:
raise HTTPException(status_code=500, detail=f"Request to external server failed: {str(e)}")
app.include_router(router)
if __name__ == "__main__":
import uvicorn
uvicorn.run(
"main:app",
host="127.0.0.1",

protocol_decoder.py Normal file

@@ -0,0 +1,94 @@
from typing import Dict, Any, List, Tuple
import re
RE_PU_VP = re.compile(r'^P(?P<pu>[1-3])VP$')
RE_PU_CO = re.compile(r'^P(?P<pu>[1-3])CO$')
RE_DOCK_VP = re.compile(r'^D0VP$')
RE_DOCK_CO = re.compile(r'^(D0CO|DOCO)$') # be tolerant
def _to_i(s: str) -> int:
try: return int(s.strip())
except: return 0
def _to_pct(s: str) -> int:
try: return int(s.strip())
except:
try: return int(float(s))
except: return 0
def _to_bool(s: str) -> bool:
return str(s).strip() in ("1","true","True","TRUE")
def _dock_vp(vals: List[str]) -> Dict[str, Any]:
names = ["mv01","mv09","mv10","mv11","mmv01","mmv02","mmv03","sv01","sv02","sv03"]
out: Dict[str, Any] = {}
for k, v in zip(names, vals):
out[k] = _to_bool(v) if k.startswith("sv") else _to_pct(v)
return out
def _dock_co(vals: List[str]) -> Dict[str, Any]:
out: Dict[str, Any] = {}
for name, v in zip(["cs01","cs02"], vals):
q = _to_i(v) # 0.1 µS
out[f"{name}_0p1uS"] = q
out[f"{name}_uS"] = q*0.1
return out
def _pu_vp(pu: int, vals: List[str]) -> Dict[str, Any]:
out: Dict[str, Any] = {"pu": pu}
for k, v in zip(["mv02","mv03","mv04","mv05","mv06","mv07","mv08"], vals):
out[k] = _to_pct(v)
return out
def _pu_co(pu: int, vals: List[str]) -> Dict[str, Any]:
out: Dict[str, Any] = {"pu": pu}
for name, v in zip(["cs03","cs04","cs05"], vals):
q = _to_i(v)
out[f"{name}_0p1uS"] = q
out[f"{name}_uS"] = q*0.1
return out
def decode_frames(buffer: bytes) -> Tuple[List[Tuple[bytes, Dict[str, Any]]], bytes, int]:
msgs: List[Tuple[bytes, Dict[str, Any]]] = []
errors = 0
parts = buffer.split(b"\n")
remaining = parts[-1]
for line in parts[:-1]:
raw = line.strip().rstrip(b"\r")
if not raw: continue
try:
t = raw.decode("utf-8")
fields = [f.strip() for f in t.split(",")]
if len(fields) < 3: raise ValueError("too few fields")
version, msg_id, ts_ms = fields[0], fields[1], fields[2]
data = fields[3:]
parsed: Dict[str, Any] = {"version":version, "msg_id":msg_id, "ts_ms": int(ts_ms)}
if RE_DOCK_VP.match(msg_id):
parsed.update({"src":"dock","type":"valves"})
parsed.update(_dock_vp(data))
elif RE_DOCK_CO.match(msg_id):
parsed.update({"src":"dock","type":"cond"})
parsed.update(_dock_co(data))
else:
m = RE_PU_VP.match(msg_id)
if m:
pu = int(m.group("pu"))
parsed.update({"src":"pu","type":"valves","pu":pu})
parsed.update(_pu_vp(pu, data))
else:
m = RE_PU_CO.match(msg_id)
if m:
pu = int(m.group("pu"))
parsed.update({"src":"pu","type":"cond","pu":pu})
parsed.update(_pu_co(pu, data))
else:
parsed.update({"src":"unknown","type":"raw","data":data})
msgs.append((raw, parsed))
except Exception:
errors += 1
return msgs, remaining, errors
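A quick sketch of how decode_frames consumes a raw serial buffer. The framing ("version,msg_id,ts_ms,field1,..." terminated by "\n") matches the parser above; the sample values are made up for illustration:

from protocol_decoder import decode_frames

buf = b"1,P1VP,123456,10,20,30,40,50,60,70\n1,D0CO,123500,52,61\n1,P2CO,123"
msgs, remaining, errors = decode_frames(buf)
# msgs[0][1] -> {"version": "1", "msg_id": "P1VP", "ts_ms": 123456,
#                "src": "pu", "type": "valves", "pu": 1, "mv02": 10, ..., "mv08": 70}
# msgs[1][1] -> dock conductivity: cs01_0p1uS == 52, cs01_uS == 5.2, cs02_uS == 6.1
# remaining == b"1,P2CO,123" (kept for the next read); errors == 0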

requirements.txt

@@ -1,4 +1,14 @@
fastapi
uvicorn[standard]
python-can
canopen
aiohttp==3.12.14
canopen==2.3.0
fastapi==0.116.1
httpx==0.28.1
matplotlib==3.10.5
numpy==2.3.2
pandas==2.3.2
pyserial==3.5
python_can==4.5.0
seaborn==0.13.2
starlette==0.47.2
uvicorn==0.35.0
jinja2
itsdangerous

serial_csv_logger.py Normal file

@@ -0,0 +1,120 @@
# serial_csv_logger.py
import os, csv, datetime, json
from typing import Dict, Any, Tuple, Optional
class SerialCsvLogger:
"""
Writes parsed serial frames to CSV, segregated by message type:
- D0VP_YYYY-MM-DD.csv (Docking valves)
- D0CO_YYYY-MM-DD.csv (Docking conductivity)
- P1VP_YYYY-MM-DD.csv (PU1 valves), P2VP..., P3VP...
- P1CO_YYYY-MM-DD.csv (PU1 conductivity), etc.
- Unknown_YYYY-MM-DD.csv (for anything unmatched)
"""
def __init__(self, out_dir: str = "serial_logs", rotate_daily: bool = True):
self.out_dir = out_dir
self.rotate_daily = rotate_daily
self._writers: Dict[str, Tuple[csv.DictWriter, Any, str]] = {} # key -> (writer, file, date_str)
os.makedirs(self.out_dir, exist_ok=True)
def close(self):
for _, (_, f, _) in self._writers.items():
try: f.close()
except: pass
self._writers.clear()
# ---------- public API ----------
def log(self, parsed: Dict[str, Any]):
msg_id = parsed.get("msg_id", "Unknown")
date_str = datetime.date.today().isoformat() if self.rotate_daily else "all"
key = f"{msg_id}"
# rotate if day changed
if key in self._writers and self._writers[key][2] != date_str:
self._writers[key][1].close()
del self._writers[key]
writer, _, _ = self._ensure_writer(key, msg_id, date_str)
row = self._build_row(msg_id, parsed)
writer.writerow(row)
# ---------- internals ----------
def _ensure_writer(self, key: str, msg_id: str, date_str: str):
if key in self._writers:
return self._writers[key]
fname = f"{msg_id}_{date_str}.csv"
path = os.path.join(self.out_dir, fname)
f = open(path, "a", newline="")
headers = self._headers_for(msg_id)
writer = csv.DictWriter(f, fieldnames=headers)
# write header only if file is empty
if f.tell() == 0:
writer.writeheader()
self._writers[key] = (writer, f, date_str)
return self._writers[key]
def _headers_for(self, msg_id: str):
# Common header columns shared by all message types
base = ["ts_iso", "ts_ms", "version", "msg_id"]
if msg_id == "D0VP":
return base + ["mv01","mv09","mv10","mv11","mmv01","mmv02","mmv03","sv01","sv02","sv03"]
if msg_id in ("D0CO", "DOCO"):
# write both scaled (uS) and raw (0.1 uS) for traceability
return base + ["cs01_uS","cs01_0p1uS","cs02_uS","cs02_0p1uS"]
if msg_id.endswith("VP") and len(msg_id) == 4 and msg_id[0] == "P":
# P1VP / P2VP / P3VP
return base + ["pu","mv02","mv03","mv04","mv05","mv06","mv07","mv08"]
if msg_id.endswith("CO") and len(msg_id) == 4 and msg_id[0] == "P":
# P1CO / P2CO / P3CO
return base + ["pu","cs03_uS","cs03_0p1uS","cs04_uS","cs04_0p1uS","cs05_uS","cs05_0p1uS"]
# fallback
return base + ["payload_json"]
def _build_row(self, msg_id: str, p: Dict[str, Any]) -> Dict[str, Any]:
ts_iso = datetime.datetime.fromtimestamp(p.get("ts_ms", 0)/1000.0).isoformat() if "ts_ms" in p else ""
row = {"ts_iso": ts_iso, "ts_ms": p.get("ts_ms", ""), "version": p.get("version",""), "msg_id": msg_id}
if msg_id == "D0VP":
row.update({
"mv01": p.get("mv01"), "mv09": p.get("mv09"), "mv10": p.get("mv10"), "mv11": p.get("mv11"),
"mmv01": p.get("mmv01"), "mmv02": p.get("mmv02"), "mmv03": p.get("mmv03"),
"sv01": p.get("sv01"), "sv02": p.get("sv02"), "sv03": p.get("sv03"),
})
return row
if msg_id in ("D0CO", "DOCO"):
row.update({
"cs01_uS": p.get("cs01_uS"), "cs01_0p1uS": p.get("cs01_0p1uS"),
"cs02_uS": p.get("cs02_uS"), "cs02_0p1uS": p.get("cs02_0p1uS"),
})
return row
if msg_id.endswith("VP") and len(msg_id) == 4 and msg_id[0] == "P":
row.update({
"pu": p.get("pu"),
"mv02": p.get("mv02"), "mv03": p.get("mv03"), "mv04": p.get("mv04"),
"mv05": p.get("mv05"), "mv06": p.get("mv06"), "mv07": p.get("mv07"), "mv08": p.get("mv08"),
})
return row
if msg_id.endswith("CO") and len(msg_id) == 4 and msg_id[0] == "P":
row.update({
"pu": p.get("pu"),
"cs03_uS": p.get("cs03_uS"), "cs03_0p1uS": p.get("cs03_0p1uS"),
"cs04_uS": p.get("cs04_uS"), "cs04_0p1uS": p.get("cs04_0p1uS"),
"cs05_uS": p.get("cs05_uS"), "cs05_0p1uS": p.get("cs05_0p1uS"),
})
return row
# Unknown → keep full payload as JSON for later inspection
pay = {k:v for k,v in p.items() if k not in ("version","msg_id","ts_ms")}
row["payload_json"] = json.dumps(pay, separators=(",",":"))
return row
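A minimal usage sketch of the segregated logger, assuming the parsed dicts come from protocol_decoder.decode_frames; each call appends one row to a per-msg_id file such as serial_logs/P1CO_<date>.csv, writing the header only when the file is new:

from serial_csv_logger import SerialCsvLogger

sink = SerialCsvLogger(out_dir="serial_logs", rotate_daily=True)
parsed = {"version": "1", "msg_id": "P1CO", "ts_ms": 123456, "pu": 1, "cs03_0p1uS": 52, "cs03_uS": 5.2}  # illustrative values
sink.log(parsed)   # -> serial_logs/P1CO_<today>.csv gains one row (missing cs04/cs05 cells stay empty)
sink.close()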

serial_manager.py Normal file

@@ -0,0 +1,234 @@
# serial_manager.py
import threading
import time
import csv
from collections import deque
from dataclasses import dataclass
from typing import Any, Callable, Deque, Dict, List, Optional, Tuple
import serial # provided by python3-serial
@dataclass
class SerialConfig:
"""
Configuration for the read-only serial intake.
"""
port: str = "/dev/ttyUSB0"
baudrate: int = 115200
bytesize: int = serial.EIGHTBITS
parity: str = serial.PARITY_NONE
stopbits: int = serial.STOPBITS_ONE
timeout: float = 0.05
rtscts: bool = False
dsrdtr: bool = False
xonxoff: bool = False
ring_capacity: int = 5000
# If set, a single "generic" CSV will be written here (append mode).
# If you want segregated CSVs per message type, leave this as None and
# supply an `on_message` callback that writes where you want.
csv_log_path: Optional[str] = None # e.g. "/home/pi/hmi/serial_log.csv"
class SerialStore:
"""
Thread-safe store for recent parsed messages and intake stats.
Stores parsed dicts as returned by the decoder.
"""
def __init__(self, capacity: int):
self._buf: Deque[Dict[str, Any]] = deque(maxlen=capacity)
self._lock = threading.Lock()
self._stats = {
"frames_in": 0,
"frames_ok": 0,
"frames_bad": 0,
"restarts": 0,
"last_err": "",
}
self._latest_by_id: Dict[str, Dict[str, Any]] = {}
def add(self, msg: Dict[str, Any], ok: bool = True):
with self._lock:
self._buf.append(msg)
self._stats["frames_in"] += 1
if ok:
self._stats["frames_ok"] += 1
else:
self._stats["frames_bad"] += 1
mid = msg.get("msg_id")
if mid:
self._latest_by_id[mid] = msg
def latest(self, n: int = 100) -> List[Dict[str, Any]]:
with self._lock:
return list(self._buf)[-n:]
def latest_by_id(self) -> Dict[str, Dict[str, Any]]:
with self._lock:
return dict(self._latest_by_id)
def stats(self) -> Dict[str, Any]:
with self._lock:
return dict(self._stats)
def set_error(self, err: str):
with self._lock:
self._stats["last_err"] = err
def inc_restart(self):
with self._lock:
self._stats["restarts"] += 1
class SerialReader:
"""
Background read-only serial reader.
Args:
cfg: SerialConfig
store: SerialStore
decoder: function(buffer: bytes) ->
(messages: List[Tuple[raw_frame: bytes, parsed: Dict]], remaining: bytes, errors: int)
on_message: optional callback called for each parsed dict (e.g., segregated CSV logger)
"""
def __init__(
self,
cfg: SerialConfig,
store: SerialStore,
decoder: Callable[[bytes], Tuple[List[Tuple[bytes, Dict[str, Any]]], bytes, int]],
on_message: Optional[Callable[[Dict[str, Any]], None]] = None,
):
self.cfg = cfg
self.store = store
self.decoder = decoder
self.on_message = on_message
self._ser: Optional[serial.Serial] = None
self._th: Optional[threading.Thread] = None
self._stop = threading.Event()
self._buffer = b""
# Optional generic CSV (single file) if cfg.csv_log_path is set
self._csv_file = None
self._csv_writer = None
# ---------- lifecycle ----------
def start(self):
self._stop.clear()
self._open_serial()
self._open_csv()
self._th = threading.Thread(target=self._run, name="SerialReader", daemon=True)
self._th.start()
def stop(self):
self._stop.set()
if self._th and self._th.is_alive():
self._th.join(timeout=2.0)
self._close_serial()
self._close_csv()
# ---------- internals ----------
def _open_serial(self):
try:
self._ser = serial.Serial(
port=self.cfg.port,
baudrate=self.cfg.baudrate,
bytesize=self.cfg.bytesize,
parity=self.cfg.parity,
stopbits=self.cfg.stopbits,
timeout=self.cfg.timeout,
rtscts=self.cfg.rtscts,
dsrdtr=self.cfg.dsrdtr,
xonxoff=self.cfg.xonxoff,
)
except Exception as e:
self.store.set_error(f"Open error: {e}")
self._ser = None
def _close_serial(self):
try:
if self._ser and self._ser.is_open:
self._ser.close()
except Exception:
pass
self._ser = None
def _open_csv(self):
if not self.cfg.csv_log_path:
return
try:
self._csv_file = open(self.cfg.csv_log_path, "a", newline="")
self._csv_writer = csv.writer(self._csv_file)
# Write header only if file is empty (avoid duplicates on restart)
if self._csv_file.tell() == 0:
self._csv_writer.writerow(["ts_ms", "msg_id", "raw_hex", "parsed"])
self._csv_file.flush()
except Exception as e:
self.store.set_error(f"CSV open error: {e}")
self._csv_file = None
self._csv_writer = None
def _close_csv(self):
try:
if self._csv_file:
self._csv_file.close()
except Exception:
pass
self._csv_file = None
self._csv_writer = None
def _log_csv(self, raw: bytes, parsed: Dict[str, Any]):
"""Write to the optional single generic CSV."""
if not self._csv_writer:
return
try:
self._csv_writer.writerow(
[parsed.get("ts_ms"), parsed.get("msg_id"), raw.hex(), parsed]
)
self._csv_file.flush()
except Exception as e:
self.store.set_error(f"CSV write error: {e}")
def _run(self):
backoff = 0.5
while not self._stop.is_set():
if not self._ser or not self._ser.is_open:
# reconnect with exponential backoff (capped)
self._close_serial()
time.sleep(backoff)
self.store.inc_restart()
self._open_serial()
backoff = min(backoff * 1.5, 5.0)
continue
backoff = 0.5
try:
data = self._ser.read(4096) # non-blocking due to timeout
if data:
self._buffer += data
frames, remaining, errors = self.decoder(self._buffer)
self._buffer = remaining
for raw, parsed in frames:
# store
self.store.add(parsed, ok=True)
# optional generic CSV
self._log_csv(raw, parsed)
# optional segregated sink
if self.on_message:
try:
self.on_message(parsed)
except Exception as e:
self.store.set_error(f"CSV sink error: {e}")
# count decode errors
for _ in range(errors):
self.store.add({"error": "decode"}, ok=False)
else:
time.sleep(0.01)
except Exception as e:
self.store.set_error(f"Read/Decode error: {e}")
self._close_serial()
time.sleep(0.5)
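A sketch of how the three serial pieces are meant to compose (assuming main.py wires them roughly like this; the port is the SerialConfig default and may differ on the target machine):

from protocol_decoder import decode_frames
from serial_csv_logger import SerialCsvLogger
from serial_manager import SerialConfig, SerialStore, SerialReader

cfg = SerialConfig(port="/dev/ttyUSB0")           # read-only intake, 115200 8N1
store = SerialStore(capacity=cfg.ring_capacity)   # ring buffer + intake stats
sink = SerialCsvLogger(out_dir="serial_logs")     # segregated per-msg_id CSV files
reader = SerialReader(cfg, store, decode_frames, on_message=sink.log)
reader.start()   # background thread with reconnect/backoff
# ... later, e.g. on application shutdown:
reader.stop()
sink.close()
# store.latest_by_id() / store.stats() can then back HTTP endpoints for live values.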


@@ -1,309 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Live Monitoring Dashboard</title>
<script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
<style>
body {
font-family: Arial, sans-serif;
margin: 0;
padding: 20px;
}
.plot-container {
display: flex;
flex-wrap: wrap;
justify-content: center;
gap: 20px;
}
.large-plot {
width: 45%;
height: 300px;
}
.small-plot {
width: 30%;
height: 250px;
}
h1 {
text-align: center;
}
#recordButton {
background-color: #ff4444;
color: white;
border: none;
padding: 10px 20px;
font-size: 16px;
cursor: pointer;
border-radius: 5px;
margin: 10px;
}
.status-container {
background-color: #f0f0f0;
padding: 10px;
border-radius: 5px;
margin: 10px auto;
text-align: center;
font-size: 18px;
}
</style>
</head>
<body>
<h1 id="pageTitle">Live Monitoring Dashboard</h1>
<div class="status-container">
<p>Current Status: <span id="currentStatus">Loading...</span></p>
</div>
<button id="recordButton" onclick="toggleRecording()">Record</button>
<div class="plot-container">
<div id="flow-plot-1" class="large-plot"></div>
<div id="pressure-plot-1" class="large-plot"></div>
<div id="flow-plot-2" class="large-plot"></div>
<div id="pressure-plot-2" class="large-plot"></div>
<div id="MV02_sp-plot" class="small-plot"></div>
<div id="MV03_sp-plot" class="small-plot"></div>
<div id="MV04_sp-05-plot" class="small-plot"></div>
<div id="MV06_sp-plot" class="small-plot"></div>
<div id="MV07_sp-plot" class="small-plot"></div>
<div id="MV08_sp-plot" class="small-plot"></div>
</div>
<script>
// Extract PU number from URL
const urlParams = new URLSearchParams(window.location.search);
const puNumber = urlParams.get('pu_number') || '1'; // Default to PU 1 if not specified
document.getElementById('pageTitle').textContent = `Live Monitoring Dashboard - PU ${puNumber}`;
let isRecording = false;
let recordedData = [];
let recordingInterval;
let csvFileName = '';
async function toggleRecording() {
const recordButton = document.getElementById('recordButton');
if (!isRecording) {
isRecording = true;
recordButton.style.backgroundColor = '#ff0000';
recordButton.textContent = 'Stop Recording';
recordedData = [];
csvFileName = `monitoring_data_PU${puNumber}_${new Date().toISOString().replace(/[:.]/g, '-')}.csv`;
startRecording();
} else {
isRecording = false;
recordButton.style.backgroundColor = '#ff4444';
recordButton.textContent = 'Record';
stopRecording();
}
}
function startRecording() {
recordingInterval = setInterval(async () => {
const response = await fetch('/monitor');
if (!response.ok) {
console.error(`HTTP error! status: ${response.status}`);
return;
}
const allData = await response.json();
const puData = allData[`PU_${puNumber}`];
const SkidData = allData[`PatientSkid`];
recordedData.push({
timestamp: new Date().toISOString(),
Qperm: puData.Qperm,
Qdilute: puData.Qdilute,
Qdrain: puData.Qdrain,
Qrecirc: puData.Qrecirc,
QdrainEDI: puData.QdrainEDI,
Pro: puData.Pro,
Pdilute: puData.Pdilute,
Pretentate: puData.Pretentate,
MV02_sp: puData.MV02_sp,
MV03_sp: puData.MV03_sp,
MV04_sp: puData.MV04_sp,
MV05_sp: puData.MV05_sp,
MV06_sp: puData.MV06_sp,
MV07_sp: puData.MV07_sp,
MV08_sp: puData.MV08_sp,
QSkid: SkidData.QSkid,
});
}, 100);
}
async function stopRecording() {
clearInterval(recordingInterval);
if (recordedData.length > 0) {
const csvContent = "data:text/csv;charset=utf-8," +
"Timestamp,Qperm,Qdilute,Qdrain,Qrecirc,QdrainEDI,Pro,Pdilute,Pretentate,MV02_sp,MV03_sp,MV04_sp,MV05_sp,MV06_sp,MV07_sp,MV08_sp,QSkid\n" +
recordedData.map(row =>
`${row.timestamp},${row.Qperm},${row.Qdilute},${row.Qdrain},${row.Qrecirc},${row.QdrainEDI},${row.Pro},${row.Pdilute},${row.Pretentate},${row.MV02_sp},${row.MV03_sp},${row.MV04_sp},${row.MV05_sp},${row.MV06_sp},${row.MV07_sp},${row.MV08_sp},${row.QSkid}`
).join("\n");
const encodedUri = encodeURI(csvContent);
const link = document.createElement("a");
link.setAttribute("href", encodedUri);
link.setAttribute("download", csvFileName);
document.body.appendChild(link);
link.click();
}
}
window.onbeforeunload = function() {
if (isRecording) {
stopRecording();
}
};
const maxPoints = 100;
function getLastMinuteRange() {
const now = new Date();
const oneMinuteAgo = new Date(now.getTime() - 60 * 1000);
return [oneMinuteAgo, now];
}
async function updatePlots() {
try {
const response = await fetch('/monitor');
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
const allData = await response.json();
const puData = allData[`PU_${puNumber}`];
const SkidData = allData[`PatientSkid`];
const timestamp = new Date(puData.timestamp);
Plotly.extendTraces('flow-plot-1', {
x: [[timestamp], [timestamp]],
y: [[puData.Qperm], [puData.Qdilute]]
}, [0, 1], maxPoints);
Plotly.extendTraces('flow-plot-2', {
x: [[timestamp], [timestamp], [timestamp], [timestamp]],
y: [[puData.Qdrain], [puData.Qrecirc], [SkidData.QSkid], [puData.QdrainEDI]]
}, [0, 1, 2, 3], maxPoints);
Plotly.extendTraces('pressure-plot-1', {
x: [[timestamp], [timestamp]],
y: [[puData.Pro], [puData.Pretentate]]
}, [0, 1], maxPoints);
Plotly.extendTraces('pressure-plot-2', {
x: [[timestamp]],
y: [[puData.Pdilute]]
}, [0], maxPoints);
Plotly.extendTraces('MV02_sp-plot', { x: [[timestamp]], y: [[puData.MV02_sp]] }, [0], maxPoints);
Plotly.extendTraces('MV03_sp-plot', { x: [[timestamp]], y: [[puData.MV03_sp]] }, [0], maxPoints);
Plotly.extendTraces('MV04_sp-05-plot', {
x: [[timestamp], [timestamp]],
y: [[puData.MV04_sp], [puData.MV05_sp]]
}, [0, 1], maxPoints);
Plotly.extendTraces('MV06_sp-plot', { x: [[timestamp]], y: [[puData.MV06_sp]] }, [0], maxPoints);
Plotly.extendTraces('MV07_sp-plot', { x: [[timestamp]], y: [[puData.MV07_sp]] }, [0], maxPoints);
Plotly.extendTraces('MV08_sp-plot', { x: [[timestamp]], y: [[puData.MV08_sp]] }, [0], maxPoints);
const range = getLastMinuteRange();
const plotIds = ['flow-plot-1', 'flow-plot-2', 'pressure-plot-1', 'pressure-plot-2', 'MV02_sp-plot', 'MV03_sp-plot', 'MV04_sp-05-plot', 'MV06_sp-plot', 'MV07_sp-plot', 'MV08_sp-plot'];
// plotIds.forEach(id => {
// Plotly.relayout(id, { 'xaxis.range': range });
// });
} catch (error) {
console.error("Error updating plots:", error);
}
}
async function fetchPUStatus() {
try {
const response = await fetch("/api/pu_status");
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
const data = await response.json();
const status = data[`PU${puNumber}`] || "Unknown";
document.getElementById("currentStatus").textContent = status;
} catch (error) {
console.error("Error fetching PU status:", error);
document.getElementById("currentStatus").textContent = "Error fetching status";
}
}
function initPlots() {
const time0 = [new Date()];
Plotly.newPlot('flow-plot-1', [
{ x: time0, y: [0], name: 'Qperm', mode: 'lines', line: { color: 'blue' } },
{ x: time0, y: [0], name: 'Qdilute', mode: 'lines', line: { color: 'green' } }
], {
title: 'Qperm and Qdilute Flow Rates Over Time',
xaxis: { title: 'Time', type: 'date' },
yaxis: { title: 'Flow (L/h)' }
});
Plotly.newPlot('flow-plot-2', [
{ x: time0, y: [0], name: 'Qdrain', mode: 'lines', line: { color: 'red' } },
{ x: time0, y: [0], name: 'Qrecirc', mode: 'lines', line: { color: 'orange' } },
{ x: time0, y: [0], name: 'QSkid', mode: 'lines', line: { color: 'green' } },
{ x: time0, y: [0], name: 'QdrainEDI', mode: 'lines', line: { color: 'blue' } }
], {
title: 'Qdrain, Qrecirc, Qskid and QdrainEDI Flow Rates Over Time',
xaxis: { title: 'Time', type: 'date' },
yaxis: { title: 'Flow (L/h)' }
});
Plotly.newPlot('pressure-plot-1', [
{ x: time0, y: [0], name: 'Pro', mode: 'lines', line: { color: 'purple' } },
{ x: time0, y: [0], name: 'Pretentate', mode: 'lines', line: { color: 'gray' } }
], {
title: 'Pro and Pretentate Pressure Over Time',
xaxis: { title: 'Time', type: 'date' },
yaxis: { title: 'Pressure (bar)' }
});
Plotly.newPlot('pressure-plot-2', [
{ x: time0, y: [0], name: 'Pdilute', mode: 'lines', line: { color: 'teal' } }
], {
title: 'Pdilute Pressure Over Time',
xaxis: { title: 'Time', type: 'date' },
yaxis: { title: 'Pressure (bar)' }
});
Plotly.newPlot('MV02_sp-plot', [{
x: time0, y: [0], name: 'MV02_sp', mode: 'lines'
}], {
title: 'MV02_sp (%)', yaxis: { }, xaxis: { type: 'date' }
});
Plotly.newPlot('MV03_sp-plot', [{
x: time0, y: [0], name: 'MV03_sp', mode: 'lines'
}], {
title: 'MV03_sp (%)', yaxis: { }, xaxis: { type: 'date' }
});
Plotly.newPlot('MV04_sp-05-plot', [
{ x: time0, y: [0], name: 'MV04_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV05_sp', mode: 'lines' }
], {
title: 'MV04_sp + MV05_sp (%)', yaxis: { range: [0, 100] }, xaxis: { type: 'date' }
});
Plotly.newPlot('MV06_sp-plot', [{
x: time0, y: [0], name: 'MV06_sp', mode: 'lines'
}], {
title: 'MV06_sp (%)', yaxis: { }, xaxis: { type: 'date' }
});
Plotly.newPlot('MV07_sp-plot', [{
x: time0, y: [0], name: 'MV07_sp', mode: 'lines'
}], {
title: 'MV07_sp (%)', yaxis: { }, xaxis: { type: 'date' }
});
Plotly.newPlot('MV08_sp-plot', [{
x: time0, y: [0], name: 'MV08_sp', mode: 'lines'
}], {
title: 'MV08_sp (%)', yaxis: { range: [0, 100] }, xaxis: { type: 'date' }
});
setInterval(updatePlots, 500);
}
window.onload = function() {
initPlots();
fetchPUStatus();
setInterval(fetchPUStatus, 5000); // Update status every 5 seconds
};
</script>
</body>
</html>

static/monitor_DS.html Normal file

@@ -0,0 +1,119 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Live Monitoring Dashboard</title>
<script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
<style>
body {
font-family: Arial, sans-serif;
margin: 0;
padding: 20px;
}
.plot-container {
display: flex;
flex-wrap: wrap;
justify-content: center;
gap: 20px;
}
.large-plot {
width: 45%;
height: 300px;
}
.small-plot {
width: 30%;
height: 250px;
}
h1 {
text-align: center;
}
.status-container {
background-color: #f0f0f0;
padding: 10px;
border-radius: 5px;
margin: 10px auto;
text-align: center;
font-size: 18px;
}
</style>
</head>
<body>
<h1 id="pageTitle">Live Monitoring Dashboard - DS</h1>
<div class="status-container">
<p>Current Status: <span id="currentStatus">Loading...</span></p>
</div>
<div class="plot-container">
<div id="tank-level-plot" class="large-plot"></div>
<div id="flow-plot" class="large-plot"></div>
</div>
<script>
const maxPoints = 50;
async function updatePlots() {
try {
const response = await fetch('/monitor');
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
const allData = await response.json();
const dsData = allData['DS'];
const t = new Date(dsData.timestamp);
Plotly.extendTraces('tank-level-plot', {
x: [[t]],
y: [[dsData.TankLevel]]
}, [0], maxPoints);
Plotly.extendTraces('flow-plot', {
x: [[t], [t], [t]],
y: [[dsData.Qconso], [dsData.Qinlet], [dsData.Qoutlet]]
}, [0, 1, 2], maxPoints);
} catch (e) {
console.error("Error updating plots:", e);
}
}
async function fetchDSStatus() {
try {
const res = await fetch("/api/ds_status");
const data = await res.json();
const status = data['DS'] || "Unknown";
document.getElementById("currentStatus").textContent = status;
} catch (e) {
console.error("Error fetching DS status:", e);
document.getElementById("currentStatus").textContent = "Error fetching status";
}
}
function initPlots() {
const time0 = [new Date()];
Plotly.newPlot('tank-level-plot', [
{ x: time0, y: [0], name: 'Tank Level', mode: 'lines' }
], {
title: 'Tank Level',
xaxis: { type: 'date' },
yaxis: { title: 'Level' }
});
Plotly.newPlot('flow-plot', [
{ x: time0, y: [0], name: 'Qconso', mode: 'lines' },
{ x: time0, y: [0], name: 'Qinlet', mode: 'lines' },
{ x: time0, y: [0], name: 'Qoutlet', mode: 'lines' }
], {
title: 'Flow Measurements',
xaxis: { type: 'date' },
yaxis: { title: 'Flow (L/h)' }
});
setInterval(updatePlots, 500);
}
window.onload = function () {
initPlots();
fetchDSStatus();
setInterval(fetchDSStatus, 5000);
};
</script>
</body>
</html>

static/monitor_PU.html Normal file

@@ -0,0 +1,280 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Live Monitoring Dashboard</title>
<script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
<style>
body {
font-family: Arial, sans-serif;
margin: 0;
padding: 20px;
}
.plot-container {
display: flex;
flex-wrap: wrap;
justify-content: center;
gap: 20px;
}
.large-plot {
width: 45%;
height: 300px;
}
.small-plot {
width: 30%;
height: 250px;
}
h1 {
text-align: center;
}
.status-container {
background-color: #f0f0f0;
padding: 10px;
border-radius: 5px;
margin: 10px auto;
text-align: center;
font-size: 18px;
}
</style>
</head>
<body>
<h1 id="pageTitle">Live Monitoring Dashboard</h1>
<div class="status-container">
<p>Current Status: <span id="currentStatus">Loading...</span></p>
</div>
<div class="plot-container">
<div id="flow-plot-1" class="large-plot"></div>
<div id="pressure-plot-1" class="large-plot"></div>
<div id="flow-plot-2" class="large-plot"></div>
<div id="pressure-plot-2" class="large-plot"></div>
<div id="conductivity-plot" class="large-plot"></div>
<div id="MV07-plot" class="small-plot"></div>
<div id="MV02-plot" class="small-plot"></div>
<div id="MV03-plot" class="small-plot"></div>
<div id="MV04_sp-05-plot" class="small-plot"></div>
<div id="MV06-plot" class="small-plot"></div>
<div id="MV08-plot" class="small-plot"></div>
<div id="pump-plot" class="small-plot"></div>
</div>
<script>
const urlParams = new URLSearchParams(window.location.search);
const puNumber = urlParams.get('pu_number') || '1';
document.getElementById('pageTitle').textContent = `Live Monitoring Dashboard - PU ${puNumber}`;
const maxPoints = 50;
async function updatePlots() {
try {
const response = await fetch('/monitor');
if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
const allData = await response.json();
const puData = allData[`PU_${puNumber}`];
const SkidData = allData[`PatientSkid`];
const DSData = allData[`DS`];
const t = new Date(puData.timestamp);
Plotly.extendTraces('flow-plot-1',
{ x: [[t], [t], [t]], y: [[puData.Qperm], [puData.Qdilute], [puData.Qperm_sp]] },
[0, 1, 2],
maxPoints
);
Plotly.extendTraces('flow-plot-2', {
x: [[t], [t], [t], [t], [t], [t]],
y: [[puData.Qdrain], [puData.Qrecirc], [SkidData.QSkid], [puData.QdrainEDI], [DSData.Qconso], [puData.Qdrain_sp]]
}, [0, 1, 2, 3, 4, 5], maxPoints);
Plotly.extendTraces('pressure-plot-1',
{ x: [[t], [t]], y: [[puData.Pro], [puData.Pretentate]] },
[0, 1],
maxPoints
);
Plotly.extendTraces('pressure-plot-2', {
x: [[t], [t]],
y: [[puData.Pdilute], [puData.Ploop_sp]]
}, [0, 1], maxPoints);
Plotly.extendTraces('conductivity-plot', {
x: [[t], [t], [t]],
y: [[puData.Cfeed], [puData.Cperm], [puData.Cdilute]]
}, [0, 1, 2], maxPoints);
Plotly.extendTraces('MV07-plot', {
x: [[t], [t]],
y: [[puData.MV07_sp], [puData.MV07]]
}, [0, 1], maxPoints);
Plotly.extendTraces('MV02-plot', {
x: [[t], [t]],
y: [[puData.MV02_sp], [puData.MV02]]
}, [0, 1], maxPoints);
Plotly.extendTraces('MV03-plot', {
x: [[t], [t]],
y: [[puData.MV03_sp], [puData.MV03]]
}, [0, 1], maxPoints);
Plotly.extendTraces('MV04_sp-05-plot', {
x: [[t], [t], [t], [t]],
y: [[puData.MV04_sp], [puData.MV04], [puData.MV05_sp], [puData.MV05]]
}, [0, 1, 2, 3], maxPoints);
Plotly.extendTraces('MV06-plot', {
x: [[t], [t]],
y: [[puData.MV06_sp], [puData.MV06]]
}, [0, 1], maxPoints);
Plotly.extendTraces('MV08-plot', {
x: [[t], [t]],
y: [[puData.MV08_sp], [puData.MV08]]
}, [0, 1], maxPoints);
Plotly.extendTraces('pump-plot', {
x: [[t]],
y: [[puData.Pump_sp]]
}, [0], maxPoints);
} catch (e) {
console.error("Error updating plots:", e);
}
}
async function fetchPUStatus() {
try {
const res = await fetch("/api/pu_status");
const data = await res.json();
const status = data[`PU${puNumber}`] || "Unknown";
document.getElementById("currentStatus").textContent = status;
} catch (e) {
console.error("Error fetching PU status:", e);
document.getElementById("currentStatus").textContent = "Error fetching status";
}
}
function initPlots() {
const time0 = [new Date()];
Plotly.newPlot('flow-plot-1', [
{ x: time0, y: [0], name: 'Qperm', mode: 'lines' },
{ x: time0, y: [0], name: 'Qdilute', mode: 'lines' },
{ x: time0, y: [0], name: 'Qperm_sp', mode: 'lines', line: { dash: 'dash', color: 'red' } }
], {
title: 'Qperm and Qdilute',
xaxis: { type: 'date' },
yaxis: { title: 'Flow (L/h)' }
});
Plotly.newPlot('flow-plot-2', [
{ x: time0, y: [0], name: 'Qdrain', mode: 'lines' },
{ x: time0, y: [0], name: 'Qrecirc', mode: 'lines' },
{ x: time0, y: [0], name: 'QSkid', mode: 'lines' },
{ x: time0, y: [0], name: 'QdrainEDI', mode: 'lines' },
{ x: time0, y: [0], name: 'Qconso', mode: 'lines' },
{ x: time0, y: [0], name: 'Qdrain_sp', mode: 'lines' },
], {
title: 'Other Flows', xaxis: { type: 'date' }, yaxis: { title: 'Flow (L/h)' }
});
Plotly.newPlot('pressure-plot-1', [
{ x: time0, y: [0], name: 'Pro', mode: 'lines' },
{ x: time0, y: [0], name: 'Pretentate', mode: 'lines' },
], {
title: 'Pro and Pretentate',
xaxis: { type: 'date' },
yaxis: { title: 'Pressure (bar)' }
});
Plotly.newPlot('pressure-plot-2', [
{ x: time0, y: [0], name: 'Pdilute', mode: 'lines' },
{ x: time0, y: [0], name: 'Ploop_sp', mode: 'lines', line: { dash: 'dash', color: 'red' } }
], {
title: 'Pdilute Pressure', xaxis: { type: 'date' }, yaxis: { title: 'Pressure (bar)' }
});
Plotly.newPlot('conductivity-plot', [
{ x: time0, y: [0], name: 'Cfeed', mode: 'lines' },
{ x: time0, y: [0], name: 'Cperm', mode: 'lines' },
{ x: time0, y: [0], name: 'Cdilute', mode: 'lines' }
], {
title: 'Conductivity Measurements',
xaxis: { type: 'date' },
yaxis: { title: 'Conductivity (µS/cm)' }
});
Plotly.newPlot('MV02-plot', [
{ x: time0, y: [0], name: 'MV02_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV02', mode: 'lines' }
], {
title: 'MV02: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
});
Plotly.newPlot('MV03-plot', [
{ x: time0, y: [0], name: 'MV03_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV03', mode: 'lines' }
], {
title: 'MV03: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
});
Plotly.newPlot('MV04_sp-05-plot', [
{ x: time0, y: [0], name: 'MV04_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV04', mode: 'lines' },
{ x: time0, y: [0], name: 'MV05_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV05', mode: 'lines' }
], {
title: 'MV04 & MV05: Setpoints and Actuals', xaxis: { type: 'date' }, yaxis: { range: [0, 100] }
});
Plotly.newPlot('MV06-plot', [
{ x: time0, y: [0], name: 'MV06_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV06', mode: 'lines' }
], {
title: 'MV06: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
});
Plotly.newPlot('MV07-plot', [
{ x: time0, y: [0], name: 'MV07_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV07', mode: 'lines' }
], {
title: 'MV07: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: {}
});
Plotly.newPlot('MV08-plot', [
{ x: time0, y: [0], name: 'MV08_sp', mode: 'lines' },
{ x: time0, y: [0], name: 'MV08', mode: 'lines' }
], {
title: 'MV08: Setpoint vs Actual', xaxis: { type: 'date' }, yaxis: { range: [0, 100] }
});
Plotly.newPlot('pump-plot', [
{ x: time0, y: [0], name: 'Pump_sp', mode: 'lines' },
], {
title: 'Pump: Setpoint ', xaxis: { type: 'date' }, yaxis: { range: [0, 100] }
});
setInterval(updatePlots, 500);
}
window.onload = function () {
initPlots();
fetchPUStatus();
setInterval(fetchPUStatus, 1000);
};
</script>
</body>
</html>


@@ -14,10 +14,27 @@
h1 {
text-align: center;
}
.status-container {
display: flex;
justify-content: center;
gap: 20px;
margin: 10px 0;
padding: 10px;
background-color: #f5f5f5;
border-radius: 8px;
}
.status-box {
padding: 8px 15px;
border-radius: 5px;
background-color: #e0e0e0;
font-weight: bold;
min-width: 100px;
text-align: center;
}
.plot-container {
display: flex;
flex-direction: column;
gap: 10px;
gap: 5px;
align-items: center;
}
.plot {
@@ -28,74 +45,82 @@
</head>
<body>
<h1>Multi-PU Monitoring Dashboard</h1>
<!-- Statuses for each PU -->
<div class="status-container" id="statusContainer">
<div id="PU1-status" class="status-box">PU1: Loading...</div>
<div id="PU2-status" class="status-box">PU2: Loading...</div>
<div id="PU3-status" class="status-box">PU3: Loading...</div>
</div>
<div class="plot-container">
<div id="Qperm-plot" class="plot"></div>
<div id="Pdilute-plot" class="plot"></div>
<div id="Pro-plot" class="plot"></div>
<div id="Qdilute-plot" class="plot"></div>
<div id="Qdrain-plot" class="plot"></div>
<div id="Cdilute-plot" class="plot"></div>
</div>
<script>
const time0 = [new Date()];
const zero = [0];
const maxPoints = 200;
const maxPoints = 100;
const puList = ['PU_1', 'PU_2', 'PU_3'];
const windowMs = 30 * 1000; // 30 seconds
const plots = [
{ id: 'Qperm-plot', quantity: 'Qperm', title: 'Qperm per PU', ref: 1200 },
{ id: 'Qperm-plot', quantity: 'Qperm', title: 'Qperm per PU', refKey: 'Qperm_sp' },
{ id: 'Qdilute-plot', quantity: 'Qdilute', title: 'Qdilute per PU' },
{ id: 'Qdrain-plot', quantity: 'Qdrain', title: 'Qdrain per PU' },
{ id: 'Pro-plot', quantity: 'Pro', title: 'Pro per PU' },
{ id: 'Pdilute-plot', quantity: 'Pdilute', title: 'Pdilute per PU', ref: 2.5 },
{ id: 'Pdilute-plot', quantity: 'Pdilute', title: 'Pdilute per PU' , refKey: 'Ploop_sp'},
{ id: 'Cdilute-plot', quantity: 'Cdilute', title: 'Cdilute per PU' },
];
function makeTraces(quantity) {
return puList.map((pu, i) => ({
x: time0.slice(),
y: zero.slice(),
name: pu,
mode: 'lines',
line: { width: 2 },
legendgroup: pu
}));
}
const plotTraceMap = {}; // track trace indices per plot
function initAllPlots() {
plots.forEach(plot => {
const data = makeTraces(plot.quantity);
const layout = {
title: plot.title,
xaxis: { title: 'Time', type: 'date' },
yaxis: { title: plot.id.includes('P') ? 'Pressure (bar)' : 'Flow (L/h)' },
};
plotTraceMap[plot.id] = { pu: [0,1,2], extra: {} }; // base 3 PUs
// Add ref line if present
if (plot.ref !== undefined) {
data.push({
x: [time0[0], time0[0]],
y: [plot.ref, plot.ref],
mode: 'lines',
line: { dash: 'dash', color: 'red' },
name: `Ref ${plot.ref}`,
showlegend: true
});
}
// Add QSkid trace only for Qperm plot
if (plot.id === 'Qperm-plot') {
if (plot.refKey) {
data.push({
x: time0.slice(),
y: zero.slice(),
name: 'QSkid',
y: [0],
mode: 'lines',
line: { color: 'black', width: 2, dash: 'dot' },
legendgroup: 'PatientSkid'
line: { dash: 'dash', color: 'red' },
name: `${plot.refKey} (PU2)`,
});
plotTraceMap[plot.id].extra.ref = data.length - 1;
}
Plotly.newPlot(plot.id, data, layout);
if (plot.id === 'Qperm-plot') {
data.push({ x: time0.slice(), y: zero.slice(), name: 'QSkid', mode: 'lines' });
plotTraceMap[plot.id].extra.qSkid = data.length - 1;
data.push({ x: time0.slice(), y: zero.slice(), name: 'Qconso', mode: 'lines' });
plotTraceMap[plot.id].extra.qConso = data.length - 1;
}
if (plot.id === 'Qdrain-plot') {
data.push({ x: time0.slice(), y: zero.slice(), name: 'QSkid', mode: 'lines' });
plotTraceMap[plot.id].extra.qSkid = data.length - 1;
data.push({ x: time0.slice(), y: zero.slice(), name: 'Qconso', mode: 'lines' });
plotTraceMap[plot.id].extra.qConso = data.length - 1;
}
Plotly.newPlot(plot.id, data, {
title: plot.title,
xaxis: { type: 'date' },
yaxis: { title: plot.id.includes('P') ? 'Pressure (bar)' : 'Flow (L/h)' }
});
});
}
async function updateAllPlots() {
try {
const res = await fetch('/monitor');
@@ -103,14 +128,15 @@ async function updateAllPlots() {
const allData = await res.json();
const timestamp = new Date();
// SkidData is only fetched once
const SkidData = allData["PatientSkid"] || {};
const DSData = allData["DS"] || {};
const pu2Data = allData["PU_2"] || {}; // <--- take ref values from PU_2
plots.forEach(plot => {
const xUpdates = [];
const yUpdates = [];
// Extend PU1, PU2, PU3 values
puList.forEach(pu => {
const puData = allData[pu] || {};
const value = puData[plot.quantity];
@@ -120,30 +146,78 @@ async function updateAllPlots() {
Plotly.extendTraces(plot.id, { x: xUpdates, y: yUpdates }, puList.map((_, i) => i), maxPoints);
if (plot.ref !== undefined) {
Plotly.extendTraces(plot.id, {
x: [[timestamp]],
y: [[plot.ref]]
}, [puList.length], maxPoints); // the ref line is always the last trace
// Update PU2 reference line dynamically
Plotly.extendTraces(plot.id,
{ x: xUpdates, y: yUpdates },
plotTraceMap[plot.id].pu,
maxPoints
);
if (plot.refKey) {
const refVal = pu2Data[plot.refKey];
Plotly.extendTraces(plot.id,
{ x: [[timestamp]], y: [[refVal ?? null]] },
[plotTraceMap[plot.id].extra.ref],
maxPoints
);
}
// Extend PatientSkid.QSkid only for Qperm plot
if (plot.id === 'Qperm-plot') {
const qSkid = SkidData["QSkid"];
const skidX = [[timestamp]];
const skidY = [[qSkid !== undefined ? qSkid : null]];
const qSkidTraceIndex = puList.length + (plot.ref !== undefined ? 1 : 0); // last trace index
Plotly.extendTraces(plot.id, { x: skidX, y: skidY }, [qSkidTraceIndex], maxPoints);
const qConso = DSData["Qconso"];
Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qSkid ?? null]] }, [plotTraceMap[plot.id].extra.qSkid], maxPoints);
Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qConso ?? null]] }, [plotTraceMap[plot.id].extra.qConso], maxPoints);
}
if (plot.id === 'Qdrain-plot') {
const qSkid = SkidData["QSkid"];
const qConso = DSData["Qconso"];
Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qSkid ?? null]] }, [plotTraceMap[plot.id].extra.qSkid], maxPoints);
Plotly.extendTraces(plot.id, { x: [[timestamp]], y: [[qConso ?? null]] }, [plotTraceMap[plot.id].extra.qConso], maxPoints);
}
// Sliding window (30s)
const layoutUpdate = {
'xaxis.range': [new Date(timestamp - windowMs), timestamp]
};
Plotly.relayout(plot.id, layoutUpdate);
});
} catch (err) {
console.error("Failed to update plots:", err);
}
}
function makeTraces(quantity) {
return puList.map((pu) => ({
x: time0.slice(),
y: zero.slice(),
name: pu,
mode: 'lines',
line: { width: 2 },
legendgroup: pu
}));
}
async function updateStatuses() {
try {
const res = await fetch("/api/pu_status");
const statuses = await res.json();
puList.forEach((pu, i) => {
const el = document.getElementById(`PU${i+1}-status`);
el.textContent = `${pu}: ${statuses[`PU${i+1}`] || "Unknown"}`;
});
} catch (err) {
console.error("Error fetching PU status:", err);
}
}
initAllPlots();
setInterval(updateAllPlots, 1000);
setInterval(updateStatuses, 1000);
</script>
</body>


@@ -1,5 +1,6 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
@@ -17,6 +18,7 @@
display: flex;
flex-direction: column;
}
.header {
background-color: #1e1e1e;
padding: 10px 20px;
@@ -24,6 +26,14 @@
justify-content: space-between;
align-items: center;
}
.header-row {
display: flex;
justify-content: space-between;
width: 100%;
margin-bottom: 5px;
}
.connect-button {
background-color: #ff4444;
color: white;
@@ -36,9 +46,11 @@
align-items: center;
gap: 10px;
}
.connected {
background-color: #00C851;
}
.container {
display: flex;
flex: 1;
@@ -46,17 +58,21 @@
overflow-x: hidden;
box-sizing: border-box;
}
.left-panel, .right-panel {
.left-panel,
.right-panel {
flex: 1;
padding: 20px;
overflow-y: auto;
}
.left-panel {
background-color: #1e1e1e;
display: flex;
flex-direction: column;
gap: 10px;
}
.mode-block {
background-color: #333;
padding: 15px;
@@ -65,10 +81,12 @@
flex-direction: column;
gap: 10px;
}
.pu-buttons {
display: flex;
gap: 10px;
}
.mode-block button {
background-color: #4285F4;
color: white;
@@ -80,39 +98,49 @@
transition: background-color 0.3s;
flex: 1;
}
.mode-block button:hover {
background-color: #3367d6;
}
.mode-block button.active {
background-color: #00C851;
}
.mode-block button.in-progress {
background-color: #ffcc00;
color: #000;
}
.mode-block button.ready {
background-color: #00C851;
color: #fff;
}
.mode-block button.disabled {
background-color: #777;
cursor: not-allowed;
}
.in-progress {
background-color: yellow !important;
color: black !important;
}
.ready {
background-color: orange !important;
color: black !important;
}
.production {
background-color: green !important;
color: white !important;
}
.pu-status {
margin-top: 20px;
}
.pu-item {
background-color: #333;
padding: 10px;
@@ -122,24 +150,28 @@
justify-content: space-between;
align-items: center;
}
.monitor-block {
background-color: #333;
padding: 15px;
border-radius: 5px;
margin-bottom: 15px;
}
.monitor-values {
display: grid;
grid-template-columns: repeat(3, 1fr);
display: flex;
gap: 10px;
margin-top: 10px;
}
.monitor-value {
background-color: #444;
padding: 10px;
text-align: center;
border-radius: 5px;
flex: 1;
}
.slider-container {
background-color: #1e1e1e;
padding: 10px;
@@ -147,12 +179,14 @@
color: #fff;
width: 95%;
}
.slider-container label {
font-size: 1.2rem;
font-weight: bold;
margin-bottom: 10px;
display: block;
}
.slider-values {
display: flex;
justify-content: space-between;
@@ -161,10 +195,12 @@
width: 100%;
overflow: hidden;
}
.slider-values span#currentValue {
font-weight: bold;
color: #00bfff;
}
.slider {
width: 100%;
height: 8px;
@@ -174,62 +210,56 @@
appearance: none;
cursor: pointer;
}
.slider::-webkit-slider-thumb, .slider::-moz-range-thumb {
.slider::-webkit-slider-thumb,
.slider::-moz-range-thumb {
height: 18px;
width: 18px;
background: #007bff;
border-radius: 50%;
cursor: pointer;
}
.monitor-link {
color: white;
background-color: #007bff;
padding: 10px 15px;
padding: 5px 10px;
border-radius: 5px;
text-decoration: none;
font-weight: bold;
font-size: 12px;
}
.monitor-link:hover {
background-color: #0056b3;
}
.feed-valve-buttons {
display: flex;
gap: 10px;
margin-top: 10px;
}
.feed-valve-buttons button {
flex: 1;
padding: 10px;
border: none;
border-radius: 5px;
cursor: pointer;
background-color: #444;
color: white;
}
.feed-valve-buttons button.active {
background-color: #00C851;
}
.monitor-pu-buttons {
display: flex;
gap: 10px;
gap: 5px;
margin: 10px;
}
.monitor-pu-buttons a {
color: white;
background-color: #007bff;
padding: 10px 15px;
padding: 5px 10px;
border-radius: 5px;
text-decoration: none;
font-weight: bold;
font-size: 12px;
}
.monitor-pu-buttons a:hover {
background-color: #0056b3;
}
.button-group {
margin-top: 10px;
display: flex;
justify-content: space-around;
}
.button-group button {
padding: 8px 16px;
font-size: 1rem;
@@ -239,61 +269,78 @@
border: none;
cursor: pointer;
}
.button-group button:hover {
background-color: #005f6b;
}
.auto-running {
background-color: #ffcc00 !important;
/* yellow */
color: black !important;
}
</style>
</head>
<body>
<div class="header">
<div class="header">
<h1>Hydraulic Machine Control</h1>
<div class="monitor-pu-buttons">
<!-- New multi-monitor button -->
<a href="/multi-monitor-page" target="_blank" class="monitor-link">
<a href="/multi-monitor-PU" target="_blank" class="monitor-link">
<i class="fas fa-chart-bar"></i> Monitor All PUs
</a>
<a href="/monitor-page?pu_number=1" target="_blank" class="monitor-link">
<a href="/monitor-PU?pu_number=1" target="_blank" class="monitor-link">
<i class="fas fa-chart-line"></i> Monitor PU 1
</a>
<a href="/monitor-page?pu_number=2" target="_blank" class="monitor-link">
<a href="/monitor-PU?pu_number=2" target="_blank" class="monitor-link">
<i class="fas fa-chart-line"></i> Monitor PU 2
</a>
<a href="/monitor-page?pu_number=3" target="_blank" class="monitor-link">
<a href="/monitor-PU?pu_number=3" target="_blank" class="monitor-link">
<i class="fas fa-chart-line"></i> Monitor PU 3
</a>
<a href="/monitor-DS" target="_blank" class="monitor-link">
<i class="fas fa-chart-line"></i> Monitor DS
</a>
<!-- New Record Button -->
<button id="recordButton" class="connect-button" onclick="toggleRecording()">
<i class="fas fa-circle"></i> Start Recording
</button>
</div>
<button id="connectButton" class="connect-button" onclick="toggleConnection()">
<i class="fas fa-power-off"></i> Connect
<i class="fas fa-power-off"></i> Disconnect
</button>
</div>
<div class="container">
</div>
<div class="container">
<div class="left-panel">
<div class="mode-block">
<div class="pu-buttons">
<button onclick="sendCommand('IDLE', 1, this)" data-action="IDLE" data-pu="1"><i class="fas fa-power-off"></i> IDLE PU 1</button>
<button onclick="sendCommand('IDLE', 2, this)" data-action="IDLE" data-pu="2"><i class="fas fa-power-off"></i> IDLE PU 2</button>
<button onclick="sendCommand('IDLE', 3, this)" data-action="IDLE" data-pu="3"><i class="fas fa-power-off"></i> IDLE PU 3</button>
<button onclick="sendCommand('IDLE', 1, this)" data-action="IDLE" data-pu="1"><i
class="fas fa-power-off"></i> IDLE PU 1</button>
<button onclick="sendCommand('IDLE', 2, this)" data-action="IDLE" data-pu="2"><i
class="fas fa-power-off"></i> IDLE PU 2</button>
<button onclick="sendCommand('IDLE', 3, this)" data-action="IDLE" data-pu="3"><i
class="fas fa-power-off"></i> IDLE BOTH</button>
</div>
</div>
<div class="mode-block">
<div class="pu-buttons">
<button onclick="sendCommand('PRE-PRODUCTION', 1, this)" data-action="PRE-PRODUCTION" data-pu="1"><i class="fas fa-play"></i> PRE-PROD PU 1</button>
<button onclick="sendCommand('PRE-PRODUCTION', 2, this)" data-action="PRE-PRODUCTION" data-pu="2"><i class="fas fa-play"></i> PRE-PROD PU 2</button>
<button onclick="sendCommand('PRE-PRODUCTION', 3, this)" data-action="PRE-PRODUCTION" data-pu="3"><i class="fas fa-play"></i> PRE-PROD PU 3</button>
<button onclick="sendCommand('PRE-PRODUCTION', 1, this)" data-action="PRE-PRODUCTION" data-pu="1"><i
class="fas fa-play"></i> PRE-PROD PU 1</button>
<button onclick="sendCommand('PRE-PRODUCTION', 2, this)" data-action="PRE-PRODUCTION" data-pu="2"><i
class="fas fa-play"></i> PRE-PROD PU 2</button>
<button onclick="sendCommand('PRE-PRODUCTION', 3, this)" data-action="PRE-PRODUCTION" data-pu="3"><i
class="fas fa-play"></i> PRE-PROD BOTH</button>
</div>
</div>
<div class="mode-block">
<div class="pu-buttons">
<button onclick="sendCommand('FIRST_START', 1, this)" data-action="FIRST_START" data-pu="1"><i class="fas fa-power-off"></i> FIRST START PU 1</button>
<button onclick="sendCommand('FIRST_START', 2, this)" data-action="FIRST_START" data-pu="2"><i class="fas fa-power-off"></i> FIRST START PU 2</button>
<button onclick="sendCommand('FIRST_START', 3, this)" data-action="FIRST_START" data-pu="3"><i class="fas fa-power-off"></i> FIRST START PU 3</button>
<button onclick="sendCommand('FIRST_START', 1, this)" data-action="FIRST_START" data-pu="1"><i
class="fas fa-power-off"></i> FIRST START PU 1</button>
<button onclick="sendCommand('FIRST_START', 2, this)" data-action="FIRST_START" data-pu="2"><i
class="fas fa-power-off"></i> FIRST START PU 2</button>
<button onclick="sendCommand('FIRST_START', 3, this)" data-action="FIRST_START" data-pu="3"><i
class="fas fa-power-off"></i> FIRST START BOTH</button>
</div>
</div>
<div class="slider-container">
@@ -303,26 +350,37 @@
<span id="currentValue">2.5</span>
<span id="maxValue">3.5</span>
</div>
<input type="range" min="0.5" max="3.5" step="0.1" value="2.5" id="ploopSetpoint" class="slider" oninput="updatePloopSetpoint(this.value)">
<input type="range" min="0.5" max="3.5" step="0.1" value="2.5" id="ploopSetpoint" class="slider"
oninput="updatePloopSetpoint(this.value)">
</div>
<div class="slider-container">
<label for="qpermSetpoint">Qperm Setpoint (L/h):</label>
<div class="slider-values">
<span id="qpermMin">1200</span>
<span id="qpermCurrent">1200</span>
<span id="qpermMax">1400</span>
</div>
<input type="range" min="1200" max="1400" step="50" value="1200" id="qpermSetpoint" class="slider"
oninput="updateQpermSetpoint(this.value)">
</div>
<div class="mode-block">
<button onclick="sendCommand('ThermalLoopCleaning', 0, this)"><i class="fas fa-fire"></i> Thermal Loop Cleaning</button>
<button onclick="sendCommand('ThermalLoopCleaning', 0, this)"><i class="fas fa-fire"></i> Thermal Loop
Cleaning</button>
</div>
<div class="pu-status">
<div class="pu-item"><span>PU 1: </span><span id="pu1-status">Offline</span></div>
<div class="pu-item"><span>PU 2: </span><span id="pu2-status">Offline</span></div>
<div class="pu-item"><span>PU 3: </span><span id="pu3-status">Offline</span></div>
</div>
<div class="feed-valve-buttons">
<button onclick="setFeedValve(0, this)">Feed Valve 0%</button>
<button onclick="setFeedValve(50, this)">Feed Valve 50%</button>
<button onclick="setFeedValve(100, this)">Feed Valve 100%</button>
</div>
<div class="button-group">
<button onclick="runAutoTest(1)">Automatic Test PU1</button>
<button onclick="runAutoTest(2)">Automatic Test PU2</button>
<button onclick="runAutoTest(3)">Automatic Test PU3</button>
<button onclick="runAutoTest(1)">Automatic Test 1</button>
<button onclick="runAutoTest(2)">Automatic Test 2</button>
<button onclick="runAutoTest(3)">Automatic Test 3</button>
</div>
</div>
<div class="right-panel">
<div class="monitor-block">
@@ -342,8 +400,8 @@
</div>
</div>
<div class="monitor-block">
<h2><i class="fas fa-bolt"></i> Conductivity</h2>
<div class="monitor-values" id="Conductivity">
<h2><i class="fas fa-bolt"></i> Cdilute</h2>
<div class="monitor-values" id="Cdilute">
<div class="monitor-value">#1<br>0.0 µS/cm</div>
<div class="monitor-value">#2<br>0.0 µS/cm</div>
<div class="monitor-value">#3<br>0.0 µS/cm</div>
@@ -352,49 +410,98 @@
<div class="monitor-block">
<h2><i class="fas fa-thermometer-half"></i> Pro</h2>
<div class="monitor-values" id="Pro">
<div class="monitor-value">#1<br>0.0 units</div>
<div class="monitor-value">#2<br>0.0 units</div>
<div class="monitor-value">#3<br>0.0 units</div>
<div class="monitor-value">#1<br>0.0 bar</div>
<div class="monitor-value">#2<br>0.0 bar</div>
<div class="monitor-value">#3<br>0.0 bar</div>
</div>
</div>
<div class="monitor-block">
<div style="display: flex; justify-content: space-between;">
<div id="TankLevel" style="flex: 1; margin-right: 10px;">
<h2><i class="fas fa-tachometer-alt"></i> Tank Level</h2>
<div class="monitor-value"><br>0.0 %</div>
</div>
<div id="Qconso" style="flex: 1; margin-left: 10px;">
<h2><i class="fas fa-exchange-alt"></i> Qconso</h2>
<div class="monitor-value"><br>0.0 L/h</div>
</div>
</div>
</div>
</div>
<script>
</div>
</div>
<script>
function updatePloopSetpoint(value) {
document.getElementById('currentValue').textContent = value;
}
async function toggleConnection() {
const response = await fetch('/connect_toggle', {method: 'POST'});
function updateQpermSetpoint(value) {
document.getElementById('qpermCurrent').textContent = value;
}
async function getConnectionStatus() {
const response = await fetch('/is_connected', { method: 'GET' });
const data = await response.json();
const connectButton = document.getElementById('connectButton');
if (data.connected) {
connectButton.classList.add('connected');
connectButton.innerHTML = '<i class="fas fa-power-off"></i> Disconnect';
} else {
connectButton.classList.remove('connected');
connectButton.innerHTML = '<i class="fas fa-power-off"></i> Connect';
}
}
async function toggleConnection() {
const response = await fetch('/connect_toggle', { method: 'POST' });
const data = await response.json();
await getConnectionStatus();
}
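// Recording state is kept in a local flag and re-synchronised with the backend
// every second via getRecordingStatus(), so the button also reflects recordings
// started or stopped outside this page.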
let isRecording = false;
async function toggleRecording() {
const button = document.getElementById('recordButton');
try {
if (!isRecording) {
await fetch('/start_recording', { method: 'POST' });
button.innerHTML = '<i class="fas fa-stop-circle"></i> Stop Recording';
button.classList.add('connected'); // Optional: green background
} else {
await fetch('/stop_recording', { method: 'POST' });
button.innerHTML = '<i class="fas fa-circle"></i> Start Recording';
button.classList.remove('connected');
}
isRecording = !isRecording;
await getRecordingStatus(); // ✅ refresh button state
} catch (error) {
console.error('Recording toggle failed:', error);
alert('Failed to toggle recording. Check connection.');
}
}
async function getRecordingStatus() {
try {
const response = await fetch('/is_recording', { method: 'GET' });
const data = await response.json();
const button = document.getElementById('recordButton');
isRecording = data.recording;
if (isRecording) {
button.innerHTML = '<i class="fas fa-stop-circle"></i> Stop Recording';
button.classList.add('connected'); // green
button.style.backgroundColor = '#00C851'; // ✅ Green when active
} else {
button.innerHTML = '<i class="fas fa-circle"></i> Start Recording';
button.classList.remove('connected');
button.style.backgroundColor = '#ff4444'; // ✅ Red when off
}
} catch (error) {
console.error('Error fetching recording status:', error);
}
}
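// sendCommand(): POST the requested state for one PU together with the current
// Ploop and Qperm setpoints, then update the button, polling /api/pu_status
// where needed until the PU reports READY or PRODUCTION.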
async function sendCommand(state, puNumber, buttonEl) {
const ploopSetpoint = document.getElementById('ploopSetpoint').value;
const qpermSetpoint = document.getElementById('qpermSetpoint').value;
await fetch(`/command/${state}/pu/${puNumber}?ploop_setpoint=${ploopSetpoint}&qperm_setpoint=${qpermSetpoint}`, { method: 'POST' });
document.querySelectorAll('button').forEach(btn => {
btn.classList.remove('in-progress', 'ready', 'production');
});
@ -402,15 +509,18 @@
buttonEl.classList.add('in-progress');
buttonEl.textContent = `Waiting... PU ${puNumber}`;
buttonEl.disabled = true;
const checkReady = async () => {
const res = await fetch(`/api/pu_status`);
const states = await res.json();
const currentState = states[`PU${puNumber}`];
if (currentState === 'SYSTEM_MODE_READY') {
buttonEl.classList.remove('in-progress');
buttonEl.classList.add('ready');
buttonEl.textContent = `START PRODUCTION PU ${puNumber}`;
buttonEl.disabled = false;
buttonEl.onclick = async () => {
await sendCommand("PRODUCTION", puNumber, buttonEl);
buttonEl.classList.remove('ready');
@ -418,15 +528,28 @@
buttonEl.textContent = `PRODUCTION ON PU ${puNumber}`;
buttonEl.disabled = true;
};
}
else if (currentState === 'SYSTEM_MODE_PRODUCTION') {
// ✅ Directly update if already in production
buttonEl.classList.remove('in-progress');
buttonEl.classList.add('production');
buttonEl.textContent = `PRODUCTION ON PU ${puNumber}`;
buttonEl.disabled = true;
}
else {
setTimeout(checkReady, 1000);
}
};
checkReady();
} else if (state === 'PRODUCTION') {
// ✅ Handles initial load case
buttonEl.classList.add('production');
buttonEl.textContent = `PRODUCTION ON PU ${puNumber}`;
buttonEl.disabled = true;
}
else if (state === 'IDLE' || state === 'FIRST_START') {
buttonEl.classList.remove('in-progress', 'ready', 'production');
buttonEl.classList.add('production');
buttonEl.textContent = `${state.replace('_', ' ')} PU ${puNumber}`;
@ -453,37 +576,32 @@
}
}
}
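// runAutoTest(): toggle button for the automatic test sequence - the first
// click POSTs /test/auto/<pu> to start it, the next click POSTs
// /test/auto/stop/<pu> to stop it.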
async function runAutoTest(puNumber) {
const button = document.querySelector(`button[onclick="runAutoTest(${puNumber})"]`);
if (!button.classList.contains("auto-running")) {
// START test
const res = await fetch(`/test/auto/${puNumber}`, { method: "POST" });
if (res.ok) {
button.classList.add("auto-running");
button.textContent = `Stop Auto Test PU${puNumber}`;
} else {
alert("Failed to start auto test");
}
} else {
// STOP test
const res = await fetch(`/test/auto/stop/${puNumber}`, { method: "POST" });
if (res.ok) {
button.classList.remove("auto-running");
button.textContent = `Automatic Test ${puNumber}`;
} else {
alert("Failed to stop auto test");
}
}
}
async function setFeedValve(opening, buttonEl) {
await fetch(`/command/feed_valve?MV01_opening=${opening}`, {method: 'POST'});
document.querySelectorAll('.feed-valve-buttons button').forEach(btn => {
btn.classList.remove('active');
});
buttonEl.classList.add('active');
}
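// Poll /api/pu_status and refresh the PU status indicators.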
async function fetchPUStatus() {
const response = await fetch("/api/pu_status");
const data = await response.json();
@ -493,7 +611,7 @@
}
fetchPUStatus();
setInterval(fetchPUStatus, 1000);
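// Refresh the detailed per-PU readouts from /monitor once per second.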
async function updateMonitorData() {
const response = await fetch('/monitor');
@ -510,40 +628,30 @@
<div class="monitor-value">P_ro<br>${puData.Pro.toFixed(1)} bar</div>
<div class="monitor-value">P_dilute<br>${puData.Pdilute.toFixed(1)} bar</div>
<div class="monitor-value">P_retentate<br>${puData.Pretentate.toFixed(1)} bar</div>
<div class="monitor-value">Conductivity<br>${puData.Conductivity.toFixed(1)} µS/cm</div>
<div class="monitor-value">Cdilute<br>${puData.Cdilute.toFixed(1)} µS/cm</div>
`;
}
}
function updateMonitorValues(id, values, unit) {
const container = document.getElementById(id);
const valueElements = container.querySelectorAll('.monitor-value');
valueElements.forEach((element, index) => {
if (index < values.length) {
element.innerHTML = `#${index + 1}<br>${values[index]} ${unit}`;
}
});
}
setInterval(updateMonitorData, 1000);
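// Expected /monitor payload shape (inferred from the fields read below; the
// backend may expose additional keys):
// { "PU_1": { "Qperm": 0.0, "Pdilute": 0.0, "Cdilute": 0.0, "Pro": 0.0, ... },
//   "PU_2": { ... }, "PU_3": { ... },
//   "DS":   { "TankLevel": 0.0, "Qconso": 0.0 } }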
async function fetchMonitorData() {
try {
const puLabels = ["PU_1", "PU_2", "PU_3"];
const fields = {
"Qperm": "L/h",
"Pdilute": "bar",
"Conductivity": "µS/cm",
"Cdilute": "µS/cm",
"Pro": "bar"
};
const dataResponse = await fetch('/monitor');
const allData = await dataResponse.json();
for (const [fieldId, unit] of Object.entries(fields)) {
const container = document.getElementById(fieldId);
if (!container) continue;
const valueElements = container.querySelectorAll('.monitor-value');
puLabels.forEach((puLabel, index) => {
const puData = allData[puLabel];
const value = puData && fieldId in puData ? puData[fieldId] : 0.0;
@ -552,13 +660,47 @@ async function fetchMonitorData() {
}
});
}
// Update Tank Level and Qconso
const dsData = allData["DS"];
if (dsData) {
const tankLevelElement = document.querySelector("#TankLevel .monitor-value");
const qconsoElement = document.querySelector("#Qconso .monitor-value");
if (tankLevelElement) {
const tankLevelValue = dsData.TankLevel;
tankLevelElement.innerHTML = `<br>${tankLevelValue.toFixed(1)} %`;
// ✅ Make it red if below 35
if (tankLevelValue < 35) {
tankLevelElement.style.color = "red";
tankLevelElement.style.fontWeight = "bold";
} else {
tankLevelElement.style.color = "white"; // reset to normal
tankLevelElement.style.fontWeight = "normal";
}
}
if (qconsoElement) {
qconsoElement.innerHTML = `<br>${dsData.Qconso.toFixed(1)} L/h`;
} else {
console.error('Element with selector "#Qconso .monitor-value" not found.');
}
}
} catch (error) {
console.error('Error fetching monitor data:', error);
}
}
setInterval(getConnectionStatus, 1000);
getConnectionStatus();
setInterval(fetchMonitorData, 1000);
fetchMonitorData();
setInterval(getRecordingStatus, 1000);
getRecordingStatus();
</script>
</body>
</html>


@ -1,12 +0,0 @@
#!/bin/bash
set -e
echo "[UPDATE] Pulling latest code..."
cd /home/hmi/Desktop/HMI || exit 1
git reset --hard HEAD
git pull origin main
echo "[RESTART] Restarting HMI service..."
sudo /bin/systemctl restart hmi.service
echo "[DONE] HMI updated."

utils/analyze_from_csv.m

@ -0,0 +1,139 @@
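% analyze_from_csv.m - offline analysis of one HMI CSV recording: per-PU plots
% of the main quantities, statistics on the time between logged messages, and
% detailed flow / pressure / motor-valve plots for a selected PU.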
%% Cell 1: Load the data
filename = 'recording_20250806_155908.csv';
opts = detectImportOptions(filename);
opts = setvaropts(opts, 'timestamp', 'Type', 'datetime');
df = readtable(filename, opts);
df_PatientSkid = df(strcmp(df.pu, 'PatientSkid'), :);
%% Cell 2: Multi-PU plots per quantity
reference_lines = struct('Qperm', 1200, 'Pdilute', 2.5);
quantities = {'Qperm', 'Qdilute', 'Qdrain', 'Pro', 'Pdilute','MV07_sp'};
n_quantities = numel(quantities);
pus_all = {'PU_1', 'PU_2', 'PU_3'};
figure('Name', 'Per-PU quantity evolution', 'Position', [100 100 1400 300*n_quantities]);
tiledlayout(n_quantities,1)
for i = 1:n_quantities
quantity = quantities{i};
nexttile
hold on
for j = 1:length(pus_all)
pu = pus_all{j};
df_pu = df(strcmp(df.pu, pu), :);
if any(strcmp(df_pu.Properties.VariableNames, quantity))
plot(df_pu.timestamp, df_pu.(quantity), 'DisplayName', pu,'LineWidth',1.5);
end
end
% Reference lines
if isfield(reference_lines, quantity)
yline(reference_lines.(quantity), '--r');
end
if strcmp(quantity, 'Qdilute') && ismember('QSkid', df_PatientSkid.Properties.VariableNames)
plot(df_PatientSkid.timestamp, df_PatientSkid.QSkid, 'DisplayName', 'QSkid','LineWidth',1.5);
end
ylabel(quantity)
grid on
legend('Location', 'northeast')
if i == n_quantities
xlabel('Timestamp')
end
end
sgtitle('Per-PU quantity evolution')
%% Initial analysis for PU_1
df_pu_1 = df(strcmp(df.pu, 'PU_1'), :);
delta_t = seconds(diff(df_pu_1.timestamp));
figure('Name','Time between messages','Position',[100 100 1000 400])
histogram(delta_t, 10, 'Normalization', 'probability')
title("Time between messages for PU\_1")
xlabel("Δt (seconds)")
ylabel("Probability")
grid on
fprintf("Average time is %.3f seconds\n", mean(delta_t));
%% Plots for all PUs
pus = unique(df.pu);
disp("Available PUs:")
disp(pus)
pus = {'PU_2'}; % Change here if needed
for i = 1:length(pus)
pu = pus{i};
fprintf('\n--- Data for %s ---\n', pu)
plot_pu_data(df, df_PatientSkid, pu);
end
%% PU plotting function (plot_pu_data)
function plot_pu_data(df, df_PatientSkid, pu_name)
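% Plot flow rates, pressures and motor-valve positions (actual vs setpoint)
% for one PU, overlaying the patient skid flow QSkid where it is available.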
df_pu = df(strcmp(df.pu, pu_name), :);
% --------- Plot 1: Flow rates ---------
flow_cols = {'Qperm', 'Qdilute', 'Qdrain', 'Qrecirc'};
available_flows = intersect(flow_cols, df_pu.Properties.VariableNames);
if ~isempty(available_flows)
figure('Name', [pu_name ' - Flow Rates'])
hold on
for i = 1:length(available_flows)
plot(df_pu.timestamp, df_pu.(available_flows{i}), 'DisplayName', available_flows{i},'LineWidth',1.5);
end
if ismember('QSkid', df_PatientSkid.Properties.VariableNames)
plot(df_PatientSkid.timestamp, df_PatientSkid.QSkid, 'DisplayName', 'QSkid','LineWidth',1.5);
end
title([pu_name ' - Flow Rates'])
xlabel("Timestamp")
ylabel("Flow (L/min)")
legend('Location','northeast')
grid on
end
% --------- Plot 2: Pressures ---------
pressure_cols = {'Pro', 'Pdilute', 'Pretentate'};
available_pressures = intersect(pressure_cols, df_pu.Properties.VariableNames);
if ~isempty(available_pressures)
figure('Name', [pu_name ' - Pressures'])
hold on
for i = 1:length(available_pressures)
plot(df_pu.timestamp, df_pu.(available_pressures{i}), 'DisplayName', available_pressures{i},'LineWidth',1.5);
end
title([pu_name ' - Pressures'])
xlabel("Timestamp")
ylabel("Pressure (bar)")
legend('Location','northeast')
grid on
end
% --------- Plot 3: Motorised valves ---------
figure('Name', [pu_name ' - Motor Valve Positions'], 'Position', [100 100 1500 800])
tiledlayout(3,3)
idx = 1;
for mv = 2:8
mv_real = sprintf('MV0%d', mv);
mv_sp = sprintf('MV0%d_sp', mv);
nexttile
if ismember(mv_real, df_pu.Properties.VariableNames) && ...
ismember(mv_sp, df_pu.Properties.VariableNames)
plot(df_pu.timestamp, df_pu.(mv_real), 'b', 'DisplayName', 'Actual','LineWidth',1.5)
hold on
plot(df_pu.timestamp, df_pu.(mv_sp), '--', 'Color', [1 0.5 0], 'DisplayName', 'Setpoint','LineWidth',1.5)
title(mv_real)
ylabel("Position (%)")
legend
grid on
else
axis off
end
idx = idx + 1;
end
sgtitle([pu_name ' - Motor Valve Positions vs Setpoints'])
end


@ -4,12 +4,14 @@
PI_USER="hmi"
PI_HOST="192.168.1.46"
REMOTE_FOLDER="/home/hmi/Desktop/HMI/recordings"
LOCAL_FOLDER="/Users/Etienne/GitHub/NorthStar-HMI"
LOCAL_FOLDER="/Users/Etienne/Library/CloudStorage/OneDrive-nehemis/nehemis - 04 Records/HMI_data"
echo "Starting folder download from Raspberry Pi"
# Sync new recordings from the Raspberry Pi with rsync (skip files already downloaded)
rsync -avz --progress --ignore-existing \
"$PI_USER@$PI_HOST:$REMOTE_FOLDER" \
"$LOCAL_FOLDER"
# Check if rsync succeeded
if [ $? -eq 0 ]; then


@ -1,30 +0,0 @@
import canopen
import os
class ValveBackend:
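# CANopen client for the motorised feed valve: connects to the 'can0' socketcan
# interface and writes the requested opening value to SDO object 0x6000.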
def __init__(self, eds_file: str, node_id: int = 0x0F):
self.eds_file = eds_file
self.node_id = node_id
self.network = None
self.node = None
def connect(self):
try:
self.network = canopen.Network()
self.network.connect(channel='can0', bustype='socketcan')
self.node = canopen.RemoteNode(self.node_id, self.eds_file)
self.network.add_node(self.node)
return True
except Exception as e:
print(f"[VALVE CONNECT ERROR] {e}")
return False
def send_command(self, opening: int):
try:
if self.node is None:
raise RuntimeError("Valve node not initialized")
self.node.sdo[0x6000].raw = opening
print(f"[VALVE] Opening set to {opening}")
except Exception as e:
print(f"[VALVE CMD ERROR] {e}")