75 changes: 55 additions & 20 deletions src/petab_gui/views/simple_plot_view.py
@@ -1,5 +1,6 @@
from collections import defaultdict

import petab.v1.C as PETAB_C
import qtawesome as qta
from matplotlib import pyplot as plt
from matplotlib.backends.backend_qtagg import FigureCanvasQTAgg as FigureCanvas
@@ -77,6 +78,43 @@ def __init__(self, parent=None):
self.observable_to_subplot = {}
self.no_plotting_rn = False

# DataFrame caching system for performance optimization
self._df_cache = {
"measurements": None,
"simulations": None,
"conditions": None,
"visualization": None,
}
self._cache_valid = {
"measurements": False,
"simulations": False,
"conditions": False,
"visualization": False,
}

def _invalidate_cache(self, table_name):
"""Invalidate cache for specific table."""
self._cache_valid[table_name] = False

def _get_cached_df(self, table_name, proxy_model):
"""Get cached DataFrame or convert if invalid."""
if not self._cache_valid[table_name]:
self._df_cache[table_name] = proxy_to_dataframe(proxy_model)
self._cache_valid[table_name] = True
return self._df_cache[table_name]

def _connect_proxy_signals(self, proxy, cache_key):
"""Connect proxy signals for cache invalidation and plotting."""
for signal in [
proxy.dataChanged,
proxy.rowsInserted,
proxy.rowsRemoved,
]:
signal.connect(
lambda *args, key=cache_key: self._invalidate_cache(key)
)
signal.connect(self._debounced_plot)

def initialize(
self, meas_proxy, sim_proxy, cond_proxy, vis_proxy, petab_model
):
@@ -86,20 +124,15 @@ def initialize(
self.vis_proxy = vis_proxy
self.petab_model = petab_model

# Connect data changes
# Connect cache invalidation and data changes
self.options_manager.option_changed.connect(self._debounced_plot)
self.meas_proxy.dataChanged.connect(self._debounced_plot)
self.meas_proxy.rowsInserted.connect(self._debounced_plot)
self.meas_proxy.rowsRemoved.connect(self._debounced_plot)
self.cond_proxy.dataChanged.connect(self._debounced_plot)
self.cond_proxy.rowsInserted.connect(self._debounced_plot)
self.cond_proxy.rowsRemoved.connect(self._debounced_plot)
self.sim_proxy.dataChanged.connect(self._debounced_plot)
self.sim_proxy.rowsInserted.connect(self._debounced_plot)
self.sim_proxy.rowsRemoved.connect(self._debounced_plot)
self.vis_proxy.dataChanged.connect(self._debounced_plot)
self.vis_proxy.rowsInserted.connect(self._debounced_plot)
self.vis_proxy.rowsRemoved.connect(self._debounced_plot)

# Connect proxy signals for all tables
self._connect_proxy_signals(self.meas_proxy, "measurements")
self._connect_proxy_signals(self.cond_proxy, "conditions")
self._connect_proxy_signals(self.sim_proxy, "simulations")
self._connect_proxy_signals(self.vis_proxy, "visualization")

self.visibilityChanged.connect(self._debounced_plot)

self.plot_it()
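
For context on the refactor above: the helper replaces twelve nearly identical connect calls and adds a per-table DataFrame cache that is invalidated whenever a model signal fires. Below is a minimal, self-contained sketch of the same pattern, using hypothetical names (CachedTables, watch, get) and a plain QStandardItemModel standing in for the real proxy models; it illustrates the idea and is not code from this PR.

from PySide6.QtCore import QCoreApplication
from PySide6.QtGui import QStandardItem, QStandardItemModel


class CachedTables:
    """Sketch: one cached conversion result per table, invalidated by model signals."""

    def __init__(self):
        self._cache = {}
        self._valid = {}

    def watch(self, model, key, on_change):
        self._cache[key] = None
        self._valid[key] = False
        for signal in (model.dataChanged, model.rowsInserted, model.rowsRemoved):
            # key=key binds the table name at definition time; *args absorbs
            # whatever positional arguments the signal emits.
            signal.connect(lambda *args, key=key: self.invalidate(key))
            signal.connect(lambda *args: on_change())

    def invalidate(self, key):
        self._valid[key] = False

    def get(self, key, convert):
        # Reconvert only if something changed since the last call.
        if not self._valid[key]:
            self._cache[key] = convert()
            self._valid[key] = True
        return self._cache[key]


app = QCoreApplication.instance() or QCoreApplication([])
model = QStandardItemModel(0, 2)
tables = CachedTables()
tables.watch(model, "measurements", on_change=lambda: print("replot requested"))
model.appendRow([QStandardItem("obs_a"), QStandardItem("1.0")])  # fires rowsInserted

The real view keeps the cache as two dicts plus a debounced plot slot rather than a separate object, but the invalidate-on-signal and convert-only-when-stale flow is the same.
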
@@ -113,10 +146,11 @@ def plot_it(self):
# If the dock is not visible, do not plot
return

measurements_df = proxy_to_dataframe(self.meas_proxy)
simulations_df = proxy_to_dataframe(self.sim_proxy)
conditions_df = proxy_to_dataframe(self.cond_proxy)
visualisation_df = proxy_to_dataframe(self.vis_proxy)
# Use cached DataFrames for performance
measurements_df = self._get_cached_df("measurements", self.meas_proxy)
simulations_df = self._get_cached_df("simulations", self.sim_proxy)
conditions_df = self._get_cached_df("conditions", self.cond_proxy)
visualisation_df = self._get_cached_df("visualization", self.vis_proxy)
group_by = self.options_manager.get_option()
# group_by different value in petab.visualize
if group_by == "condition":
@@ -265,8 +299,8 @@ def highlight_from_selection(
if not proxy:
return

x_axis_col = "time"
observable_col = "observableId"
x_axis_col = PETAB_C.TIME
observable_col = PETAB_C.OBSERVABLE_ID

def column_index(name):
for col in range(proxy.columnCount()):
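
Side note on the constant swap above: the petab.v1.C names are plain strings equal to the PEtab column headers, so behaviour is unchanged and a typo now surfaces as an attribute error instead of a silently empty column lookup. A quick check, assuming petab is installed:

import petab.v1.C as PETAB_C

# The constants are the canonical PEtab column-name strings.
assert PETAB_C.TIME == "time"
assert PETAB_C.OBSERVABLE_ID == "observableId"
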
@@ -308,7 +342,8 @@ def plot_residuals(self):
return

problem = self.petab_model.current_petab_problem
simulations_df = proxy_to_dataframe(self.sim_proxy)
# Reuse cached DataFrame instead of converting again
simulations_df = self._get_cached_df("simulations", self.sim_proxy)

if simulations_df.empty:
return
81 changes: 50 additions & 31 deletions src/petab_gui/views/utils.py
@@ -1,45 +1,64 @@
import pandas as pd
from petab.v1.C import (
CONDITION_ID,
MEASUREMENT,
OBSERVABLE_ID,
PARAMETER_ID,
SIMULATION,
TIME,
X_OFFSET,
Y_OFFSET,
)
from PySide6.QtCore import Qt


def proxy_to_dataframe(proxy_model):
"""Convert Proxy Model to pandas DataFrame."""
rows = proxy_model.rowCount()
cols = proxy_model.columnCount()

if rows <= 1: # <=1 due to "New row..." in every table
return pd.DataFrame()

headers = [proxy_model.headerData(c, Qt.Horizontal) for c in range(cols)]
data = []

data = []
for r in range(rows - 1):
row = {headers[c]: proxy_model.index(r, c).data() for c in range(cols)}
for key, value in row.items():
if isinstance(value, str) and value == "":
row[key] = None
row = []
for c in range(cols):
value = proxy_model.index(r, c).data()
# Convert empty strings to None
row.append(
None if (isinstance(value, str) and value == "") else value
)
data.append(row)

if not data:
return pd.DataFrame()
if proxy_model.source_model.table_type == "condition":
data = pd.DataFrame(data).set_index("conditionId")
elif proxy_model.source_model.table_type == "observable":
data = pd.DataFrame(data).set_index("observableId")
elif proxy_model.source_model.table_type == "parameter":
data = pd.DataFrame(data).set_index("parameterId")
elif proxy_model.source_model.table_type == "measurement":
# turn measurement and time to float
data = pd.DataFrame(data)
data["measurement"] = data["measurement"].astype(float)
data["time"] = data["time"].astype(float)
elif proxy_model.source_model.table_type == "simulation":
# turn simulation and time to float
data = pd.DataFrame(data)
data["simulation"] = data["simulation"].astype(float)
data["time"] = data["time"].astype(float)
elif proxy_model.source_model.table_type == "visualization":
data = pd.DataFrame(data)
if "xOffset" in data.columns:
data["xOffset"] = data["xOffset"].astype(float)
if "yOffset" in data.columns:
data["yOffset"] = data["yOffset"].astype(float)
else:
data = pd.DataFrame(data)

return data

# Create DataFrame in one shot
df = pd.DataFrame(data, columns=headers)

# Apply type-specific transformations
table_type = proxy_model.source_model.table_type

if table_type == "condition":
df = df.set_index(CONDITION_ID)
elif table_type == "observable":
df = df.set_index(OBSERVABLE_ID)
elif table_type == "parameter":
df = df.set_index(PARAMETER_ID)
elif table_type == "measurement":
# Use pd.to_numeric with errors='coerce' for robust conversion
df[MEASUREMENT] = pd.to_numeric(df[MEASUREMENT], errors="coerce")
df[TIME] = pd.to_numeric(df[TIME], errors="coerce")
elif table_type == "simulation":
df[SIMULATION] = pd.to_numeric(df[SIMULATION], errors="coerce")
df[TIME] = pd.to_numeric(df[TIME], errors="coerce")
elif table_type == "visualization":
if X_OFFSET in df.columns:
df[X_OFFSET] = pd.to_numeric(df[X_OFFSET], errors="coerce")
if Y_OFFSET in df.columns:
df[Y_OFFSET] = pd.to_numeric(df[Y_OFFSET], errors="coerce")

return df
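
On the switch from astype(float) to pd.to_numeric(..., errors="coerce") at the end of utils.py: coercion turns unparsable cells into NaN instead of raising, which keeps plotting alive while a table is mid-edit, at the cost of silently dropping malformed values. A small illustration with made-up values, not data from this PR:

import pandas as pd

raw = pd.Series(["0.5", None, "abc"], name="measurement")

try:
    raw.astype(float)  # old behaviour: raises on the unparsable cell
except ValueError:
    print("astype(float) raises ValueError")

# New behaviour: the unparsable cell becomes NaN and plotting can continue.
print(pd.to_numeric(raw, errors="coerce").tolist())  # [0.5, nan, nan]
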