@@ -161,7 +161,6 @@ def map_stat(sota_mle_score: dict | None) -> str:
161161 return sota_exp_stat
162162
163163
164- @cache_with_pickle (_log_path_hash_func , force = True )
165164def get_best_report (log_path : Path ) -> dict | None :
166165 log_storage = FileStorage (log_path )
167166 mle_reports = [extract_json (i .content ) for i in log_storage .iter_msg (pattern = "**/running/mle_score/*/*.pkl" )]
@@ -176,11 +175,14 @@ def get_best_report(log_path: Path) -> dict | None:
176175 return None
177176
178177
178+ if UI_SETTING .enable_cache :
179+ get_best_report = cache_with_pickle (_log_path_hash_func , force = True )(get_best_report )
180+
181+
179182def _get_sota_exp_stat_hash_func (log_path : Path , selector : Literal ["auto" , "best_valid" ] = "auto" ) -> str :
180183 return _log_path_hash_func (log_path ) + selector
181184
182185
183- @cache_with_pickle (_get_sota_exp_stat_hash_func , force = True )
184186def get_sota_exp_stat (
185187 log_path : Path , selector : Literal ["auto" , "best_valid" ] = "auto"
186188) -> tuple [DSExperiment | None , int | None , dict | None , str | None ]:
@@ -253,11 +255,14 @@ def get_sota_exp_stat(
253255 return sota_exp , sota_loop_id , sota_mle_score , map_stat (sota_mle_score )
254256
255257
258+ if UI_SETTING .enable_cache :
259+ get_sota_exp_stat = cache_with_pickle (_get_sota_exp_stat_hash_func , force = True )(get_sota_exp_stat )
260+
261+
256262def _get_score_stat_hash_func (log_path : Path , sota_loop_id : int ) -> str :
257263 return _log_path_hash_func (log_path ) + str (sota_loop_id )
258264
259265
260- @cache_with_pickle (_get_score_stat_hash_func , force = True )
261266def get_score_stat (log_path : Path , sota_loop_id : int ) -> tuple [float | None , float | None , bool , float | None ]:
262267 """
263268 Get the scores before and after merge period.
@@ -351,7 +356,10 @@ def get_score_stat(log_path: Path, sota_loop_id: int) -> tuple[float | None, flo
351356 return valid_improve , test_improve , submit_is_merge , merge_sota_rate
352357
353358
354- @cache_with_pickle (_log_path_hash_func , force = True )
359+ if UI_SETTING .enable_cache :
360+ get_score_stat = cache_with_pickle (_get_score_stat_hash_func , force = True )(get_score_stat )
361+
362+
355363def load_times_deprecated (log_path : Path ):
356364 try :
357365 session_path = log_path / "__session__"
@@ -365,7 +373,10 @@ def load_times_deprecated(log_path: Path):
365373 return rd_times
366374
367375
368- @cache_with_pickle (_log_path_hash_func , force = True )
376+ if UI_SETTING .enable_cache :
377+ load_times_deprecated = cache_with_pickle (_log_path_hash_func , force = True )(load_times_deprecated )
378+
379+
369380def load_times_info (log_path : Path ) -> dict [int , dict [str , dict [Literal ["start_time" , "end_time" ], datetime ]]]:
370381 """
371382 Load timing information for each loop and step.
@@ -403,6 +414,10 @@ def load_times_info(log_path: Path) -> dict[int, dict[str, dict[Literal["start_t
403414 return times_info
404415
405416
417+ if UI_SETTING .enable_cache :
418+ load_times_info = cache_with_pickle (_log_path_hash_func , force = True )(load_times_info )
419+
420+
406421def _log_folders_summary_hash_func (log_folder : str | Path , hours : int | None = None ):
407422 summary_p = Path (log_folder ) / (f"summary.pkl" if hours is None else f"summary_{ hours } h.pkl" )
408423 if summary_p .exists ():
@@ -412,7 +427,6 @@ def _log_folders_summary_hash_func(log_folder: str | Path, hours: int | None = N
412427 return md5_hash (hash_str )
413428
414429
415- @cache_with_pickle (_log_folders_summary_hash_func , force = True )
416430def get_summary_df (log_folder : str | Path , hours : int | None = None ) -> tuple [dict , pd .DataFrame ]:
417431 """Process experiment logs and generate summary DataFrame.
418432
@@ -645,6 +659,10 @@ def compare_score(s1, s2):
645659 return summary , base_df
646660
647661
662+ if UI_SETTING .enable_cache :
663+ get_summary_df = cache_with_pickle (_log_folders_summary_hash_func , force = True )(get_summary_df )
664+
665+
648666def percent_df (summary_df : pd .DataFrame , show_origin = True ) -> pd .DataFrame :
649667 """
650668 Convert the summary DataFrame to a percentage format.