Coverage for moptipyapps / prodsched / result_summary.py: 9%
260 statements
« prev ^ index » next coverage.py v7.14.0, created at 2026-05-13 08:40 +0000
« prev ^ index » next coverage.py v7.14.0, created at 2026-05-13 08:40 +0000
1"""Load an ROP multi-simulation summary from a log file."""
3import argparse
4from typing import Any, Callable, Final, Generator, Iterable
6from moptipy.api.logging import (
7 PREFIX_SECTION_ARCHIVE,
8 SECTION_ARCHIVE_QUALITY,
9 SUFFIX_SECTION_ARCHIVE_X,
10 SUFFIX_SECTION_ARCHIVE_Y,
11)
12from moptipy.api.space import Space
13from moptipy.spaces.intspace import IntSpace
14from moptipy.utils.logger import SECTION_END, SECTION_START
15from pycommons.io.console import logger
16from pycommons.io.csv import COMMENT_START
17from pycommons.io.csv import CSV_SEPARATOR as __CS
18from pycommons.io.path import Path
19from pycommons.math.stream_statistics import (
20 StreamStatistics,
21 StreamStatisticsAggregate,
22)
23from pycommons.strings.string_conv import num_or_none_to_str as __n
24from pycommons.types import type_error
26from moptipyapps.prodsched.instances import get_instances
27from moptipyapps.prodsched.multistatistics import (
28 MultiStatistics,
29 MultiStatisticsSpace,
30)
31from moptipyapps.prodsched.statistics import Statistics
32from moptipyapps.utils.shared import moptipyapps_argparser
def __m(v: int | float | StreamStatistics | None) -> str:
    """
    Render a single value as a string for the CSV-style output.

    A missing value becomes the empty string, a
    :class:`StreamStatistics` record is represented by its arithmetic
    mean, and plain numbers are converted directly.

    :param v: the value to render
    :return: the string representation
    """
    if isinstance(v, StreamStatistics):
        return __n(v.mean_arith)
    return "" if v is None else __n(v)
def summarize_multi_statistics_to_text(y: MultiStatistics) \
        -> Generator[str, None, None]:
    """
    Summarize multi-statistics to text.

    Two CSV-like sections are produced. "Per-Instance Statistics" has
    one row per instance with the four key statistics (fill rate,
    stock level, mean waiting time, mean production time) aggregated
    over all products. "Per-Instance and Per-Product Statistics"
    repeats the same four columns once per product. Each section ends
    with `min`/`mean`/`max`/`stddev` rows computed across instances.

    :param y: the multi-statistics
    :return: the text, one row at a time
    """
    # the per-instance statistics records; at least one is required
    pi: Final[tuple[Statistics, ...]] = y.per_instance

    n_inst: Final[int] = tuple.__len__(pi)
    if n_inst <= 0:
        raise ValueError("No instance.")
    # the number of products, taken from the first instance's rates
    n_prod: Final[int] = list.__len__(pi[0].immediate_rates)

    yield "## Per-Instance Statistics"
    yield (f"instance{__CS}fillRate{__CS}stockLevel{__CS}meanWaitingTime"
           f"{__CS}meanProductionTime")
    # cross-instance aggregates for the four statistics
    irs = StreamStatistics.aggregate()  # fill (immediate) rates
    sls = StreamStatistics.aggregate()  # stock levels
    wts = StreamStatistics.aggregate()  # mean waiting times
    pts = StreamStatistics.aggregate()  # mean production times
    for i, st in enumerate(pi):
        yield (f"{i}{__CS}{__m(st.immediate_rate)}{__CS}{__m(st.stock_level)}"
               f"{__CS}{__m(st.waiting_time)}{__CS}{__m(st.production_time)}")
        # feed the aggregates, skipping absent values; the time
        # statistics contribute their arithmetic means
        if st.immediate_rate is not None:
            irs.add(st.immediate_rate)
        if st.stock_level is not None:
            sls.add(st.stock_level)
        if (st.waiting_time is not None) and (
                st.waiting_time.mean_arith is not None):
            wts.add(st.waiting_time.mean_arith)
        if (st.production_time is not None) and (
                st.production_time.mean_arith is not None):
            pts.add(st.production_time.mean_arith)
    # finalize the aggregates; None when nothing was collected
    irss = irs.result_or_none()
    slss = sls.result_or_none()
    wtss = wts.result_or_none()
    ptss = pts.result_or_none()
    yield (f"min{__CS}{'' if irss is None else __n(irss.minimum)}{__CS}"
           f"{'' if slss is None else __n(slss.minimum)}{__CS}"
           f"{'' if wtss is None else __n(wtss.minimum)}{__CS}"
           f"{'' if ptss is None else __n(ptss.minimum)}")
    yield (f"mean{__CS}{'' if irss is None else __n(irss.mean_arith)}{__CS}"
           f"{'' if slss is None else __n(slss.mean_arith)}{__CS}"
           f"{'' if wtss is None else __n(wtss.mean_arith)}{__CS}"
           f"{'' if ptss is None else __n(ptss.mean_arith)}")
    yield (f"max{__CS}{'' if irss is None else __n(irss.maximum)}{__CS}"
           f"{'' if slss is None else __n(slss.maximum)}{__CS}"
           f"{'' if wtss is None else __n(wtss.maximum)}{__CS}"
           f"{'' if ptss is None else __n(ptss.maximum)}")
    yield (f"stddev{__CS}{'' if irss is None else __n(irss.stddev)}{__CS}"
           f"{'' if slss is None else __n(slss.stddev)}{__CS}"
           f"{'' if wtss is None else __n(wtss.stddev)}{__CS}"
           f"{'' if ptss is None else __n(ptss.stddev)}")
    yield ""
    yield "## Per-Instance and Per-Product Statistics"

    # build the header row and, in parallel, one aggregate per column
    # (four columns per product, in header order)
    s: str = "instance"
    collect: list[StreamStatisticsAggregate] = []
    for j in range(n_prod):
        js = str(j + 1)
        s = (f"{s}{__CS}fillRate[{js}]{__CS}stockLevel[{js}]{__CS}"
             f"meanWaitingTime[{js}]{__CS}meanProductionTime[{js}]")
        collect.extend((StreamStatistics.aggregate(),
                        StreamStatistics.aggregate(),
                        StreamStatistics.aggregate(),
                        StreamStatistics.aggregate()))
    yield s
    for i, st in enumerate(pi):
        s = str(i)
        ci: int = 0  # running column index into collect
        for p in range(n_prod):
            s = (f"{s}{__CS}{__m(st.immediate_rates[p])}{__CS}"
                 f"{__m(st.stock_levels[p])}{__CS}{__m(st.waiting_times[p])}"
                 f"{__CS}{__m(st.production_times[p])}")

            # ci advances once per column even when a value is absent,
            # keeping each aggregate bound to its header column
            v = st.immediate_rates[p]
            if v is not None:
                collect[ci].add(v)
            ci += 1
            v = st.stock_levels[p]
            if v is not None:
                collect[ci].add(v)
            ci += 1
            vx = st.waiting_times[p]
            if vx is not None:
                v = vx.mean_arith
                if v is not None:
                    collect[ci].add(v)
            ci += 1
            vx = st.production_times[p]
            if vx is not None:
                v = vx.mean_arith
                if v is not None:
                    collect[ci].add(v)
            ci += 1
        yield s

    # finalize the per-column aggregates and emit the summary rows
    cres: list[StreamStatistics | None] = [
        cc.result_or_none() for cc in collect]
    s = "min"
    for cress in cres:
        s = f"{s}{__CS}{'' if cress is None else __n(cress.minimum)}"
    yield s
    s = "mean"
    for cress in cres:
        s = f"{s}{__CS}{'' if cress is None else __n(cress.mean_arith)}"
    yield s
    s = "max"
    for cress in cres:
        s = f"{s}{__CS}{'' if cress is None else __n(cress.maximum)}"
    yield s
    s = "stddev"
    for cress in cres:
        s = f"{s}{__CS}{'' if cress is None else __n(cress.stddev)}"
    yield s
def summarize_rop_to_text(x: Any) -> Generator[str, None, None]:
    """
    Summarize a re-order point vector as two CSV rows.

    The first row lists the 1-based product indices, the second row
    the integer re-order point per product.

    :param x: the re-order point vector
    :return: the text rows
    """
    header_cells: list[str] = ["product"]
    header_cells.extend(str(int(idx + 1)) for idx in range(len(x)))
    yield __CS.join(header_cells)
    value_cells: list[str] = ["ROP"]
    value_cells.extend(str(int(val)) for val in x)
    yield __CS.join(value_cells)
def __default_x_space(ms: MultiStatisticsSpace) -> Space:
    """
    Create the default search space: one bounded integer per product.

    :param ms: the multi-statistics space
    :return: the integer space
    """
    n_products: Final[int] = ms.instances[0].n_products
    return IntSpace(n_products, 0, 1_000_000_000)
def result_summary(
        source: Iterable[str],
        y_space: MultiStatisticsSpace,
        x_space: Space | Callable[[MultiStatisticsSpace], Space] =
        __default_x_space,
        index_filter: Callable[[int], bool] = lambda _: True,
        x_from_text: Callable[[Iterable[str]], Any] | None = None,
        y_from_text: Callable[[Iterable[str]], MultiStatistics] | None = None,
        x_to_text: Callable[[Any], Generator[str, None, None]] | None =
        summarize_rop_to_text,
        y_to_text: Callable[[MultiStatistics], Generator[
            str, None, None]] | None =
        summarize_multi_statistics_to_text) \
        -> Generator[str, None, None]:
    """
    Load an ROP multi-simulation summary from a log file.

    The `source` rows are scanned for archive sections: per-solution
    X sections, per-solution Y sections, and the archive-quality
    section. The raw text of each section is collected per archive
    index (subject to `index_filter`). Afterwards, each collected
    solution is rendered to text via the `*_to_text` converters.

    :param source: the rows of the source log file
    :param y_space: the multi-statistics space
    :param x_space: the search space, or a callable that creates it
        from the `y_space`
    :param index_filter: decides which archive indices to include
    :param x_from_text: convert text to an element of the x-space
    :param y_from_text: convert text to an element of the y-space
    :param x_to_text: convert an element of the x-space to text
    :param y_to_text: convert an element of the y-space to text
    :return: the generator with the summary text
    """
    if not isinstance(source, Iterable):
        raise type_error(source, "source", Iterable)
    if not isinstance(y_space, MultiStatisticsSpace):
        raise type_error(y_space, "y_space", MultiStatisticsSpace)
    if callable(x_space):  # a factory: materialize the actual space
        x_space = x_space(y_space)
    if not isinstance(x_space, Space):
        raise type_error(x_space, "x_space", Space, call=True)

    if y_from_text is None:
        # default: parse the rows through the y-space itself
        def __y_from_text(text, _z=y_space) -> MultiStatistics:
            return _z.create().from_stream(text)
        y_from_text = __y_from_text
    if not callable(y_from_text):
        raise type_error(y_from_text, "y_from_text", call=True)

    if x_from_text is None:
        # default: parse only the first row through the x-space
        def __x_from_text(text, _z=x_space) -> Any:
            return _z.from_str(text[0])
        x_from_text = __x_from_text
    if not callable(x_from_text):
        raise type_error(x_from_text, "x_from_text", call=True)

    if y_to_text is None:
        # default: render via the y-space's string conversion
        def __y_to_text(y: MultiStatistics, _z=y_space) \
                -> Generator[str, None, None]:
            yield _z.to_str(y)
        y_to_text = __y_to_text
    if not callable(y_to_text):
        raise type_error(y_to_text, "y_to_text", call=True)

    if x_to_text is None:
        # default: render via the x-space's string conversion
        def __x_to_text(x, _z=x_space) -> Generator[str, None, None]:
            yield _z.to_str(x)
        x_to_text = __x_to_text
    if not callable(x_to_text):
        raise type_error(x_to_text, "x_to_text", call=True)

    if not callable(index_filter):
        raise type_error(index_filter, "index_filter", call=True)

    # maps archive index -> (x rows, y rows, quality rows)
    collected: dict[int, tuple[list[str], list[str], list[str]]] = {}

    # collect the raw data
    # mode: -1 = outside any section, 0 = inside an X section,
    # 1 = inside a Y section, 2 = inside the archive-quality section
    mode: int = -1
    current_index: int | None = None
    s_arch: Final[str] = f"{SECTION_START}{PREFIX_SECTION_ARCHIVE}"
    s_ql: Final[str] = f"{SECTION_START}{SECTION_ARCHIVE_QUALITY}"
    suf_x: Final[str] = SUFFIX_SECTION_ARCHIVE_X
    suf_y: Final[str] = SUFFIX_SECTION_ARCHIVE_Y
    e_arch: Final[str] = f"{SECTION_END}{PREFIX_SECTION_ARCHIVE}"
    e_ql: Final[str] = f"{SECTION_END}{SECTION_ARCHIVE_QUALITY}"
    cur_coll: list[str] = []  # rows of the section currently being read
    for srow in source:
        row: str = str.strip(srow)
        if (str.__len__(row) <= 0) or row.startswith(COMMENT_START):
            continue  # skip blank rows and comment rows
        if 0 <= mode <= 1:  # inside an X or Y section
            if row.startswith(e_arch):  # the section ends here
                if current_index is None:  # index was filtered out
                    mode = -1
                    continue
                if current_index not in collected:
                    collected[current_index] = ([], [], [])
                # store X rows at slot 0, Y rows at slot 1
                collected[current_index][mode].extend(cur_coll)
                cur_coll.clear()
                current_index = None
                mode = -1
                continue
            if current_index is not None:
                cur_coll.append(row)
            continue

        if mode == 2:  # inside the archive-quality section
            if row == e_ql:
                mode = -1
                current_index = None
                continue
            # NOTE(review): presumably skips a header row whose first
            # cell starts with "f" -- TODO confirm against log format
            if str.lower(row[0]) == "f":
                continue
            if current_index is not None:
                # quality rows are indexed by their position in order
                current_index += 1
                if not index_filter(current_index):
                    continue
                if current_index not in collected:
                    collected[current_index] = ([], [], [])
                collected[current_index][mode].append(row)
            continue

        if row.startswith(s_arch):  # an X or Y section begins
            if row.endswith(suf_x):
                mode = 0
                # the archive index is embedded in the section name
                current_index = int(row[str.__len__(s_arch):
                                        -str.__len__(suf_x)])
                if not index_filter(current_index):
                    current_index = None
                continue
            if row.endswith(suf_y):
                mode = 1
                current_index = int(row[str.__len__(s_arch):
                                        -str.__len__(suf_y)])
                if not index_filter(current_index):
                    current_index = None
                continue
        if row == s_ql:  # the quality section begins
            mode = 2
            current_index = -1  # first data row then gets index 0

    # now print the results
    not_first: bool = False
    counter: int = 0
    for idx in sorted(collected.keys()):
        counter += 1
        value: tuple[list[str], list[str], list[str]] = collected[idx]
        if not_first:  # separate successive solutions by blank rows
            yield ""
            yield ""
        not_first = True

        yield f"# ================== Solution {idx} =================="
        yield from x_to_text(x_from_text(value[0]))
        yield ""
        yield from y_to_text(y_from_text(value[1]))
        yield ""
        # the first quality row holds the objective value(s)
        obvals: list[str] = value[2][0].split(__CS)
        yield f"summary objective value{__CS}{obvals[0]}"
        for i in range(1, list.__len__(obvals)):
            yield f"f{i}{__CS}{obvals[i]}"
    logger(f"Found {counter} results.")
def result_summaries(
        source: str,
        dest: str,
        y_space: MultiStatisticsSpace,
        x_space: Space | Callable[[MultiStatisticsSpace], Space] =
        __default_x_space,
        index_filter: Callable[[int], bool] = lambda _: True,
        x_from_text: Callable[[Iterable[str]], Any] | None = None,
        y_from_text: Callable[[Iterable[str]], MultiStatistics] | None = None,
        x_to_text: Callable[[Any], Generator[str, None, None]] | None =
        summarize_rop_to_text,
        y_to_text: Callable[[MultiStatistics], Generator[
            str, None, None]] | None =
        summarize_multi_statistics_to_text) -> None:
    """
    Convert one or multiple files from a source to a destination.

    If `source` is a directory, recurse into every entry; otherwise
    process the single file and write its summary into a file of the
    same name inside the `dest` directory.

    :param source: the source file or directory
    :param dest: the destination directory
    :param y_space: the multi-statistics space
    :param x_space: the search space, or a callable creating it
    :param index_filter: decides which archive indices to include
    :param x_from_text: convert text to an element of the x-space
    :param y_from_text: convert text to an element of the y-space
    :param x_to_text: convert an element of the x-space to text
    :param y_to_text: convert an element of the y-space to text
    """
    src: Final[Path] = Path(source)
    dst: Final[Path] = Path(dest)
    dst.ensure_dir_exists()

    if src.is_dir():
        for spt in src.list_dir():
            # Bug fix: x_to_text and y_to_text were previously dropped
            # here, silently reverting custom converters to the
            # defaults for files in nested directories.
            result_summaries(spt, dst, y_space, x_space, index_filter,
                             x_from_text, y_from_text, x_to_text, y_to_text)
        return
    if not src.is_file():
        return

    dest_file: Final[Path] = dst.resolve_inside(src.basename())
    logger(f"Now processing {src!r} to {dest_file!r}.")
    with dest_file.open_for_write() as ds, src.open_for_read() as ss:
        for s in result_summary(
                ss,
                y_space=y_space,
                x_space=x_space,
                # Bug fix: index_filter was previously not forwarded,
                # so any user-supplied filter was silently ignored.
                index_filter=index_filter,
                x_from_text=x_from_text,
                y_from_text=y_from_text,
                x_to_text=x_to_text,
                y_to_text=y_to_text):
            ds.write(s)
            ds.write("\n")
# Run to parse all log files and to create csv
if __name__ == "__main__":
    # command-line front-end: source dir, destination dir, instance
    # dir, and the number of instances to load
    parser: Final[argparse.ArgumentParser] = moptipyapps_argparser(
        __file__, "Postprocess Solutions", "Create postprocessing results.")
    parser.add_argument(
        "source", nargs="?", type=Path, default="./results",
        help="the location of the experimental results, i.e., the root folder "
             "under which to search for log files")
    parser.add_argument(
        "dest", nargs="?", type=Path, default="./evaluation/",
        help="the path to the destination folder to be created")
    parser.add_argument(
        "insts", nargs="?", type=Path, default="./instances/",
        help="the directory with the instances")
    parser.add_argument(
        "n_inst", nargs="?", type=int, default=23,
        help="the number of instances")

    cmd_args: Final[argparse.Namespace] = parser.parse_args()

    num_instances: Final[int] = cmd_args.n_inst
    instance_dir: Final[Path] = cmd_args.insts
    logger(f"Loading {num_instances} instances from {instance_dir!r}.")
    loaded_instances = get_instances(num_instances, instance_dir)
    logger("Done loading instances, now executing evaluation.")
    result_summaries(cmd_args.source, cmd_args.dest,
                     MultiStatisticsSpace(loaded_instances))