
Commit

forget to sign off one commit
Signed-off-by: DONNOT Benjamin <[email protected]>
BDonnot committed Jan 9, 2025
1 parent 7beab93 commit 85f2ccc
Showing 4 changed files with 123 additions and 86 deletions.
11 changes: 9 additions & 2 deletions CHANGELOG.rst
@@ -29,13 +29,20 @@ TODO: in `main.cpp` check the returned policy of pybind11 and also the `py::call
TODO: a cpp class that is able to compute (DC powerflow) ContingencyAnalysis and TimeSeries using PTDF and LODF
TODO: integration test with pandapower (see `pandapower/contingency/contingency.py` and import `lightsim2grid_installed` and check it's True)

[0.10.1.dev0] 2025-xx-yy
----------------------------
[0.10.1] 2025-01-xx
----------------------
- [FIXED] some timings in the benchmarks were not measured at the right time
- [FIXED] an error when changing the bus of one of the slack generators (this did
  not trigger the recomputation of the pv bus ids)
- [FIXED] an issue when turning off a generator: it was still declared as "slack"
if it was one.
- [FIXED] could not disconnect a generator when it was a slack bus
- [ADDED] more benchmarks, especially for DC powerflow
- [ADDED] a `dfpc` function that can replace the pandapower `dcpf` internal
  function (a usage sketch is given right after this list)
- [ADDED] the `packaging` package as a dependency
- [IMPROVED] the benchmarks in the documentation
  (clarity of what is done)
- [IMPROVED] consistency of the names and measured times across the different benchmarks
- [IMPROVED] refactoring of the c++ side container element to reduce
  the amount of code (for "one end" elements such as loads, generators, static generators and shunts)
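
For illustration only, here is a minimal sketch of how the `dfpc` function mentioned above might be used as a drop-in for pandapower's internal DC power flow. The lightsim2grid import path and the `dfpc(net)` signature are assumptions (they do not come from this commit); only the pandapower calls are documented pandapower API.

.. code-block:: python

    # Hypothetical usage sketch -- the lightsim2grid names below are assumptions,
    # not taken from this commit.
    import pandapower as pp
    import pandapower.networks as pn

    net = pn.case14()

    # Reference result: pandapower's own DC power flow fills net.res_bus, net.res_line, ...
    pp.rundcpp(net)
    print(net.res_bus["va_degree"].head())

    # Assumed drop-in replacement (import path and signature are guesses,
    # hence left commented out):
    # from lightsim2grid import dfpc
    # dfpc(net)
    # print(net.res_bus["va_degree"].head())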

35 changes: 30 additions & 5 deletions benchmarks/benchmark_grid_size.py
@@ -360,7 +360,7 @@ def run_grid2op_env(env_lightsim, case, reset_solver,
env_lightsim.backend.tol)
time_serie._TimeSerie__computed = True
a_or = time_serie.compute_A()
assert status, f"some powerflow diverge for Time Series for {case_name}: {computer.nb_solved()} "
assert status or computer.nb_solved() == nb_step_pp, f"some powerflow diverge for Time Series for {case_name}: {computer.nb_solved()} "

if VERBOSE:
# print detailed results if needed
@@ -403,7 +403,32 @@ def run_grid2op_env(env_lightsim, case, reset_solver,
print_configuration()
print(f"Solver used for linear algebra: {linear_solver_used_str}")
print()


print("TL;DR")
tab_tldr = []
for i, nm_ in enumerate(case_names_displayed):
tab_tldr.append((nm_,
ts_sizes[i],
1000. * ls_gridmodel_time[i] / nb_step if ls_gridmodel_time[i] else None,
1000. * ls_gridmodel_time_reset[i] / nb_step_reset if ls_gridmodel_time_reset[i] else None,
1000. / ts_speeds[i] if ts_speeds[i] else None,
1000. / sa_speeds[i] if sa_speeds[i] else None,
))
if TABULATE_AVAIL:
res_use_with_grid2op_2 = tabulate(tab_tldr,
headers=["grid",
"size (nb bus)",
"time (recycling)",
"time (no recycling)",
"time (`TimeSerie`)",
"time (`ContingencyAnalysis`)",
],
tablefmt="rst")
print(res_use_with_grid2op_2)
else:
print(tab_tldr)
print()

print("Results using grid2op.steps (288 consecutive steps, only measuring 'dc pf [init] + ac pf') (no recycling allowed, non default)")
tab_g2op = []
for i, nm_ in enumerate(case_names_displayed):
@@ -417,7 +442,7 @@ def run_grid2op_env(env_lightsim, case, reset_solver,
))
if TABULATE_AVAIL:
    res_use_with_grid2op_2 = tabulate(tab_g2op,
                                      headers=["grid",
                                      headers=["grid name",
                                               "size (nb bus)",
                                               "avg step duration (ms)",
                                               "time [DC + AC] (ms / pf)",
@@ -450,8 +475,8 @@ def run_grid2op_env(env_lightsim, case, reset_solver,
"avg step duration (ms)",
"time [DC + AC] (ms / pf)",
"speed (pf / s)",
"time in 'gridmodel' (ms / pf)",
"time in 'pf algo' (ms / pf)",
"time in 'solver' (ms / pf)",
"time in 'algo' (ms / pf)",
],
tablefmt="rst")
print(res_use_with_grid2op_2)
