From 42cb2c31ee0c4851bfe46497dab93c0cee69cee3 Mon Sep 17 00:00:00 2001 From: Fabian Castaneda Date: Wed, 11 Jun 2025 15:47:39 +0200 Subject: [PATCH 1/7] fix: added to execute_post() how to treat an `OptimizationDataset`. --- src/gemseo/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/gemseo/__init__.py b/src/gemseo/__init__.py index d88add09b2..f76066e03c 100644 --- a/src/gemseo/__init__.py +++ b/src/gemseo/__init__.py @@ -50,6 +50,7 @@ from numpy import ndarray from gemseo.core.execution_statistics import ExecutionStatistics as _ExecutionStatistics from gemseo.datasets import DatasetClassName +from gemseo.datasets.optimization_dataset import OptimizationDataset from gemseo.mda import base_parallel_mda_settings as base_parallel_mda_settings from gemseo.mda.base_parallel_mda_settings import BaseParallelMDASettings from gemseo.mlearning.regression.algos.base_regressor import BaseRegressor @@ -82,9 +83,6 @@ if TYPE_CHECKING: from gemseo.core.grammars.json_grammar import JSONGrammar from gemseo.datasets.dataset import Dataset from gemseo.datasets.io_dataset import IODataset - from gemseo.datasets.optimization_dataset import ( - OptimizationDataset as OptimizationDataset, - ) from gemseo.disciplines.surrogate import SurrogateDiscipline from gemseo.disciplines.wrappers.job_schedulers.discipline_wrapper import ( JobSchedulerDisciplineWrapper, @@ -1169,6 +1167,8 @@ def execute_post( opt_problem = to_post_proc elif isinstance(to_post_proc, (str, PathLike)): opt_problem = OptimizationProblem.from_hdf(to_post_proc) + elif isinstance(to_post_proc, OptimizationDataset): + opt_problem = to_post_proc else: msg = f"Cannot post process type: {type(to_post_proc)}" raise TypeError(msg) -- GitLab From 6ff13fa30adbaa5a18797e29153d786787c46f73 Mon Sep 17 00:00:00 2001 From: Fabian Castaneda Date: Wed, 11 Jun 2025 15:48:32 +0200 Subject: [PATCH 2/7] doc: fixed outdated attributes. --- .../dataset/creation/plot_optimization_dataset.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc_src/_examples/dataset/creation/plot_optimization_dataset.py b/doc_src/_examples/dataset/creation/plot_optimization_dataset.py index 15c309fdd2..1703c61052 100644 --- a/doc_src/_examples/dataset/creation/plot_optimization_dataset.py +++ b/doc_src/_examples/dataset/creation/plot_optimization_dataset.py @@ -23,10 +23,12 @@ The optimisation dataset ======================== The :class:`.OptimizationDataset` proposes several particular group names, -namely :attr:`~.IODataset.DESIGN_GROUP`, -:attr:`~.IODataset.OBJECTIVE_GROUP`, -:attr:`~.IODataset.OBSERVABLE_GROUP`, -and :attr:`~.IODataset.CONSTRAINT_GROUP`. +namely :attr:`~.OptimizationDataset.DESIGN_GROUP`, +:attr:`~.OptimizationDataset.OBJECTIVE_GROUP`, +:attr:`~.OptimizationDataset.OBSERVABLE_GROUP`, +:attr:`~.OptimizationDataset.CONSTRAINT_GROUP`, +:attr:`~.OptimizationDataset.EQUALITY_CONSTRAINT_GROUP`, +and :attr:`~.OptimizationDataset.INEQUALITY_CONSTRAINT_GROUP`. This particular :class:`.Dataset` is useful to post-process an optimization history. """ -- GitLab From 9ac00e12d9ddec2dadf1c1c3e19fc8bf3d6e4b84 Mon Sep 17 00:00:00 2001 From: Fabian Castaneda Date: Wed, 11 Jun 2025 15:48:59 +0200 Subject: [PATCH 3/7] doc: added example on how to post-process an optimization dataset. 
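
In short, the new gallery script walks through loading a saved optimization
history, turning it into an `OptimizationDataset` and passing that dataset
straight to `execute_post()`. A condensed sketch of the flow (names, file
and settings are exactly those used in the script added by this commit; the
power2_opt_pb.h5 history file itself is added later in the series):

    from gemseo import execute_post
    from gemseo.algos.optimization_problem import OptimizationProblem
    from gemseo.settings.post import OptHistoryView_Settings

    # Load a previously saved optimization history.
    problem = OptimizationProblem.from_hdf("power2_opt_pb.h5")

    # group_functions=True keeps objective, constraints and observables in
    # their own groups, which the post-processors need.
    dataset = problem.to_dataset(group_functions=True)

    # The dataset itself is now a valid input for execute_post().
    execute_post(
        dataset,
        settings_model=OptHistoryView_Settings(save=False, show=True),
    )

The rest of the script drives the same post-processing from a csv export of
the dataset, which additionally requires attaching an OptimizationMetadata
instance (and, optionally, the input space) to the dataset.
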
--- .../plot_post_process_optimization_dataset.py | 203 ++++++++++++++++++ 1 file changed, 203 insertions(+) create mode 100644 doc_src/_examples/post_process/plot_post_process_optimization_dataset.py diff --git a/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py b/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py new file mode 100644 index 0000000000..b662cefb75 --- /dev/null +++ b/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py @@ -0,0 +1,203 @@ +# Copyright 2021 IRT Saint Exupéry, https://www.irt-saintexupery.com +# +# This work is licensed under a BSD 0-Clause License. +# +# Permission to use, copy, modify, and/or distribute this software +# for any purpose with or without fee is hereby granted. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL +# THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING +# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Post-process an optimization dataset +==================================== +""" +# %% +# Optimization problems can be exported into an :class:`.OptimizationDataset` which can +# be later be stored as different file types like `csv`. Later on one might want to +# recover the stored data and visualize it. Fortunately |g| allows to use the different +# post-processing algorithms with an :class:`.OptimizationDataset` as the +# post-processing input. + +# %% +# In this example we illustrate the use of an :class:`.OptimizationDataset` for +# post-processing. The data used to illustrate this, are from an MDO scenario on the +# Power 2 problem. The data have been saved in an HDF5 file. +# The post-processing algorithm used for this example is the :class:`.OptHistoryView`. + +from __future__ import annotations + +import numpy as np + +from gemseo import execute_post +from gemseo.algos.constraint_tolerances import ConstraintTolerances +from gemseo.algos.design_space import DesignSpace +from gemseo.algos.optimization_problem import OptimizationProblem +from gemseo.datasets.optimization_dataset import OptimizationDataset +from gemseo.datasets.optimization_metadata import OptimizationMetadata +from gemseo.settings.post import OptHistoryView_Settings + +# %% +# First we will recover the use case data from a HDF5 file. and convert it into an +# :class: `.OptimizationProblem`. + +problem = OptimizationProblem.from_hdf("power2_opt_pb.h5") + +# %% +# Now the problem gets converted into an :class:`.OptimizationDataset` + +dataset = problem.to_dataset(group_functions=True) + +# %% +# As you can see the argument `group_functions` must be true in order to use the +# post-processing, otherwise, the different functions won't be grouped to their +# corresponding optimization function (objective, inequality constraints, equality +# constraints, observables). + +# %% +# Now we can execute the post-processing as usual, just that instead of passing as an +# argument a :class:`.Scenario` or a hdf5 file, the argument is the +# :class:`.OptimizationDataset`. 
+ + +execute_post( + dataset, + settings_model=OptHistoryView_Settings( + save=False, + show=True, + ), +) + +# %% +# The advantage of converting an :class:`.OptimizationProblem` to an +# :class:`.OptimizationDataset` is that there's no manipulation to be done to +# post-process. As you can tell, there's no significant advantage with doing this +# instead of post-processing the problem directly. + +# %% +# The advantage of being able to use an :class:`.OptimizationDataset` is that it allows +# to use |g| post-processing algorithms using any data. To illustrate this we recover +# the data from a csv file instead of a HDF5 file. +# First, we will save the previous data in a csv file. +dataset.to_csv("results.csv") + +# %% +# We can build directly the :class:`.OptimizationDataset` from the csv file. + + +recovered_dataset = OptimizationDataset.from_csv("results.csv") + +print(recovered_dataset.summary) + +# %% +# .. note:: +# Since the data recovered from the csv comes from an existing +# :class:`.OptimizationDataset`, the variables are already grouped. Details on how to +# group the variables in case of importing ungrouped data can be found +# :ref:`here `. + +# %% +# In order to use an :class:`.OptimizationDataset` we must attribute some optimization +# metadata to the :class:`.OptimizationDataset`. For this we use the +# :class:`.OptimizationMetadata` and store it in the attribute :attr:`.misc` of the +# dataset under the key "optimization_metadata". +# Some optimization metadata can be recovered from the dataset itself, but overall, +# it requires to have knowledge of the problem. + + +# %% +# the field `output_names_to_constraint_names` makes reference to the cases where the +# names of functions were changes for a reason or another (like an off-set for example). +# the argument takes the shape of a dictionary where the keys are the original +# constraint names and the value a list of associated names. For the use case at hand, +# there is no name change so the associated constraint names are the names themselves. +output_names_to_constraint_names = {} +for constraint_name in ( + recovered_dataset.inequality_constraint_names + + recovered_dataset.equality_constraint_names +): + output_names_to_constraint_names[constraint_name] = constraint_name + +# %% +# The optimum iteration can be retrieved from the dataset by looking for the minimum +# value of the objective function. + +optimum_iteration = recovered_dataset.objective_dataset.idxmin(axis=0).values[0] + +# %% +# The tolerances field is an instance of the :class:`.CosntraintTolerances` model. +# Which must de instantiated with the corresponding values. In this case the default +# values are used + +tolerances = ConstraintTolerances() + +# %% +# The last important data to be determine is the point feasibility. This can be +# predetermined and stored in the csv file. In this case, we determine the feasibility +# using the tolerances to create a mask. + +equality_feasible_mask = ( + np.abs(recovered_dataset.equality_constraint_dataset) <= tolerances.equality +).all(axis=1) + +inequality_feasible_mask = ( + np.abs(recovered_dataset.inequality_constraint_dataset) <= tolerances.inequality +).all(axis=1) + +feasible_iterations = recovered_dataset.index[ + equality_feasible_mask & inequality_feasible_mask +].tolist() + +# %% +# With all the optimization metadata ready, we can create the +# :class:`.OptimizationMetadata` and attribute it to the dataset. 
+ + +opt_metadata = OptimizationMetadata( + objective_name="pow2", + standardized_objective_name="pow2", + minimize_objective=True, + use_standardized_objective=False, # Either True or False according to the user + tolerances=ConstraintTolerances(), # Add the corresponding tolerances to the pydantic model + output_names_to_constraint_names=output_names_to_constraint_names, + feasible_iterations=feasible_iterations, + optimum_iteration=optimum_iteration, +) + +recovered_dataset.misc["optimization_metadata"] = opt_metadata + +# %% +# Given that some post-processing algorithms use the input space of the problem, +# attributing the input space of the problem to the dataset can be useful. +# For the power2 problem we know that the input space is :math:`-1.0 < x < 1.0` where +# `x` has 3 components and has initiated with 1.0. + + +input_space = DesignSpace() +input_space.add_variable("x", 3, lower_bound=-1.0, upper_bound=1.0, value=1.0) + +recovered_dataset.misc["input_space"] = input_space + +# %% +# With all the optimization metadata gathered, we can execute the post-processing. + +execute_post( + recovered_dataset, + settings_model=OptHistoryView_Settings( + save=False, + show=True, + ), +) + + +# %% +# .. warning:: +# The post-processing algorithm :class:`.GradientSensitivity`, has the option to compute +# missing gradients. It is not possible to use an OptimizationDataset with that option. +# -- GitLab From 08a3062edf50f9f3a56a1ea6e3452f674a556df1 Mon Sep 17 00:00:00 2001 From: Fabian Castaneda Date: Wed, 11 Jun 2025 16:16:38 +0200 Subject: [PATCH 4/7] test: added test for `execute_post` with a dataset. --- tests/test_gemseo.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_gemseo.py b/tests/test_gemseo.py index 891367564b..b888912aa7 100644 --- a/tests/test_gemseo.py +++ b/tests/test_gemseo.py @@ -287,6 +287,13 @@ def test_execute_post(scenario, obj_type, tmp_wd) -> None: assert isinstance(post, OptHistoryView) +def test_execute_post_with_optimization_dataset(scenario): + """Test the method execute_post with a :class:`.OptimizationDataset.""" + dataset = scenario.formulation.optimization_problem.to_dataset(group_functions=True) + post = execute_post(dataset, post_name="OptHistoryView", save=False, show=False) + assert isinstance(post, OptHistoryView) + + def test_execute_post_type_error(scenario) -> None: """Test the method execute_post with a wrong typed argument.""" with pytest.raises(TypeError, match=f"Cannot post process type: {int}"): -- GitLab From d0dbafa7d483d942003f89f62cbc24fb8d247b13 Mon Sep 17 00:00:00 2001 From: Fabian Castaneda Date: Thu, 12 Jun 2025 08:43:22 +0200 Subject: [PATCH 5/7] fix: added example h5 file --- doc_src/_examples/post_process/power2_opt_pb.h5 | Bin 0 -> 87264 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 doc_src/_examples/post_process/power2_opt_pb.h5 diff --git a/doc_src/_examples/post_process/power2_opt_pb.h5 b/doc_src/_examples/post_process/power2_opt_pb.h5 new file mode 100644 index 0000000000000000000000000000000000000000..2b69829be0d108e334d74e7b76fdac5e9dd72cd7 GIT binary patch literal 87264 zcmeD5aB<`1lHy_j0S*oZ76t(j3y%Lo!GjP+uo#s4AIxE3U`T*6AhHY$9Z>!XCJ2L( z0R$NsBp_S{L8$rY>Oz8CT^Se{8DQqaXs9-Z2dog52ZS0WM_34WxH<-ayu1NKFfcF_ zKK!1| zC^^DH06qO?K>dF~48p*b&Kkra@(Uy(G%j@;3?T9sj36|O4EFPPVPxQ7(0~eTfTmYa zUI7WA>w{(|1`j)k3JA%-PyppmaDa$_Ga~~7!wGmf4>FB`0X@IL@?!^@z3BdeW)B7h zXug8j!oYx@&nzI;FwB5D0H*E$l>ZLCA9_BA)h8EXAsS%j{ebc%paH_j zz{UWU0~4T{3gTiYmw}l<11vAW1d?N55a58&3=A9$ZjL^|t_%zeAwi)a3M|3OzyVgz 
z#KZ(Pf&*GZGBY-CqPhga6JkKv_xkC^L-Wt1biK;1v^cdWFF#K= zCo?ZKu}Ifh=Jg{pDf?=Xb?y(VTI^XsRh|sQ4WRS_$|VI*J}8BQN(mSrBnMIho|jzs3OF)%RjBiREAF_1o(dQeb;_#ksYdST+A^a`>A#s{?}`H;-vW&o#n z2ADh#l#gx>F9QPuvN<3#Kp3VEWQMk*g&84@2i)P&2@MZW69c3Zf`@Z>fEr@B!=o1(9w1eaumF)mFFZia zQsneNZhn~#4G)NUps)bpksBWKhh%tw;tYgyGxIWYGpkbL^OLetlS?wopyPUJDe=W6 zsRaxvnQ3XMMX7lunTaKt`FZiVsU;ctDGUq+N%19>1*r^~d8vi*CHXn2MTvRIsSFIL zJVbhf^+Q2+l7k728)`^HOhrwV5V28m_=f;&+z>W?0v+aKH~<|FgoQv}VlFs^r$Lf0 zLuy4q5lAkvD7_d&r)1`WC~#W>#DI+lg4ozFW<3l^fw20QnE_JHN-{ElXLJyqafmuZ zy#uilF}}4NTCRX}AZ#A&bfU2Yk@QL4#l* z6`;W<5M2>%6sw@6rC_X}t6-pK32l=j>I0ZP&_vDvvkz3C7&0(0z*<|dRwzguWG-ls z3`Bzlyg;;po~eQ^$a7%J7#KiufC3O^4!S)sf57CS_83Au1)LWX<(4>k_TNa8r;bt1zDHlhPfP;{mUrw;+}04r#{A!O_ivW5vU{))c7 z@Hy1q5Y;g6VCItvFsVUB6PhpKFoQT3l4>CAQF7#j0Q!6hY`p#hXiyX>7+~YRpz&AG zxGabUDS?f9!qyohX~E0HwJy8B7@}>0DTF4JeiNV;qvlSC-cfS+hX8u|h0XVEfTkZ< z2vB>z4>G?6T1^A7!2_CZVfg_)9m85#FKi*|Vd@0zAbbNzH3VJN&cNW{4iSf`%YgDb zJR#!4g(O^G!`ds5xfW13Lc#=+;}P{XxLRjG%$vT4*4rQ*h|t2Uw+GjHE=amU%$uID zMz|dt3pQ_h!v>oyLJU^F@32CMVPnBUYlS5?S%erQ^cffe%n@RPiAB3~^?yjFE82wz zKXi%~mU1AeV6f*i% z1`TUh8Mt9cp$I@~4F(1Q7ZhO(4m`A+F{DrgAhE>2umUZ8pePzZ4(-yF`;bgmvBde1nWvalrRaUI>S|6gv$cE{eu34-=1Un6zNBSKNq5g-(3?w2j^VQ($zYrT9q?a@7&@HNv@P&j11H;e{4^r1p@)H>jgSA~n=p0K@ z!$W+ShX=LJ^^}K(6Rc!{eAvd+SI59nyAq~ne zE=epYEoNZI10ApupPZPJ1JRjT9G{k&Se%)Z13r#|As@6)yfimHGcP3-Vf8FT z7x^UN@(z)nA$#E=D?$ffyx>X zGaE!OFff!s>6Fys%=EnY;(|obsZNMLp+=SUT#;kO?)8WFG%zdD$&BM#tK2djq@?&DVn8}G{ahhH769Lta= zFMZ%QoscZ%0q~$!d_XlsA84UFG9NUO0XjDeCJ#F63z-jMgARg+$%EJ+4C8}*2vP&% zgU${EVHh8jmO&WC2iXmA$%Fl5@pc<0F?q-08*ZSok{~-7fWeg1vfBu5{ z^Orrupa1L`K6uZ(%8-o_Uik|yvsvY`9k})@>7@QNCTMtZIAqkWcwlbzIfgE1o zt7QBWSz}M;>}NZz~`6a*Y%9G7loPkU+4XAhwdU4UG?fSjIG(85a@ ze4aG}11$WM(D|kzhGDmn|Z^Hi)6}!KxS-8X*4s3H9esdx$?l;l;qvpfzhROdX8YvYuw= z5YKiXPHMeG9{j=y5r=Kf4&aL_IBFm??4kriwz>Y7B-{uVg85s^QS$;pFiy({)B`V z1H*>OSQB2y4qTFsMMn>wzGIW?*P&(Fs8buVh_RKFt3RfBuACU_yL& zp|sQ8vT6$?RCIh2TCL$HY|KW>`^=#0;3@?8UmvsFd71* zAuzl`0N45Upne3%rw*Wlh72M7QV<{1p8(Mg(1Y(m{Ra>q)UN>1uycw){TL7*Chq{1 zfa!!2AUoqXDTyvDb)!0W`J)5=T}K@)u|f z2qcc~FVI*KvN+6NpfL=PIEwj(3=E*L43Id?e3-vLV;UfFbbrD6A?WE4G=>6F4>Jem zFVI*DNF2ppBL)W07z{`pSv|;Kps^Z|IJ&<;V>-yAlHU!XDqBn~qN<}c8g2S^;nULywhJU+5|klCOy6_9##e}Tqcki}vC z0*Qg}1JGbFh%t&sLtr!nMnhmU1V%$(Gz3ONU^E0qLtqGp0BEiNG(!RF&%xGBRdho< zHKP|ogXR@Lt50F`2{0NW%fOHUonU~iyM>95(jy}TVC(Z>>oH*yxI3Wf2NnvjejKdd z2Ab0b$-(+%uzncqSW{Sf0;N%qI#@adu|XKJ9*qUQUs_-WNEA1Q_4_5Z<5mNbf%Q)n zu7P;CG0dMDUvaA$3NnP|qha9>qha&Wq_1-<$b^IkYy=O&!`!bj80Mqd;p-eD`XTPb zMZ)GwH%!E(0wNAezXv8k__#<|I=wI%mkNkDY#ivp6bK&|2}{o(rs7fo5r^eBhUpMK zr6m2*RmG4$=zq)01XdVii5=DV9RIp3y&G2;laRwbUyorVG$m%`Upmo zUoV}2hR5Kpm*|&YEih3NEwWgXp2ND&G$5}rr~TX_oVr-Ax(F#p16 zSo;Lx1qAs58p|*dkmo>bxCcR;Q9K#~qaiRF0;3@?8UmvsFswp=e(lL0(3Zx~ZcoD2 zm%-Xqu=XSw`!Wg^LsAmRNO<#XsI(`c!~GB+Adv!DNE`+R@TL=x2;74p&L|!Yfzc2c z4S~@R7!85Z5Exb=02+G&jm5%Rd9d{aCssqe`C=`E28~nUTTf819-km!-Wf|;^;isJotl) z=rX8$*gW`$OQ_=LJlH%q!xeNHR6eZS5x9yfPA-p7KO0nLg3N}|uzn@Mc?u@t}7z-aQz4-IH|4DNb~ ze)+`!8XiMCeZb~*(Z?+b)-Raz;0D(rMJUKfcp@2W?G{k~0@N3Q`4>jx8i(|N#xlff zh;zT;9)wAZ(xV|T8UmvsFd71*Aut*O!zKji*Pe`kwls!zdyZM z+LO5E!3&_V4Ds6NJUA>IM(NQI7!85Z5Eu=C(GVEUApjbC0*%GOT6wT}@Eb27zW(tV zLW9Ps@XdoayoIQP%|}6aqvS{l0bKLo5^qpLVCc?+Gvq>if;!SV*xLW-^WX(&;Sccx zf`oNsPP{{iVPnC{ha2y)$s)vH^WZN&AjGh-VDsQVK4Ozah{5K;IX)xAu(4p}j>H#i zvZRX<>Su$>OpvKCn*4bzjvo=pimyk^WYkz;ej#_?l3IE16FRp zX!6UC2xxc=?s|!SIAkz`y{)Fh0m`5QgzVcfEr!j1Ri= z9fVJ`v)AzGca}N>u6x%wgXzOz|^6a zGZ7HyGf1RC7%+7^G$E`T+7OyhKNzA4RANEQf%W?c?x(}puWgVAF##6B5Z++x_d(JX z;ynBUURVe~+>1`a`gbSz@vDQSmm323)xp<42;x@R{D|-JXQa=fm1n 
zu>J-)>mL*$?MaZ4@aEZ2X;0!>|8M~s%dkuW@*Ic__aKNfibq3WGz3ONU^E0qLtr!n zhE)iF#-Bj5gs}DzjD~qKK^M~Kfz30)#7F6o5dyf@HT;0iL&HJ=wyprSo&dIv0JeU> z464u^O2g6 z4z_kZ`uui)4lD#9u16hR5Kpm*|&Y4nV_WXqO+b`C3>zo9Og`IlnDo21#TfBjJf;u$8Bv z{sk%X+c%)G45=L;hZlg@a1Vkwqj)p~MnhmU1V%$(Gz3ONU|59!sI35M*1%eMu>DvC zb|6s(h6xT38q}@;jsC#eQ7{@77!pnpb+B;|nD{6?GC}}-+!B_aHbB!4EEMt*b5n~M z7#J#`0+3Y0z~BJolS&cl55dfb(aa1S;Bii2Mo?;IU|@yvnJ~sFJ=`FAASBFPnC<-z znAD)4A>)=z46yb)ha;-{(H#lPuL;f&br6z)p##ca;R+K$*EziTxXMNJa3NGKM)*U* z4>q1VyhCl&cjzI2UM|AM*H573tK!V6RFK}B_#_a;Pzt5X5{u$XDht4JAc?ZXoKg@6 zlByUOC{0%tumNX~;S3BP_T!-Kq2h2lEhj&*1VKE@vt9Zx^`(6Wlz!1#@XzzZ^E@g8m41lXh{Ewl>yc-S`YvW3GD8H`O?D=zdCq-I1s-&*gB01!T8m|+IYqMVLro5Z?F*um Date: Thu, 12 Jun 2025 09:45:45 +0000 Subject: [PATCH 6/7] suggestions --- .../plot_post_process_optimization_dataset.py | 43 +++++++++---------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py b/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py index b662cefb75..a39d38715a 100644 --- a/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py +++ b/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py @@ -19,16 +19,15 @@ Post-process an optimization dataset ==================================== """ # %% -# Optimization problems can be exported into an :class:`.OptimizationDataset` which can -# be later be stored as different file types like `csv`. Later on one might want to -# recover the stored data and visualize it. Fortunately |g| allows to use the different -# post-processing algorithms with an :class:`.OptimizationDataset` as the -# post-processing input. +# Optimization problems can be exported as an :class:`.OptimizationDataset`, which can +# later be stored as different file types, such as `csv`. One might later want to +# recover and visualize the stored data. Fortunately, |g| allows one to use different +# post-processing algorithms with an :class:`.OptimizationDataset` as input. # %% -# In this example we illustrate the use of an :class:`.OptimizationDataset` for -# post-processing. The data used to illustrate this, are from an MDO scenario on the -# Power 2 problem. The data have been saved in an HDF5 file. +# In this example, we demonstrate how to use an :class:`.OptimizationDataset` for +# post-processing. The data used are from an MDO scenario on the +# :class:`.Power2` problem. The data has been saved in an HDF5 file. # The post-processing algorithm used for this example is the :class:`.OptHistoryView`. from __future__ import annotations @@ -44,7 +43,7 @@ from gemseo.datasets.optimization_metadata import OptimizationMetadata from gemseo.settings.post import OptHistoryView_Settings # %% -# First we will recover the use case data from a HDF5 file. and convert it into an +# First we will recover the use case data from an HDF5 file, and convert it into an # :class: `.OptimizationProblem`. problem = OptimizationProblem.from_hdf("power2_opt_pb.h5") @@ -55,14 +54,14 @@ problem = OptimizationProblem.from_hdf("power2_opt_pb.h5") dataset = problem.to_dataset(group_functions=True) # %% -# As you can see the argument `group_functions` must be true in order to use the +# As you can see, the argument `group_functions` must be ``True`` in order to use the # post-processing, otherwise, the different functions won't be grouped to their # corresponding optimization function (objective, inequality constraints, equality # constraints, observables). 
# %% -# Now we can execute the post-processing as usual, just that instead of passing as an -# argument a :class:`.Scenario` or a hdf5 file, the argument is the +# Now we can execute the post-processing as usual. The only difference is that, instead +# of passing a :class:`.Scenario` or an HDF5 file as an argument, we pass the # :class:`.OptimizationDataset`. @@ -83,7 +82,7 @@ execute_post( # %% # The advantage of being able to use an :class:`.OptimizationDataset` is that it allows # to use |g| post-processing algorithms using any data. To illustrate this we recover -# the data from a csv file instead of a HDF5 file. +# the data from a csv file instead of an HDF5 file. # First, we will save the previous data in a csv file. dataset.to_csv("results.csv") @@ -97,10 +96,10 @@ print(recovered_dataset.summary) # %% # .. note:: -# Since the data recovered from the csv comes from an existing -# :class:`.OptimizationDataset`, the variables are already grouped. Details on how to -# group the variables in case of importing ungrouped data can be found -# :ref:`here `. +# Since the data recovered from the csv comes from an existing +# :class:`.OptimizationDataset`, the variables are already grouped. Details on how to +# group the variables in case of importing ungrouped data can be found +# :ref:`here `. # %% # In order to use an :class:`.OptimizationDataset` we must attribute some optimization @@ -112,7 +111,7 @@ print(recovered_dataset.summary) # %% -# the field `output_names_to_constraint_names` makes reference to the cases where the +# The field `output_names_to_constraint_names` makes reference to the cases where the # names of functions were changes for a reason or another (like an off-set for example). # the argument takes the shape of a dictionary where the keys are the original # constraint names and the value a list of associated names. For the use case at hand, @@ -131,9 +130,9 @@ for constraint_name in ( optimum_iteration = recovered_dataset.objective_dataset.idxmin(axis=0).values[0] # %% -# The tolerances field is an instance of the :class:`.CosntraintTolerances` model. -# Which must de instantiated with the corresponding values. In this case the default -# values are used +# The tolerances field is an instance of the :class:`.ConstraintTolerances` model. +# Which must be instantiated with the corresponding values. In this case the default +# values are used. tolerances = ConstraintTolerances() @@ -175,7 +174,7 @@ recovered_dataset.misc["optimization_metadata"] = opt_metadata # %% # Given that some post-processing algorithms use the input space of the problem, # attributing the input space of the problem to the dataset can be useful. -# For the power2 problem we know that the input space is :math:`-1.0 < x < 1.0` where +# For the :class:`.Power2` problem we know that the input space is :math:`-1.0 < x < 1.0` where # `x` has 3 components and has initiated with 1.0. 
 
 
-- 
GitLab


From 3c69d4516ac99c143d3ae0423aa1a2db7104149a Mon Sep 17 00:00:00 2001
From: Fabian Castaneda
Date: Thu, 12 Jun 2025 11:02:52 +0000
Subject: [PATCH 7/7] suggestions

---
 .../plot_post_process_optimization_dataset.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py b/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py
index a39d38715a..7679d2494f 100644
--- a/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py
+++ b/doc_src/_examples/post_process/plot_post_process_optimization_dataset.py
@@ -44,7 +44,7 @@ from gemseo.settings.post import OptHistoryView_Settings
 
 # %%
 # First we will recover the use case data from an HDF5 file, and convert it into an
-# :class: `.OptimizationProblem`.
+# :class:`.OptimizationProblem`.
 
 problem = OptimizationProblem.from_hdf("power2_opt_pb.h5")
 
@@ -61,7 +61,7 @@ dataset = problem.to_dataset(group_functions=True)
 
 # %%
 # Now we can execute the post-processing as usual. The only difference is that, instead
-# of passing a :class:`.Scenario` or an HDF5 file as an argument, we pass the
+# of passing a :class:`.BaseScenario` or an HDF5 file as an argument, we pass the
 # :class:`.OptimizationDataset`.
 
 
@@ -81,7 +81,7 @@ execute_post(
 
 # %%
 # The advantage of being able to use an :class:`.OptimizationDataset` is that it allows
-# to use |g| post-processing algorithms using any data. To illustrate this we recover
+# to use |g| post-processing algorithms using any data. To illustrate this, we recover
 # the data from a csv file instead of an HDF5 file.
 # First, we will save the previous data in a csv file.
 dataset.to_csv("results.csv")
@@ -105,15 +105,15 @@ print(recovered_dataset.summary)
 # In order to use an :class:`.OptimizationDataset` we must attribute some optimization
 # metadata to the :class:`.OptimizationDataset`. For this we use the
 # :class:`.OptimizationMetadata` and store it in the attribute :attr:`.misc` of the
-# dataset under the key "optimization_metadata".
+# dataset under the key ``"optimization_metadata"``.
 # Some optimization metadata can be recovered from the dataset itself, but overall,
 # it requires to have knowledge of the problem.
 
 
 # %%
 # The field `output_names_to_constraint_names` makes reference to the cases where the
-# names of functions were changes for a reason or another (like an off-set for example).
-# the argument takes the shape of a dictionary where the keys are the original
+# names of functions were changed for one reason or another (like an offset, for example).
+# The argument takes the shape of a dictionary where the keys are the original
 # constraint names and the value a list of associated names. For the use case at hand,
 # there is no name change so the associated constraint names are the names themselves.
 output_names_to_constraint_names = {}
@@ -137,7 +137,7 @@ optimum_iteration = recovered_dataset.objective_dataset.idxmin(axis=0).values[0
 tolerances = ConstraintTolerances()
 
 # %%
-# The last important data to be determine is the point feasibility. This can be
+# The last important data to be determined is the point feasibility. This can be
 # predetermined and stored in the csv file. In this case, we determine the feasibility
 # using the tolerances to create a mask.
-- 
GitLab