#!/usr/bin/env python
# coding: utf-8

# # Interacting with PyBOP optimisers
#
# This notebook introduces two interfaces to interact with PyBOP's optimiser classes.
#
# ### Set the Environment

# In[ ]:

# Notebook-only magics: install dependencies into the running kernel.
get_ipython().run_line_magic('pip', 'install --upgrade pip ipywidgets -q')
get_ipython().run_line_magic('pip', 'install pybop -q')

# Import the necessary libraries
import numpy as np

import pybop

# Use the notebook-connected renderer so Plotly figures display inline.
pybop.PlotlyManager().pio.renderers.default = "notebook_connected"

# Let's fix the random seed in order to generate consistent output during
# development, although this does not need to be done in practice.

# In[ ]:

np.random.seed(8)

# ## Setup the model, problem, and cost
#
# The code block below sets up the model, problem, and cost objects. For more
# information on this process, take a look at other notebooks in the examples
# directory.

# In[ ]:

# Load the parameters
parameter_set = pybop.ParameterSet(
    json_path="../scripts/parameters/initial_ecm_parameters.json"
)
parameter_set.import_parameters()

# Define the model: a Thevenin equivalent-circuit model with a single RC element
model = pybop.empirical.Thevenin(
    parameter_set=parameter_set, options={"number of rc elements": 1}
)

# Define the parameter to be fitted: the series resistance R0
parameters = pybop.Parameter(
    "R0 [Ohm]",
    prior=pybop.Gaussian(0.0002, 0.0001),
    bounds=[1e-4, 1e-2],
)

# Generate synthetic data by simulating the model over a 15-minute window
t_eval = np.arange(0, 900, 2)
values = model.predict(t_eval=t_eval)

# Form dataset from the simulated current and voltage signals
dataset = pybop.Dataset(
    {
        "Time [s]": t_eval,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": values["Voltage [V]"].data,
    }
)

# Construct problem and cost
problem = pybop.FittingProblem(model, parameters, dataset)
cost = pybop.SumSquaredError(problem)

# ## Interacting with the Optimisers
#
# Now that we have set up the required objects, we can introduce the two
# interfaces for interacting with PyBOP optimisers. These are:
#
# 1. The direct optimiser (e.g. `pybop.XNES`)
# 2. The optimisation class (i.e. `pybop.Optimisation`)
#
# These two methods provide two equivalent ways of interacting with PyBOP's
# optimisers.
# The first method provides a direct way to select the optimiser, with the
# second method being a more general method with a default optimiser
# (`pybop.XNES`) set if you don't provide an optimiser.
#
# First, the direct interface is presented. With this interface the user can
# select from the [list of optimisers](https://github.com/pybop-team/PyBOP?tab=readme-ov-file#supported-methods)
# supported in PyBOP and construct them directly. Options can be passed as
# kwargs, or through get() / set() methods in the case of PINTS-based
# optimisers.

# In[ ]:

optim_one = pybop.XNES(
    cost, max_iterations=50
)  # Direct optimiser class with options as kwargs
optim_one.set_max_iterations(
    50
)  # Alternative set() / get() methods for PINTS optimisers
x1, final_cost = optim_one.run()

# Next, the `Optimisation` interface is less direct than the previous one, but
# provides a single class to work with across PyBOP workflows. The options are
# passed the same way as the above method, through kwargs or get() / set()
# methods.

# In[ ]:

optim_two = pybop.Optimisation(
    cost, optimiser=pybop.XNES, max_iterations=50
)  # Optimisation class with options as kwargs
optim_two.set_max_iterations(
    50
)  # Alternative set() / get() methods for PINTS optimisers
x2, final_cost = optim_two.run()

# We can show the equivalence of these two methods by comparing the optimiser
# objects:

# In[ ]:

isinstance(optim_one, type(optim_two.optimiser))

# For completeness, we can show the optimiser solutions:

# In[ ]:

print("Estimated parameters x1:", x1)
print("Estimated parameters x2:", x2)

# ## Closing Comments
#
# As both of these APIs provide access to the same optimisers, please use
# either as you prefer. A couple of things to note:
#
# - If you are using a SciPy-based optimiser (`pybop.SciPyMinimize`,
#   `pybop.SciPyDifferentialEvolution`), the `set()` / `get()` methods for the
#   optimiser options are not currently supported. These optimisers require
#   options to be passed as kwargs.
# - The optimiser passed to `pybop.Optimisation` must be an optimiser class (e.g. `pybop.XNES`), not an already-constructed instance.