#!/usr/bin/env python
# coding: utf-8

# # How to add new gauge-optimizations to GST results
# This example demonstrates how to take a previously computed `Results` object and add a new gauge-optimized model to one of its estimates.  First, let's "pre-compute" a `Results` object using `do_long_sequence_gst`, which contains a single `Estimate` called "default":

# In[1]:


from __future__ import print_function
import pygsti, pickle
from pygsti.construction import std1Q_XYI


# In[2]:


#Generate some fake data and run GST on it.
target_model = std1Q_XYI.target_model()
mdl_datagen = std1Q_XYI.target_model().depolarize(op_noise=0.1, spam_noise=0.001)
listOfExperiments = pygsti.construction.make_lsgst_experiment_list(
    target_model, std1Q_XYI.fiducials, std1Q_XYI.fiducials, std1Q_XYI.germs, [1,2,4])
ds = pygsti.construction.generate_fake_data(mdl_datagen, listOfExperiments, nSamples=1000,
                                            sampleError="binomial", seed=1234)
target_model.set_all_parameterizations("TP")
results = pygsti.do_long_sequence_gst(ds, target_model, std1Q_XYI.fiducials, std1Q_XYI.fiducials,
                                      std1Q_XYI.germs, [1,2,4],
                                      gaugeOptParams={'itemWeights': {'gates': 1, 'spam': 1}},
                                      verbosity=1)

with open("example_files/regaugeopt_result.pkl","wb") as f:
    pickle.dump(results, f)  # pickle the results, to mimic a typical workflow


# Next, let's load in the pre-computed results and use the `add_gaugeoptimized` method of the `pygsti.objects.Estimate` object to add a new gauge-optimized version of the (gauge un-fixed) model estimate stored in `my_results.estimates['default']`.  The first argument of `add_gaugeoptimized` is just a dictionary of arguments to `pygsti.gaugeopt_to_target` **except** that you don't need to specify the `Model` to gauge optimize or the target `Model` (just like the `gaugeOptParams` argument of `do_long_sequence_gst`).  The optional `label` argument defines the key name for the gauge-optimized `Model` and the corresponding parameter dictionary within the `Estimate`'s `.models` and `.goparameters` dictionaries, respectively.
# In[3]:


my_results = pickle.load(open("example_files/regaugeopt_result.pkl","rb"))


# In[4]:


estimate = my_results.estimates['default']
estimate.add_gaugeoptimized( {'itemWeights': {'gates': 1, 'spam': 0.001}}, label="Spam 1e-3" )
mdl_gaugeopt = estimate.models['Spam 1e-3']

print(list(estimate.goparameters.keys()))  # 'go0' is the default gauge-optimization label
print(mdl_gaugeopt.frobeniusdist(estimate.models['target']))


# One can also perform the gauge optimization separately and specify it using the `model` argument (this is useful when you want or need to compute the gauge optimization elsewhere):

# In[5]:


mdl_unfixed = estimate.models['final iteration estimate']
mdl_gaugefixed = pygsti.gaugeopt_to_target(mdl_unfixed, estimate.models['target'],
                                           {'gates': 1, 'spam': 0.001})
estimate.add_gaugeoptimized( {'any': "dictionary", "doesn't really": "matter",
                              "but could be useful if you put gaugeopt params": 'here'},
                             model=mdl_gaugefixed, label="Spam 1e-3 custom" )

print(list(estimate.goparameters.keys()))
print(estimate.models['Spam 1e-3 custom'].frobeniusdist(estimate.models['Spam 1e-3']))


# You can look at the gauge optimization parameters using `.goparameters`:

# In[6]:


import pprint
pp = pprint.PrettyPrinter()
pp.pprint(dict(estimate.goparameters['Spam 1e-3']))


# Finally, note that if, in the original call to `do_long_sequence_gst`, you set **`gaugeOptParams=False`**, then no gauge optimization is performed (there is no "`go0`" element) and you start with a blank slate, free to perform whatever gauge optimizations you want on your own.
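# As a minimal sketch of that "blank slate" workflow (illustrative only -- this cell re-runs GST on the same data, and the names `results_nogo`, `est_nogo`, and the label `"my_gauge_opt"` are placeholders not used elsewhere in this example):

# In[ ]:


#Run GST with gaugeOptParams=False so no 'go0' gauge optimization is stored,
#then add whatever gauge optimization you like afterwards.
results_nogo = pygsti.do_long_sequence_gst(ds, target_model, std1Q_XYI.fiducials, std1Q_XYI.fiducials,
                                           std1Q_XYI.germs, [1,2,4],
                                           gaugeOptParams=False, verbosity=1)
est_nogo = results_nogo.estimates['default']
print(list(est_nogo.goparameters.keys()))  # expect an empty list -- no 'go0' entry was created

est_nogo.add_gaugeoptimized({'itemWeights': {'gates': 1, 'spam': 0.001}}, label="my_gauge_opt")
print(list(est_nogo.goparameters.keys()))  # now contains only 'my_gauge_opt'


# In[ ]: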