#| default_exp utils
#| export
import pickle
from pathlib import Path
from typing import Union

import torch
#| export
def store_variables(pkl_fn:Union[str, Path],   # Destination pickle file (path or string)
                    size:list,                 # Image/crop size value to persist
                    reorder:bool,              # Reorder flag to persist
                    resample:Union[int, list], # Resampling setting (single mode or per-item list)
                    ) -> None:
    '''Save variable values in a pickle file.

    The three values are stored as a single list `[size, reorder, resample]`
    so they can be restored with `load_variables`.
    '''
    # Tuple annotations like `(str, Path)` are not valid PEP 484 hints;
    # `Union[...]` expresses the same intent and is understood by type checkers.
    var_vals = [size, reorder, resample]
    with open(pkl_fn, 'wb') as f:
        pickle.dump(var_vals, f)
#| export
def load_variables(pkl_fn  # Filename of the pickle file
                   ):
    '''Restore variable values previously saved with `store_variables`.

    Returns: the pickled list of variable values, unchanged.
    '''
    # NOTE: pickle is only safe on files this project wrote itself;
    # never load a pickle from an untrusted source.
    return pickle.loads(Path(pkl_fn).read_bytes())
#| export
def print_colab_gpu_info():
    '''Check if we have a GPU attached to the runtime and print the result.'''
    # Happy path first: a CUDA device is visible, nothing else to do.
    if torch.cuda.is_available():
        print('GPU attached.')
        return
    # No GPU: print step-by-step instructions, framed by a '#' banner.
    banner = '#' * 80
    steps = ("Remember to attach a GPU to your Colab Runtime:"
             "\n1. From the **Runtime** menu select **Change Runtime Type**"
             "\n2. Choose **GPU** from the drop-down menu"
             "\n3. Click **'SAVE'**\n")
    print(f"{banner}\n{steps}{banner}")