#!/usr/bin/env python
# coding: utf-8

# # Image Subtraction Module
#
# **Lecturer:** Christoffer Fremling
# **Jupyter Notebook Authors:** Igor Andreoni, Christoffer Fremling and Cameron Hummels
#
# This is a Jupyter notebook lesson taken from the GROWTH Summer School 2020. For other lessons and their accompanying lectures, please see: http://growth.caltech.edu/growth-school-2020.html
#
# ## Objective
# Learn how to perform image subtraction to discover astronomical transients from multiple consecutive images.
#
# ## Key steps
# - Register the science and reference images (align them and make them the same size)
# - PSF extraction, using PSFex
# - PSF matching by convolution
# - [Zero-point calibration]
# - Image subtraction
#
# ## Required dependencies
#
# See the GROWTH school webpage for detailed instructions on how to install these modules and packages. Nominally, you should be able to install the Python modules with `pip install `. The external AstrOmatic packages are most easily installed using package managers (e.g., `rpm`, `apt-get`).
#
# ### Python modules
# * python 3
# * astropy
# * numpy
# * scipy
# * matplotlib
# * pytest
# * photutils
#
# ### External packages
# * SWarp https://www.astromatic.net/software
# * SExtractor https://www.astromatic.net/software
# * PSFex https://www.astromatic.net/software
# * ds9 http://ds9.si.edu/site/Home.html

# In[ ]:


# Import the relevant packages
import numpy as np
from astropy.io import fits   # FITS file handling
import os                     # Call commands from outside Python
from astropy.io import ascii  # Read/write ASCII files

# Useful to smooth the images with a Gaussian kernel before the subtraction
from scipy.signal import convolve as scipy_convolve
from astropy.convolution import convolve
from astropy.convolution import Gaussian2DKernel
from astropy.stats import sigma_clipped_stats
from astropy.coordinates import SkyCoord
from astropy import units as u

# Background subtraction
from astropy.stats import SigmaClip
from photutils import Background2D, MedianBackground
from photutils import make_source_mask

# Image registration
from image_registration import chi2_shift
from image_registration.fft_tools import shift
import scipy
from scipy import ndimage, misc
import numpy.fft

# Plot
import matplotlib.pyplot as plt

# Running external programs
import subprocess
import shutil


# ## Test dependencies
#
# In order for this Jupyter notebook to function correctly, we must have some external software installed, as described above. The following step ensures that these programs are installed properly before we get to the rest of the lesson.

# In[ ]:


def test_dependency(dep, alternate_name=None):
    """
    Test an external dependency by trying to run it as a subprocess
    """
    try:
        subprocess.check_output(dep, stderr=subprocess.PIPE, shell=True)
        print("%s is installed properly as %s. OK" % (dep, dep))
        return 1
    except subprocess.CalledProcessError:
        try:
            subprocess.check_output(alternate_name, stderr=subprocess.PIPE, shell=True)
            print("%s is installed properly as %s. OK" % (dep, alternate_name))
            return 1
        except subprocess.CalledProcessError:
            print("===%s/%s IS NOT YET INSTALLED PROPERLY===" % (dep, alternate_name))
            return 0


dependencies = [('sextractor', 'sex'), ('SWarp', 'swarp'), ('psfex', 'PSFEx'), ('ds9', 'DS9')]
i = 0
for dep_name1, dep_name2 in dependencies:
    i += test_dependency(dep_name1, dep_name2)
print("%i out of %i external dependencies installed properly.\n" % (i, len(dependencies)))
if i != len(dependencies):
    print("Please correctly install these programs before continuing by following the instructions in README.md.")
else:
    print("You are ready to continue.")


# ## Set up paths and clear old temp files

# In[ ]:


# Remove temporary FITS files from the current working directory
for f in os.listdir():
    if f.endswith('.fits'):
        os.remove(f)

# Set up the directory structure
cwd = os.getcwd()
print("You start from the directory:", cwd)
print("If you are not in the image_subtraction/ directory, use os.chdir() to get there")
proc_dir = os.path.join(cwd, 'processed')
data_dir = os.path.join(cwd, 'data')
out_dir = os.path.join(proc_dir, 'out')
if os.path.isdir(proc_dir):
    shutil.rmtree(proc_dir)
os.mkdir(proc_dir)
for f in os.listdir(data_dir):
    shutil.copy2(os.path.join(data_dir, f), os.path.join(proc_dir, f))
os.chdir(proc_dir)
print("You are working in the image_subtraction/processed/ directory")
print("Full path:", proc_dir)


# ## Reference and science images
#
# Define the reference and science images. Open them with ds9 to give them a look.
# Also, what is the size of the images in pixels? This information will be useful when we align the images with SWarp.

# In[ ]:


# Reference image
ref_image_name = os.path.join(data_dir, 'refimg_i.fits')
ref_image = fits.open(ref_image_name)

# Plot up the reference image
mean, median, std = sigma_clipped_stats(ref_image[0].data)
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
plt.imshow(ref_image[0].data, vmin=median-2*std, vmax=median+2*std)
plt.colorbar()
plt.title('Reference image')
plt.show()

# Image size?
print("The dimension of the X axis of the reference image is ")
print(ref_image[0].header["NAXIS1"])
print("The dimension of the Y axis of the reference image is ")
print(ref_image[0].header["NAXIS2"])


# ### Exercise:
# Let's do the same for the science image. Can you already spot the supernova?

# In[ ]:


# Science image
sci_image_name = os.path.join(data_dir, '20120419094409p.fits')
sci_image = fits.open(sci_image_name)

# Open the images with ds9 - uncomment the following line, or open them from the ds9 app
##os.system('ds9 -zscale '+sci_image_name +' ' + ref_image_name +'&')

# Plot up the science image
mean, median, std = ???
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
???
plt.title('Science image')
plt.show()

# Image size?
print("The dimension of the X axis of the science image is ")
???
print("The dimension of the Y axis of the science image is ")
???


# ## Align the images
#
# Use the AstrOmatic SWarp package to align the images. SWarp relies on the astrometric information in the images (in other words, on the sky coordinates), so both the science and reference images must be astrometrically calibrated (for example, with the AstrOmatic SCAMP package). In this module we assume that the input images are already calibrated.

# In[ ]:


# SWarp command
try:
    command = "SWarp %s %s -c %s -SUBTRACT_BACK N -RESAMPLE Y -RESAMPLE_DIR . -COMBINE N -IMAGE_SIZE 1800,900" % (sci_image_name, ref_image_name, os.path.join(data_dir, 'config.swarp'))
    print('Executing command: %s' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run SWarp with exit error %s' % err)

# Names of the aligned images
sci_image_aligned_name = sci_image_name.replace(".fits", ".resamp.fits").replace('data', 'processed')
ref_image_aligned_name = ref_image_name.replace(".fits", ".resamp.fits").replace('data', 'processed')


# ### Exercise:
# If we attempt an image subtraction now, what does the result look like?

# In[ ]:


# Test image subtraction:
ref_image_aligned = fits.open(ref_image_aligned_name)
hdr_ref = ref_image_aligned[0].header  # save the FITS header
sci_image_aligned = fits.open(sci_image_aligned_name)
hdr_sci = sci_image_aligned[0].header  # save the FITS header

# Perform the image subtraction
image_sub = ???
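# One possible answer to the exercise above (a sketch only): take a direct
# pixel-by-pixel difference of the two resampled frames, e.g.
#   image_sub = sci_image_aligned[0].data - ref_image_aligned[0].data
# No background matching, PSF matching or flux scaling has been applied yet,
# so expect strong residuals around every star and around the host galaxy.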
hdu_image_sub = fits.PrimaryHDU(image_sub)
hdu_image_sub.writeto("sub_test_0.fits", overwrite=True)

# Plot up the result of the image subtraction
mean, median, std = sigma_clipped_stats(hdu_image_sub.data)
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
plt.imshow(hdu_image_sub.data, vmin=median-2*std, vmax=median+2*std, cmap='gray')
plt.colorbar(shrink=0.4)
plt.title('Test image subtraction')
plt.show()


# ## Background subtraction
# - Mask the sources in the images
# - Use 3-sigma clipping to filter the data and accurately measure the background
# - Then estimate the background in boxes across the image (200x150 pixels here) and smooth the background map with a 3x3 median filter

# In[ ]:


# Background subtraction.
# Mask the sources
mask_sci = make_source_mask(sci_image_aligned[0].data, snr=2, npixels=3, dilate_size=11)
mask_ref = make_source_mask(ref_image_aligned[0].data, snr=2, npixels=3, dilate_size=11)

sci_image_aligned_name = os.path.join(proc_dir, "bg_sub_sci.fits")
ref_image_aligned_name = os.path.join(proc_dir, "bg_sub_ref.fits")

# Remove temporary FITS files
if os.path.exists(sci_image_aligned_name):
    os.remove(sci_image_aligned_name)
if os.path.exists(ref_image_aligned_name):
    os.remove(ref_image_aligned_name)

# Estimate the background
sigma_clip = SigmaClip(sigma=3)  # Sigma clipping
bkg_estimator = MedianBackground()
bkg_sci = Background2D(sci_image_aligned[0].data, (200, 150), filter_size=(3, 3),
                       sigma_clip=sigma_clip, bkg_estimator=bkg_estimator, mask=mask_sci)
bkg_ref = Background2D(ref_image_aligned[0].data, (200, 150), filter_size=(3, 3),
                       sigma_clip=sigma_clip, bkg_estimator=bkg_estimator, mask=mask_ref)

# Remove the background from the science image
sci_image_aligned[0].data = sci_image_aligned[0].data - bkg_sci.background
hdu_image_sub = fits.PrimaryHDU(sci_image_aligned[0].data)
hdu_image_sub.writeto(sci_image_aligned_name)

# Remove the background from the reference image
ref_image_aligned[0].data = ref_image_aligned[0].data - bkg_ref.background
hdu_image_sub = fits.PrimaryHDU(ref_image_aligned[0].data)
hdu_image_sub.writeto(ref_image_aligned_name)


# What do the background-subtracted images look like?

# In[ ]:


# Display with ds9
try:
    command = "ds9 %s %s" % (sci_image_aligned_name, ref_image_aligned_name)
    print('Executing command: %s' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run ds9 with exit error %s' % err)

# Plot the background image here
plt.imshow(bkg_sci.background, origin='lower', cmap='Greys_r')
#plt.imshow(sci_image_aligned[0].data-bkg_sci.background, origin='lower', cmap='Greys_r')


# ## PSF matching
#
# The atmosphere heavily affects the PSF of the images by setting the "seeing" conditions. The seeing for ground-based optical telescopes is usually measured as the FWHM of the imaging PSF. Properties of the atmosphere can change very rapidly, so it is rare that the science and reference images are characterized by the same seeing. Their PSFs are therefore usually different, which is a problem for image subtraction.
#
#
# ### Generate the kernel for the convolution
#
# The PSFs of the science and reference images can be matched in several different ways. Here we start by running a first source extraction on the science image. We can use the resulting source catalogs for two main purposes:
# 1. Measure the PSF of the science frame, using PSFex or photutils
# 2. Obtain instrumental magnitudes that will be the basis for the zero-point calibration (see the Photometry module).

# In[ ]:


if os.path.exists('prepsfex.cat'):  # Remove possible temporary files
    os.remove("prepsfex.cat")

try:
    command = "sextractor %s -c %s -CATALOG_NAME %s -MAG_ZEROPOINT 25.0" % (sci_image_aligned_name, os.path.join(data_dir, 'prepsfex.sex'), os.path.join(proc_dir, 'prepsfex.cat'))
    print('Executing command: %s\n' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run SExtractor with exit error %s' % err)


# Now we use another program from the AstrOmatic suite, PSFex, to measure the PSF of the science image. PSFex estimates the PSF based on the information present in the catalog generated with SExtractor. Then, let's plot the PSF model obtained with PSFex.

# In[ ]:


# Run PSFex to compute the PSF, then read and display the final model; the output needs to go to the "out" dir.
if not os.path.isdir('out'):
    os.mkdir('out')

try:
    command = "psfex prepsfex.cat -c psfex_conf.psfex"
    print('Executing command: %s\n' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run psfex with exit error %s' % err)

psf_sci_image_name = os.path.join(out_dir, 'proto_prepsfex.fits')
print(psf_sci_image_name)
psf_sci_image = fits.open(psf_sci_image_name)
plt.imshow(psf_sci_image[0].data[0], cmap='gray')


# ### Convolve the reference image with the PSF of the science image
# Now that the kernel is generated, let's convolve the reference image with the PSF of the science frame.

# In[ ]:


# Convolve the reference image with the PSF of the science frame
if os.path.exists(os.path.join(proc_dir, 'ref_convolved.fits')):
    os.remove(os.path.join(proc_dir, 'ref_convolved.fits'))

kernel_sci = psf_sci_image[0].data[0]
ref_image_aligned = fits.open(ref_image_aligned_name)
ref_conv = scipy_convolve(ref_image_aligned[0].data, kernel_sci, mode='same', method='fft')

# Create a new FITS file for the convolved image
hdu_ref_conv = fits.PrimaryHDU(ref_conv, hdr_ref)
hdu_ref_conv.writeto(os.path.join(proc_dir, "ref_convolved.fits"))

# Plot up the convolved reference image
mean, median, std = sigma_clipped_stats(hdu_ref_conv.data)
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
plt.imshow(hdu_ref_conv.data, vmin=median-2*std, vmax=median+2*std)
plt.colorbar(shrink=0.4)
plt.title('Convolved reference image')
plt.show()


# ### Convolve the science image with the PSF of the reference image
#
# ### Exercise:
#
# Same as above, but this time we generate a kernel with the properties of the PSF of the reference image. Then, we convolve the science image with this kernel.

# In[ ]:


# SExtractor command for the reference image
if os.path.exists('prepsfex.cat'):
    os.remove("prepsfex.cat")

try:
    command = "???"
    print('Executing command: %s\n' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run SExtractor with exit error %s' % err)


# In[ ]:


# Run PSFex to compute the PSF, then read and display the final model
if os.path.exists(os.path.join(out_dir, 'proto_prepsfex.fits')):
    os.remove(os.path.join(out_dir, 'proto_prepsfex.fits'))

try:
    command = "???"
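    # A possible answer (a sketch only): mirror the two calls used for the
    # science image above, but point SExtractor at the aligned reference image
    # before re-running PSFex on the resulting catalog, i.e. in the cell above
    #   command = "sextractor %s -c %s -CATALOG_NAME %s -MAG_ZEROPOINT 25.0" % (ref_image_aligned_name, os.path.join(data_dir, 'prepsfex.sex'), os.path.join(proc_dir, 'prepsfex.cat'))
    # and here
    #   command = "psfex prepsfex.cat -c psfex_conf.psfex"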
    print('Executing command: %s\n' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run psfex with exit error %s' % err)

psf_ref_image_name = os.path.join(out_dir, 'proto_prepsfex.fits')
psf_ref_image = fits.open(psf_ref_image_name)
plt.imshow(psf_ref_image[0].data[0], cmap='gray')


# In[ ]:


kernel_ref = psf_ref_image[0].data[0]

# Read the SCIENCE image and convolve it with the PSF of the reference frame
sci_image_aligned = fits.open(sci_image_aligned_name)
sci_conv = ???

# Create a new FITS file for the convolved image
hdu_sci_conv = fits.PrimaryHDU(sci_conv, hdr_sci)
hdu_sci_conv.writeto("sci_convolved.fits", overwrite=True)

# Plot up the convolved science image
mean, median, std = sigma_clipped_stats(hdu_sci_conv.data)
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
plt.imshow(hdu_sci_conv.data, vmin=median-2*std, vmax=median+2*std)
plt.colorbar(shrink=0.4)
plt.title('Convolved science image')
plt.show()


# ### Improving the alignment
# Now that the science image is convolved with (an approximation of) the PSF of the reference image,
# and the reference image is convolved with the PSF of the science image, we can perform the image subtraction.
#
# - Before the subtraction we use an FFT-based method (`chi2_shift`) to fine-tune the alignment of the reference and science images

# In[ ]:


# Fine-tune the image alignment
xoff, yoff, exoff, eyoff = chi2_shift(ref_conv, sci_conv, 10, return_error=True, upsample_factor='auto')
print("Alignment offsets:", xoff, yoff)
sci_conv_shift = scipy.ndimage.shift(sci_conv, [-yoff, -xoff], order=3, mode='reflect', cval=0.0, prefilter=True)


# ## Normalization of the images
#
# The science and reference images are usually obtained with different exposure times. In addition, the reference image can be a stack of several images to increase the depth. Finally, different CCDs of the same camera (or even different regions of the same CCD, when multiple amplifiers are present) may have slightly different gains.
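#
# (Schematically, and only as a sketch: for non-variable stars we expect sci ≈ c × ref for some constant c that absorbs the exposure-time, stacking and gain differences, so the final difference image should be formed as sci − c × ref, or equivalently sci / c − ref. The next paragraph and the exercise below show one way to estimate c from matched star fluxes.)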
#
# The background subtraction should have removed the non-linear offsets between the science and reference images. We can therefore normalize the two images by computing the ratio of bright-star fluxes in the two images. Once again, we use SExtractor to extract the fluxes and other quantities.

# In[ ]:


# Normalize the images using the stars in the field
# Run SExtractor on the science image
sextractor_command = "sextractor sci_convolved.fits -c prepsfex.sex -CATALOG_NAME sci_match.cat -MAG_ZEROPOINT 25.0 -CATALOG_TYPE=ASCII_HEAD"
try:
    command = sextractor_command
    print('Executing command: %s\n' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run SExtractor with exit error %s' % err)

# Read in the SExtractor output catalog
cat_sci = ascii.read('sci_match.cat')

# Run SExtractor on the reference image
sextractor_command = "sextractor ref_convolved.fits -c prepsfex.sex -CATALOG_NAME ref_match.cat -MAG_ZEROPOINT 25.0 -CATALOG_TYPE=ASCII_HEAD"
try:
    command = sextractor_command
    print('Executing command: %s\n' % command)
    rval = subprocess.run(command.split(), check=True)
    print('Success!')
except subprocess.CalledProcessError as err:
    print('Could not run SExtractor with exit error %s' % err)

# Read in the SExtractor output catalog
cat_ref = ascii.read('ref_match.cat')


# Match the source catalogs of the reference and science images. Calculate the ratio of the flux of each source in the science image to the flux of the matching source in the reference image.

# In[ ]:


c1 = SkyCoord(ra=cat_sci['X_WORLD'], dec=cat_sci['Y_WORLD'])
c2 = SkyCoord(ra=cat_ref['X_WORLD'], dec=cat_ref['Y_WORLD'])
idx, d2d, d3d = c1.match_to_catalog_3d(c2)

# Initialize a list for the indexes and one for the flux ratios
index_arr = []
ratio_arr = []
for i, i2, d in zip(idx, np.arange(len(d2d)), d2d):
    #print(i, d)
    index_arr.append(i)
    print("Image coordinates")
    print(cat_ref['X_IMAGE'][i], cat_ref['Y_IMAGE'][i], ' ', cat_sci['X_IMAGE'][i2], cat_sci['Y_IMAGE'][i2])
    print('Fluxes and flux ratio')
    print(cat_ref['FLUX_AUTO'][i], cat_sci['FLUX_AUTO'][i2], cat_sci['FLUX_AUTO'][i2] / cat_ref['FLUX_AUTO'][i])
    ratio_arr.append(cat_sci['FLUX_AUTO'][i2] / cat_ref['FLUX_AUTO'][i])


# ### Exercise:
#
# 1. Find the scaling factor
# 2. Rescale the science image and perform the image subtraction.
# 3. Plot the results

# In[ ]:


# Find the scaling factor
scale = ???
print("The scaling factor is", scale)


# In[ ]:


# Rescale the science image and perform the image subtraction.
image_sub = ???
hdu_image_sub = fits.PrimaryHDU(image_sub)
hdu_image_sub.writeto("sub_final.fits")


# Plot the results...

# In[ ]:


# Plot up the result of the image subtraction
# Tip: it should look very similar to the other two plots below..
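# Possible answers to the exercise above (a sketch only, assuming a single
# multiplicative scale factor is adequate across the field; the median of the
# flux ratios is a robust choice):
#   scale = np.median(ratio_arr)
#   image_sub = sci_conv_shift / scale - ref_conv
# and for this plot, mirroring the two plotting cells below:
#   mean, median, std = sigma_clipped_stats(image_sub)
#   plt.figure(figsize=(8, 8))
#   plt.imshow(image_sub[200:600, 1100:1600], vmin=median-3*std, vmax=median+3*std, cmap='gray')
#   plt.colorbar(shrink=0.4)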
???
plt.title('Final image subtraction')
plt.show()

# ...and plot up the same region of sky in the science and template images (nothing to do here for you)
mean, median, std = sigma_clipped_stats(sci_conv_shift)
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
plt.imshow(sci_conv_shift[200:600, 1100:1600], vmin=median-3*std, vmax=median+3*std, cmap='gray')
plt.plot([280, 310], [115, 115], "r-")
plt.plot([258, 258], [95, 65], "r-")
plt.colorbar(shrink=0.4)
plt.title('Science image')
plt.show()

mean, median, std = sigma_clipped_stats(ref_conv)
plt.figure(figsize=(8, 8))
# Set the scale of the image based on its statistics
plt.imshow(ref_conv[200:600, 1100:1600], vmin=median-3*std, vmax=median+3*std, cmap='gray')
plt.plot([280, 310], [115, 115], "r-")
plt.plot([258, 258], [95, 65], "r-")
plt.colorbar(shrink=0.4)
plt.title('Reference image')
plt.show()

# ds9 visualization
#os.system('ds9 '+sci_image_aligned_name +' ' + ref_image_aligned_name + ' sub_final.fits &')


# There you go: now you should be able to easily spot the supernova as a white excess on the grey background!
#
# Note that the bright center of the host galaxy was not perfectly subtracted and left a spurious signal that could be mistaken for real luminosity variability.
#
#
# These operations can be made automatic and incorporated into pipelines that discover transients. Moreover, using the methods learnt in the Photometry module, you can perform forced PSF photometry on the subtracted image to obtain a flux measurement of the transient that is free from host-galaxy contamination.
#
#
# # END OF THE SCHOOL MODULE

# ### HOTPANTS
#
# Some packages perform most of the operations above automatically. One of the most popular is called HOTPANTS, which stands for "High Order Transform of PSF ANd Template Subtraction".
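#
# As a sketch only (this assumes the `hotpants` binary is installed and uses hypothetical file names for a science/reference pair already resampled to the same pixel grid), a minimal invocation from the command line looks like `hotpants -inim sci.fits -tmplim ref.fits -outim diff.fits`, where `-inim` is the input (science) image, `-tmplim` the template (reference) image, and `-outim` the output difference image.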
#
# The code can be found on GitHub: https://github.com/acbecker/hotpants
#
# ### ZOGY
#
# A newer image-subtraction algorithm, nicknamed "ZOGY" (an acronym formed from the authors' surnames), was developed more recently. The Zwicky Transient Facility and other optical time-domain surveys now employ the ZOGY algorithm for image subtraction.
#
# Reference:
# Zackay, Ofek & Gal-Yam 2016, The Astrophysical Journal, 830, 27

# In[ ]:
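
# Schematically (up to normalization conventions; see the reference above for the precise definitions and the accompanying detection statistic $S$), ZOGY builds the "proper" difference image in Fourier space. With hats denoting Fourier transforms, $N$ and $R$ the new and reference images, $P_n$ and $P_r$ their PSFs, $F_n$ and $F_r$ their flux zero points, and $\sigma_n$ and $\sigma_r$ their background noise:
#
# $$\widehat{D} = \frac{F_r \widehat{P_r}\,\widehat{N} - F_n \widehat{P_n}\,\widehat{R}}{\sqrt{\sigma_n^2 F_r^2 |\widehat{P_r}|^2 + \sigma_r^2 F_n^2 |\widehat{P_n}|^2}}$$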