diff --git a/.gitignore b/.gitignore index 2fa44b1..327bea4 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,7 @@ MANIFEST # Sphinx docs/api docs/_build +docs/_static # Eclipse editor project files .project @@ -154,6 +155,7 @@ nosetests.xml coverage.xml *.cover .hypothesis/ +ref_data/ # Translations # ################ diff --git a/CHANGES.rst b/CHANGES.rst index 75d2578..2561a1f 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -5,6 +5,13 @@ Release Notes Version History and Change Log ------------------------------ +Version 2.1.0 +============= +- Updated the Flux PHOTFNU values to match Pandeia +- Updated the background and noise estimates to use Pandeia +- Updated webbpsf to version 1.1 +- Updated pandeia to version 2.0 + Version 2.0.0 ============= diff --git a/docs/basic_tutorial.rst b/docs/basic_tutorial.rst index b1b8ee4..1725fd4 100644 --- a/docs/basic_tutorial.rst +++ b/docs/basic_tutorial.rst @@ -55,7 +55,7 @@ population parameters. In this example, we will create the following: #. A stellar population representing a global cluster with: - * 10000 stars + * 100 stars * An age of 7.5 billion years * A metallicity of -2.0 * A Salpeter IMF with alpha = -2.35 @@ -81,12 +81,16 @@ population parameters. In this example, we will create the following: .. code-block:: python + obs_prefix = 'notebook_example' + obs_ra = 150.0 + obs_dec = -2.5 + from stips.scene_module import SceneModule scm = SceneModule(out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec) stellar_parameters = { - 'n_stars': 10000, + 'n_stars': 100, 'age_low': 7.5e12, 'age_high': 7.5e12, 'z_low': -2.0, @@ -98,11 +102,11 @@ population parameters. In this example, we will create the following: 'distribution': 'invpow', 'radius': 100.0, 'radius_units': 'pc', - 'distance_low': 10.0, - 'distance_high': 10.0, + 'distance_low': 20.0, + 'distance_high': 20.0, 'offset_ra': 0.0, - 'offset_dec':0.0 - } + 'offset_dec': 0.0 + } stellar_cat_file = scm.CreatePopulation(stellar_parameters) print("Stellar population saved to file {}".format(stellar_cat_file)) @@ -113,7 +117,7 @@ population parameters. In this example, we will create the following: 'z_high': 0.2, 'rad_low': 0.01, 'rad_high': 2.0, - 'sb_v_low': 28.0, + 'sb_v_low': 30.0, 'sb_v_high': 25.0, 'distribution': 'uniform', 'clustered': False, @@ -124,8 +128,7 @@ population parameters. In this example, we will create the following: } galaxy_cat_file = scm.CreateGalaxies(galaxy_parameters) - print("Galaxy population saved to file{}".format(galaxy_cat_file)) - + print("Galaxy population saved to file {}".format(galaxy_cat_file)) Creating a STIPS Observation ---------------------------- @@ -137,14 +140,14 @@ will create a single Roman WFI observation. STIPS uses a bit of specialized terminology to describe its observations. In particular: - \• An *observation* is a set of exposures with a single instrument (e.g. Roman WFI), one or +* An *observation* is a set of exposures with a single instrument (e.g. Roman WFI), one or more filters (where each exposure in the observation will be repeated for every included filter), and some number of the instrument's detectors (for WFI, between 1 and 18), where each exposure will be repeated, with the appropriate inter-detector offset, for every included director, a single chosen sky background value, a single exposure time (applied to each exposure in the observation), and one or more offsets. - \• An *offset* is a single telescope pointing. For each offset specified in the observation, +* An *offset* is a single telescope pointing. 
For each offset specified in the observation, an exposure will be created for each detector and each filter at the offset. STIPS may, optionally, create one or more mosaics at each offset, with a single mosaic including all detectors with the same filter. In addition, STIPS can create a single combined @@ -152,39 +155,39 @@ STIPS uses a bit of specialized terminology to describe its observations. In pa In this case, we will create an observation with: - \• Roman WFI F129 + * Roman WFI F129 - \• 1 detector + * 1 detector - \• No distortion + * No distortion - \• Sky background of 0.15 counts/s/pixel + * Sky background of 0.15 counts/s/pixel - \• The ID 1 + * The ID 1 - \• An exposure of 1000 seconds + * An exposure of 1000 seconds We will use a single offset with: - \• An ID of 1 + * An ID of 1 - \• No centering (if an offset is centered, then, for a multi-detector observation, each - detector is centered on the offset co-coordinates individually rather than the instrument - as a whole beinf centered there) + * No centering (if an offset is centered, then, for a multi-detector observation, each + detector is centered on the offset co-coordinates individually rather than the instrument + as a whole beinf centered there) - \• No change in RA, DEC, or PA from the center of the observation + * No change in RA, DEC, or PA from the center of the observation We will use the following residual settings: - \• Flatfield residual: off + * Flatfield residual: off - \• Dark current residual: off + * Dark current residual: off - \• Cosmic ray removal residual: off + * Cosmic ray removal residual: off - \• Poisson noise residual: on + * Poisson noise residual: on - \• Readnoise residual: on + * Readnoise residual: on .. code-block:: python @@ -197,7 +200,7 @@ We will use the following residual settings: 'offset_dec': 0.0, 'offset_pa': 0.0 } - + residuals = { 'residual_flat': False, 'residual_dark': False, diff --git a/docs/bugs.rst b/docs/bugs.rst index 6ac8448..7ee688b 100644 --- a/docs/bugs.rst +++ b/docs/bugs.rst @@ -5,23 +5,23 @@ Known Issues (Pending Validation) Below is a list of known issues with STIPS that are pending validation. Items will be removed from this list as they are addressed in future STIPS updates. - \• The orientation of the image is non-standard, with east pointing to the right of the +* The orientation of the image is non-standard, with east pointing to the right of the image. The relative position of sources is not affected. - \• There exists a function to place extended sources given a set of Sersic parameters, but +* There exists a function to place extended sources given a set of Sersic parameters, but its validity and functionality have not been tested. - \• The flux measurements of sources are only validated in the absence of noise/background. +* The flux measurements of sources are only validated in the absence of noise/background. As such, the net flux of the image may be inaccurate. - \• STIPS interpolates nine PSFs generated by WebbPSF across the detector to the location of the +* STIPS interpolates nine PSFs generated by WebbPSF across the detector to the location of the source being placed. However, there is currently a known offset of 20 pixels (in both the X and Y directions) between the location at which the PSF is interpolated, and the location at which the target is displayed. .. note:: - This does not affect the target position or center on the detector, it only means that, + This does not affect the target position or center on the detector. 
It only means that, if a point source is created with a position of (x, y), it will be placed at location (x, y) but the interpolated PSF shape will be the shape of the PSF at location (x+20, y+20). Because the WFI PSF shape does not vary strongly over this distance, the effect is @@ -29,7 +29,7 @@ removed from this list as they are addressed in future STIPS updates. are required, the user should generate them directly with WebbPSF at the desired pixel location. - \• Bright sources will have large diffraction spikes that are visible beyond 22 pixels away +* Bright sources will have large diffraction spikes that are visible beyond 22 pixels away from the center of the source. A ``psf_bright_limit`` and ``psf_xbright_limit`` (extra-bright limit) magnitude can be specified –– objects brighter than this threshold will be computed out to a radius of 44 (bright) and 88 (extra-bright) pixels, respectively. Note that, @@ -37,7 +37,7 @@ removed from this list as they are addressed in future STIPS updates. roughly four times as long (and extra-bright sources 16 times as long) to compute as standard sources, so these limits should only be set when it matters. - \• The STIPS star generation sub-module (``stips.star_generator``) depends on the ``esutil`` +* The STIPS star generation sub-module (``stips.star_generator``) depends on the ``esutil`` Python module. This module can currently be installed as part of a Conda environment, but can't be installed in a working state from a pip/setup environment. As such, if you are installing STIPS without using Conda, *and* you need to use the star generation sub-module, diff --git a/docs/conf.py b/docs/conf.py index 437559d..ac96f5d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -48,7 +48,7 @@ def setup(app): # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../')) - +''' extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', @@ -59,6 +59,15 @@ def setup(app): 'sphinx_automodapi.automodapi', 'autoapi.extension', ] +''' +extensions = [ + 'sphinx.ext.intersphinx', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'sphinx_rtd_theme', +] numpydoc_show_class_members = False @@ -92,6 +101,7 @@ def setup(app): # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' +#pygments_style = 'default' # -- Options for HTML output -------------------------------------------------- diff --git a/docs/installation.rst b/docs/installation.rst index bade03a..33f82ba 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -9,34 +9,34 @@ in this section along with instructions. STIPS Requirements ################## - \• ``pandeia>=1.7``: Exposure time calculator. +* ``pandeia>=3.0``: Exposure time calculator. - \• ``webbpsf>=1.0.0``: Nancy Grace Roman PSF calculator. STIPS also requires that ``poppy``, a +* ``webbpsf==1.1.1``: Nancy Grace Roman PSF calculator. STIPS also requires that ``poppy``, a support package used by WebbPSF, have version ``>=1.0.3``. - \• ``astropy``: STIPS uses Astropy in order to: +* ``astropy``: STIPS uses Astropy in order to: - \• Read and write FITS files. + * Read and write FITS files. - \• Read and write ASCII tables (specifically in the IPAC format). + * Read and write ASCII tables (specifically in the IPAC format). - \• Generate Sersic profile models (if any are in the generated scene). 
+ * Generate Sersic profile models (if any are in the generated scene). - \• ``montage_wrapper``: STIPS uses ``montage`` to generate mosaics. It is +* ``montage_wrapper``: STIPS uses ``montage`` to generate mosaics. It is only imported if STIPS is asked to generate a multi-detector image. - \• ``numpy``: STIPS uses ``numpy`` extensively for almost everything that it does. +* ``numpy``: STIPS uses ``numpy`` extensively for almost everything that it does. - \• ``photutils``: STIPS uses ``photutils`` to determine the flux inside the half-light radius +* ``photutils``: STIPS uses ``photutils`` to determine the flux inside the half-light radius in generated Sersic profiles. - \• ``synphot>=1.1.1`` and ``stsynphot>=1.1.0``: STIPS uses ``synphot`` and +* ``synphot>=1.1.1`` and ``stsynphot>=1.1.0``: STIPS uses ``synphot`` and ``stsynphot`` to generate bandpasses, count rates, and zero points. Note that the reference data must also be downloaded, as described below in :ref:`downloading-required-ref-data`. - \• ``scipy``: STIPS uses SciPy to manipulate its internal images (zoom and rotate). +* ``scipy``: STIPS uses SciPy to manipulate its internal images (zoom and rotate). - \• ``esutil``: Used for retrieving data from sqlite databases in the form of ``numpy`` arrays. +* ``esutil``: Used for retrieving data from sqlite databases in the form of ``numpy`` arrays. .. warning:: ``esutil`` is not installed by the STIPS ``setup.py`` file because its pip diff --git a/docs/using_stips/catalogue_formats.rst b/docs/using_stips/catalogue_formats.rst index e13bf44..8f41d36 100644 --- a/docs/using_stips/catalogue_formats.rst +++ b/docs/using_stips/catalogue_formats.rst @@ -27,23 +27,23 @@ In FITS tables, metadata is included as header keywords. STIPS Table Formats ------------------- - \• :ref:`phoenix-tables` +* :ref:`phoenix-tables` - \• :ref:`realtime-phoenix-tables` +* :ref:`realtime-phoenix-tables` - \• :ref:`pandeia-tables` +* :ref:`pandeia-tables` - \• :ref:`bc95-catalog` +* :ref:`bc95-catalog` - \• :ref:`internal-catalog` +* :ref:`internal-catalog` - \• :ref:`mixed-catalog` +* :ref:`mixed-catalog` - \• :ref:`multifilter-catalog` +* :ref:`multifilter-catalog` - \• :ref:`generic-catalog` +* :ref:`generic-catalog` - \• Output +* Output .. _phoenix-tables: @@ -58,29 +58,28 @@ combination is available in the grid) or at runtime (if the filter throughput can be calculated but is not present in the grid). A Phoenix table specifies each source with the following columns: - \• ID (used to report the source in output tables) +* ID (used to report the source in output tables) - \• Dataset (all sources within a dataset must have the same age and metallicity) +* Dataset (all sources within a dataset must have the same age and metallicity) - \• RA (decimal degrees) +* RA (decimal degrees) - \• DEC (decimal degrees) +* DEC (decimal degrees) - \• Age (years) +* Age (years) - \• Metallicity ([Fe/H]) +* Metallicity ([Fe/H]) - \• Distance (kpc) +* Distance (kpc) - \• Mass (solar masses) +* Mass (solar masses) - \• Binary status (whether or not the source is a binary companion of the previous - source) +* Binary status (whether or not the source is a binary companion of the previous source) In order to identify a table as a Phoenix table, the following metadata must be present: - \• ``type=phoenix`` +* ``type=phoenix`` Phoenix tables will be converted to the Internal input format by the STIPS Observation Module, and observed sources will be listed in an output table. @@ -96,26 +95,26 @@ generated and observed at runtime. 
As a result, Realtime Phoenix tables are much slower to generate. A Realtime Phoenix table specifies each source with the following columns: - \• ID +* ID - \• RA (decimal degrees) +* RA (decimal degrees) - \• DEC (decimal degrees) +* DEC (decimal degrees) - \• Teff +* Teff - \• Log(g) +* Log(g) - \• Metallicity ([Fe/H]) +* Metallicity ([Fe/H]) - \• Apparent Magnitude +* Apparent Magnitude In order to use a Realtime Phoenix table, the following metadata must be present: - \• ``type=phoenix_realtime`` +* ``type=phoenix_realtime`` - \• ``bandpass`` (this must be set to the bandpass in which the effective magnitude +* ``bandpass`` (this must be set to the bandpass in which the effective magnitude is measured) Realtime Phoenix tables will be converted to the Internal input format by the @@ -132,15 +131,15 @@ in order to ensure that STIPS results are sufficiently close to the results produced by Pandeia. They are a variant of the Realtime Phoenix table, in which the following columns are present: - \• ID +* ID - \• RA (decimal degrees) +* RA (decimal degrees) - \• DEC (decimal degrees) +* DEC (decimal degrees) - \• Key +* Key - \• Apparent Magnitude +* Apparent Magnitude Here the Key column replaces effective temperature, log(g), and metallicity, and it is set to the key that Pandeia uses in producing its own Phoenix model @@ -150,9 +149,9 @@ keys in Pandeia. In order to use a Pandeia table, the following metadata must be present: - \• ``type=pandeia`` +* ``type=pandeia`` - \• ``bandpass`` +* ``bandpass`` Bandpass is treated as it is in Realtime Phoenix tables. Table conversions are done in exactly the same way as Realtime Phoenix tables. @@ -168,38 +167,38 @@ Charlot Isochrone Synthesis Spectral Evolutionary Code (December 1995 version) A BC95 catalog is an extended-source catalog, and specifies sources with the following columns: - \• ID +* ID - \• RA (decimal degrees) +* RA (decimal degrees) - \• DEC (decimal degrees) +* DEC (decimal degrees) - \• Redshift +* Redshift - \• Model (one of 'a', 'b', 'c', 'd', or 'e', with the description of each model +* Model (one of 'a', 'b', 'c', 'd', or 'e', with the description of each model provided in the `BC95 README `_ file. - \• Age (one of 10E5, 25E5, 50E5, 76E5, 10E6, 25E6, 50E6, 10E7, 50E7, 10E8, 50E8, +* Age (one of 10E5, 25E5, 50E5, 76E5, 10E6, 25E6, 50E6, 10E7, 50E7, 10E8, 50E8, 10E9, years) - \• Profile (one of 'expdisk' or 'devauc') +* Profile (one of 'expdisk' or 'devauc') - \• Radius (arcseconds) +* Radius (arcseconds) - \• Axial Ratio +* Axial Ratio - \• PA (degrees) +* PA (degrees) - \• Apparent Surface Brightness +* Apparent Surface Brightness In order to identify the catalog as a BC95 catalog, the following metadata must be present: - \• ``type=bc95`` +* ``type=bc95`` - \• ``bandpass`` +* ``bandpass`` During the observation, the catalog will be converted into an Internal format, with any necessary additional metadata added at this point. Galaxy spectra will @@ -215,35 +214,35 @@ Internal Catalog An Internal catalog is intended to include either point or extended sources, but is limited to a single filter. It must contain the following columns: - \• ID +* ID - \• RA (decimal degrees) +* RA (decimal degrees) - \• DEC (decimal degrees) +* DEC (decimal degrees) - \• FLUX (for point sources, count rate in the specified filter, counts/s. For +* FLUX (for point sources, count rate in the specified filter, counts/s. 
For sersic profiles, surface brightness inside Re in the specified filter, counts/s) - \• TYPE (either 'point' or 'sersic') +* TYPE (either 'point' or 'sersic') - \• N (Sersic profile index if TYPE is 'sersic', otherwise ignored) +* N (Sersic profile index if TYPE is 'sersic', otherwise ignored) - \• Re (half-light radius in pixels if TYPE is 'sersic', otherwise ignored) +* Re (half-light radius in pixels if TYPE is 'sersic', otherwise ignored) - \• Phi (angle of PA in degrees if TYPE is 'sersic', otherwise ignored) +* Phi (angle of PA in degrees if TYPE is 'sersic', otherwise ignored) - \• Ratio (axial ratio if TYPE is 'sersic', otherwise ignored) +* Ratio (axial ratio if TYPE is 'sersic', otherwise ignored) - \• Notes (any notes that are needed. Not used directly, but any notes will be +* Notes (any notes that are needed. Not used directly, but any notes will be retained in the observed catalog produced during the observation.) In order to identify the catalog as an Internal catalog, and in order to use it for STIPS observations, the following columns must be present: - \• ``type=internal`` +* ``type=internal`` - \• ``filter`` +* ``filter`` ``filter`` is the filter to which the catalog has been calibrated. This catalog type will not be converted during observation, but an observed source @@ -257,15 +256,15 @@ Mixed Catalog A Mixed catalog is identical to an Internal catalog, except that it contains one additional column: - \• Units (one of 'p' for photons/s, 'e' for electrons/s, 'j' for Jansky, or 'c' +* Units (one of 'p' for photons/s, 'e' for electrons/s, 'j' for Jansky, or 'c' for counts/s.) In order to identify the catalog as a Mixed catalog, the following metadata must be present: - \• ``type=mixed`` +* ``type=mixed`` - \• ``filter`` +* ``filter`` This catalog will have its flux values converted to counts/s, and will then be treated as an Internal catalog. @@ -282,7 +281,7 @@ provide the source count rate in that filter. A Multifilter catalog must have the following metadata: - \• ``type=multifilter`` +* ``type=multifilter`` The appropriate filter's count rate will be renamed as 'flux' as the catalog is converted to Internal format. @@ -294,13 +293,13 @@ Generic Catalog A Generic catalog is a point-source catalog with the following columns: - \• RA (decimal degrees) +* RA (decimal degrees) - \• DEC (decimal degrees) +* DEC (decimal degrees) - \• One column for each desired filter, showing the count rate in that filter. +* One column for each desired filter, showing the count rate in that filter. - \• (Optional) an ID column for each source. +* (Optional) an ID column for each source. No specific metadata is required. diff --git a/docs/using_stips/config_file.rst b/docs/using_stips/config_file.rst index 495bb5f..f6afc9f 100644 --- a/docs/using_stips/config_file.rst +++ b/docs/using_stips/config_file.rst @@ -81,15 +81,15 @@ Observation Keywords -------------------- observation_default_background (default *0.0*) - The default sky background in counts/s/detector pixel. Currently this keyword can be set to + The default sky background in counts/s/detector pixel. Currently this keyword can be set to: - \• any integer or floating point value, in which case that value will be - used directly. + * any integer or floating point value, in which case that value will be + used directly. - \• any of the string values 'none', 'low', 'avg', or 'high'. 
In this - case, 'none' is always treated as zero, and for any other keyword if the - value is defined for the instrument/detector selected, that value will - be used. If no such value can be found, the background will be set to 0. + * any of the string values 'none', 'low', 'avg', or 'high'. In this + case, 'none' is always treated as zero, and for any other keyword if the + value is defined for the instrument/detector selected, that value will + be used. If no such value can be found, the background will be set to 0. If used as a keyword argument, ``background`` can be used instead of ``observation_default_background`` for historical reasons. diff --git a/environment.yml b/environment.yml index 3ee99ab..216ea1a 100644 --- a/environment.yml +++ b/environment.yml @@ -31,21 +31,16 @@ dependencies: - esutil # installed from conda because you need the pre-compiled binaries. - pip: - # # Special Modules. These are temporary entries and require documentation. - # - # poppy 1.0.3 fixes a bug with the way poppy was calling factorial with non-integer - # values. Currently this is the only way to ensure that webbpsf picks up the correct - # poppy version. This peg should be removed as soon as webbpsf has updated to only - # use poppy versions that include this fix. - poppy==1.0.3 - + # Core Modules - - webbpsf==1.0.0 - - pandeia.engine==1.7 + - webbpsf==1.1.1 + - pandeia.engine==3.0 - synphot==1.1.1 - stsynphot==1.1.0 - + - soc_roman_tools + # Major modules - astropy - photutils @@ -64,15 +59,18 @@ dependencies: - docutils - sphinx - sphinx_rtd_theme - - stsci_rtd_theme + # - stsci_rtd_theme + # (install stsci_rtd_theme from master until a post 1.0 release occurs + # to prevent issues with bullets in the documentation) + - git+https://github.com/spacetelescope/stsci_rtd_theme.git@master - sphinx_automodapi - sphinx_autoapi - - astropy_helpers + - sphinx_astropy # Testing - tox - tox-conda - pytest-astropy - + # Current package - . diff --git a/environment_dev.yml b/environment_dev.yml index 30cde4c..809f8a2 100644 --- a/environment_dev.yml +++ b/environment_dev.yml @@ -1,5 +1,5 @@ # This file describes a conda environment that can be to install STIPS for development and -# testing purposes. Note that this environment set-up installs the STIPS module in +# testing purposes. Note that this environment set-up installs the STIPS module in # editable mode, so any changes you make to the STIPS source will be reflected the next # time you run python. # @@ -42,13 +42,14 @@ dependencies: # poppy version. This peg should be removed as soon as webbpsf has updated to only # use poppy versions that include this fix. - poppy==1.0.3 - + # Core Modules - - webbpsf==1.0.0 - - pandeia.engine==1.7 + - webbpsf==1.1.1 + - pandeia.engine==3.0 - synphot==1.1.1 - stsynphot==1.1.0 - + - soc_roman_tools + # Major modules - astropy - photutils @@ -67,15 +68,18 @@ dependencies: - docutils - sphinx - sphinx_rtd_theme - - stsci_rtd_theme + # - stsci_rtd_theme + # (install stsci_rtd_theme from master until a post 1.0 release occurs + # to prevent issues with bullets in the documentation) + - git+https://github.com/spacetelescope/stsci_rtd_theme.git@master - sphinx_automodapi - sphinx_autoapi - - astropy_helpers + - sphinx_astropy # Testing - tox - tox-conda - pytest-astropy - + # Current package - -e . 
diff --git "a/notebooks/STIPS Advanced I \342\200\223 Further Observations, Noise and Distortion.ipynb" "b/notebooks/STIPS Advanced I \342\200\223 Further Observations, Noise and Distortion.ipynb" new file mode 100644 index 0000000..9fa9bde --- /dev/null +++ "b/notebooks/STIPS Advanced I \342\200\223 Further Observations, Noise and Distortion.ipynb" @@ -0,0 +1,722 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "e85a8bfb", + "metadata": {}, + "source": [ + "# STIPS Advanced Tutorial I – Further Observations (Noise and Distortion)\n", + "\n", + "The contents of this notebook assume both that you already have STIPS installed (see [Installing STIPS](https://stsci-stips.readthedocs.io/en/latest/installation.html) if not) and that you are comfortable with basic usage of STIPS functionalities (see [STIPS Basic Tutorial](https://stips.readthedocs.io/en/latest/basic_tutorial.html), or the Basic Tutorial notebook, if not). Procedures in this tutorial will use scenes generated by the Basic Tutorial to explore further STIPS functionalities." + ] + }, + { + "cell_type": "markdown", + "id": "50af6027", + "metadata": {}, + "source": [ + "## Checking STIPS Import\n", + "\n", + "Before beginning, check again that the STIPS import is correct and configure and import and configure pyplot." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5ac5c9b", + "metadata": {}, + "outputs": [], + "source": [ + "from astropy.io import fits\n", + "from matplotlib import style\n", + "from stips.observation_module import ObservationModule\n", + "from stips.scene_module import SceneModule\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "import stips\n", + "\n", + "%matplotlib inline\n", + "%config InlineBackend.figure_format = 'svg'\n", + "\n", + "matplotlib.rcParams['axes.grid'] = False\n", + "matplotlib.rcParams['image.origin'] = 'lower'\n", + "\n", + "print(stips.__env__report__)" + ] + }, + { + "cell_type": "markdown", + "id": "beabca1e", + "metadata": {}, + "source": [ + "## Generating a Smaller Scene\n", + "\n", + "To better see the scope of possible observations, we will generate a scene smaller in scale than in the Basic Tutorial –– we do that below, following the same procedure as before. 
The specifications of our scene are as follows:\n", + "\n", + "* A stellar population representing a globular cluster with\n", + " * 100 stars\n", + " * An age of 7.5 billion years\n", + " * A metallicity of -2.0\n", + " * A Salpeter IMF with alpha=-2.35\n", + " * A binary fraction of 10%\n", + " * A clustered distribution (higher-mass stars closer to the population centre)\n", + " * An inverse power-law distribution\n", + " * A radius of 100 parsecs\n", + " * A distance of 10 kpc\n", + " * No offset from the centre of the scene being created\n", + "* A collection of background galaxies with\n", + " * 2 galaxies\n", + " * Redshifts between 0 and 0.2\n", + " * Radii between 0.01 and 2.0 arcsec\n", + " * V-band surface brightness magnitudes between 28 and 25\n", + " * Uniform spatial distribution (unclustered) over 200 arcsec\n", + " * No offset from the centre of the scene being created" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8adc0804", + "metadata": {}, + "outputs": [], + "source": [ + "obs_prefix = 'adv_notebook'\n", + "obs_ra = 150.0\n", + "obs_dec = -2.5" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ef258eda", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "scm = SceneModule(out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec, out_path='notebooks_data/')\n", + "\n", + "stellar_parameters = {\n", + " 'n_stars': 100,\n", + " 'age_low': 7.5e12, \n", + " 'age_high': 7.5e12,\n", + " 'z_low': -2.0, \n", + " 'z_high': -2.0,\n", + " 'imf': 'salpeter', \n", + " 'alpha': -2.35,\n", + " 'binary_fraction': 0.1,\n", + " 'clustered': True,\n", + " 'distribution': 'invpow',\n", + " 'radius': 100.0, \n", + " 'radius_units': 'pc',\n", + " 'distance_low': 10.0, \n", + " 'distance_high': 10.0,\n", + " 'offset_ra': 0.0, \n", + " 'offset_dec': 0.0\n", + " }\n", + "\n", + "stellar_cat_file = scm.CreatePopulation(stellar_parameters)\n", + "print(\"Stellar population saved to file {}\".format(stellar_cat_file))\n", + "\n", + "galaxy_parameters = {\n", + " 'n_gals': 2,\n", + " 'z_low': 0.0, \n", + " 'z_high': 0.2,\n", + " 'rad_low': 0.01, \n", + " 'rad_high': 2.0,\n", + " 'sb_v_low': 28.0, \n", + " 'sb_v_high': 25.0,\n", + " 'distribution': 'uniform', \n", + " 'clustered': False,\n", + " 'radius': 200.0, \n", + " 'radius_units': 'arcsec',\n", + " 'offset_ra': 0.0, \n", + " 'offset_dec': 0.0\n", + " }\n", + "\n", + "galaxy_cat_file = scm.CreateGalaxies(galaxy_parameters)\n", + "print(\"Galaxy population saved to file {}\".format(galaxy_cat_file))" + ] + }, + { + "cell_type": "markdown", + "id": "d513f7e7", + "metadata": {}, + "source": [ + "## Observations\n", + "\n", + "With the scene now created, we can move forward with our observations. For the purpose of seeing multiple observation styles, we'll run through a series of observations, with the following parameters. This is meant to demonstrate the range of STIPS observation capabilities –– there are, of course, a great many possibilities for other observations, and this is only a small sample of options. We will leave all other parameters unchanged beyond the specified modifications in each observation. 
We will begin with the observation setup used in the Basic Tutorial, and compare other observations against that.\n",
+    "\n",
+    "### Observation I: Offset Modifications\n",
+    "\n",
+    "* Offset of 40 degrees in RA and 20 degrees in Dec\n",
+    "* Rotation of 27 degrees\n",
+    "\n",
+    "### Observation II: Other Residuals\n",
+    "\n",
+    "* Flat residuals –– True\n",
+    "* Dark residuals –– True\n",
+    "* Cosmic residuals –– True\n",
+    "* Poisson residuals –– False\n",
+    "* Readnoise residuals –– False\n",
+    "\n",
+    "### Observation III: Distortions and Exposures\n",
+    "\n",
+    "#### III.1\n",
+    "\n",
+    "* Introduce distortion\n",
+    "\n",
+    "#### III.2\n",
+    "\n",
+    "* Increase exposure time to 15000\n",
+    "\n",
+    "#### III.3\n",
+    "\n",
+    "* Decrease exposure time to 100"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "99345d76",
+   "metadata": {},
+   "source": [
+    "## Basic Tutorial Observation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "66df9b43",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "offset = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0,\n",
+    "    'offset_pa': 0.0\n",
+    "    }\n",
+    "\n",
+    "residuals = {\n",
+    "    'residual_flat': False,\n",
+    "    'residual_dark': False,\n",
+    "    'residual_cosmic': False,\n",
+    "    'residual_poisson': True,\n",
+    "    'residual_readnoise': True,\n",
+    "    }\n",
+    "\n",
+    "observation_parameters = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': False,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 1000,\n",
+    "    'offsets': [offset]\n",
+    "    }\n",
+    "\n",
+    "obm = ObservationModule(observation_parameters, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                        residuals=residuals, out_path='notebooks_data/')\n",
+    "\n",
+    "obm.nextObservation()\n",
+    "\n",
+    "output_stellar_catalogues = obm.addCatalogue(stellar_cat_file)\n",
+    "output_galaxy_catalogues = obm.addCatalogue(galaxy_cat_file)\n",
+    "\n",
+    "psf_file = obm.addError()\n",
+    "\n",
+    "fits_file, mosaic_file, params = obm.finalize(mosaic=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5cd11414",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with fits.open(fits_file) as result_file:\n",
+    "    result_data = result_file[1].data\n",
+    "\n",
+    "crop_result_data = result_data[1900:2200, 1900:2200]\n",
+    "norm = matplotlib.colors.LogNorm(vmin=crop_result_data.min(), vmax=crop_result_data.max())\n",
+    "\n",
+    "fig1 = plt.figure()\n",
+    "im = plt.matshow(crop_result_data, norm=norm)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "12f6d721",
+   "metadata": {},
+   "source": [
+    "## Observation I"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "df0eee89",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "offset_1 = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 40.0,\n",
+    "    'offset_dec': 20.0,\n",
+    "    'offset_pa': 27.0\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "residuals_1 = {\n",
+    "    'residual_flat': False,\n",
+    "    'residual_dark': False,\n",
+    "    'residual_cosmic': False,\n",
+    "    'residual_poisson': True,\n",
+    "    'residual_readnoise': True\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "observation_parameters_1 = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': False,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 1000,\n",
+    "    'offsets': [offset_1]\n",
+    "    }\n",
+    "\n",
+    "obm_1 = ObservationModule(observation_parameters_1, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                          residuals=residuals_1, out_path='notebooks_data/')\n",
+    "\n",
+    "obm_1.nextObservation()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "667dfe26",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "output_stellar_catalogues_1 = obm_1.addCatalogue(stellar_cat_file)\n",
+    "output_galaxy_catalogues_1 = obm_1.addCatalogue(galaxy_cat_file)\n",
+    "\n",
+    "psf_file_1 = obm_1.addError()\n",
+    "\n",
+    "fits_file_1, mosaic_file_1, params_1 = obm_1.finalize(mosaic=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7ed6e399",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with fits.open(fits_file_1) as result_file_1:\n",
+    "    result_data_1 = result_file_1[1].data\n",
+    "\n",
+    "crop_result_data_1 = result_data_1[1900:2200, 1900:2200]\n",
+    "norm = matplotlib.colors.LogNorm(vmin=crop_result_data_1.min(), vmax=crop_result_data_1.max())\n",
+    "\n",
+    "fig_1 = plt.figure()\n",
+    "im_1 = plt.matshow(crop_result_data_1, norm=norm)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "1a85b9a5",
+   "metadata": {},
+   "source": [
+    "## Observation II"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "de45ba51",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "offset_2 = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0,\n",
+    "    'offset_pa': 0.0\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "residuals_2 = {\n",
+    "    'residual_flat': True,\n",
+    "    'residual_dark': True,\n",
+    "    'residual_cosmic': True,\n",
+    "    'residual_poisson': False,\n",
+    "    'residual_readnoise': False\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "observation_parameters_2 = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': False,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 1000,\n",
+    "    'offsets': [offset_2]\n",
+    "    }\n",
+    "\n",
+    "obm_2 = ObservationModule(observation_parameters_2, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                          residuals=residuals_2, out_path='notebooks_data/')\n",
+    "\n",
+    "obm_2.nextObservation()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fba6bd12",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "output_stellar_catalogues_2 = obm_2.addCatalogue(stellar_cat_file)\n",
+    "output_galaxy_catalogues_2 = obm_2.addCatalogue(galaxy_cat_file)\n",
+    "\n",
+    "psf_file_2 = obm_2.addError()\n",
+    "\n",
+    "fits_file_2, mosaic_file_2, params_2 = obm_2.finalize(mosaic=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4ee3f7ac",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with fits.open(fits_file_2) as result_file_2:\n",
+    "    result_data_2 = result_file_2[1].data\n",
+    "\n",
+    "crop_result_data_2 = result_data_2[1900:2200, 1900:2200]\n",
+    "norm = matplotlib.colors.LogNorm(vmin=crop_result_data_2.min(), vmax=crop_result_data_2.max())\n",
+    "\n",
+    "fig_2 = plt.figure()\n",
+    "im_2 = plt.matshow(crop_result_data_2, norm=norm)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9b14565e",
+   "metadata": {},
+   "source": [
+    "## Observation III.1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6f2db88c",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "offset_3_1 = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0,\n",
+    "    'offset_pa': 0.0\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "residuals_3_1 = {\n",
+    "    'residual_flat': False,\n",
+    "    'residual_dark': False,\n",
+    "    'residual_cosmic': False,\n",
+    "    'residual_poisson': True,\n",
+    "    'residual_readnoise': True\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "observation_parameters_3_1 = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': True,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 1000,\n",
+    "    'offsets': [offset_3_1]\n",
+    "    }\n",
+    "\n",
+    "obm_3_1 = ObservationModule(observation_parameters_3_1, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                            residuals=residuals_3_1, out_path='notebooks_data/')\n",
+    "\n",
+    "obm_3_1.nextObservation()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "31a416ef",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "output_stellar_catalogues_3_1 = obm_3_1.addCatalogue(stellar_cat_file)\n",
+    "output_galaxy_catalogues_3_1 = obm_3_1.addCatalogue(galaxy_cat_file)\n",
+    "\n",
+    "psf_file_3_1 = obm_3_1.addError()\n",
+    "\n",
+    "fits_file_3_1, mosaic_file_3_1, params_3_1 = obm_3_1.finalize(mosaic=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fd6932d4",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with fits.open(fits_file_3_1) as result_file_3_1:\n",
+    "    result_data_3_1 = result_file_3_1[1].data\n",
+    "\n",
+    "crop_result_data_3_1 = result_data_3_1[1900:2200, 1900:2200]\n",
+    "norm = matplotlib.colors.LogNorm(vmin=crop_result_data_3_1.min(), vmax=crop_result_data_3_1.max())\n",
+    "\n",
+    "fig_3_1 = plt.figure()\n",
+    "im_3_1 = plt.matshow(crop_result_data_3_1, norm=norm)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "4a9eb7d5",
+   "metadata": {},
+   "source": [
+    "## Observation III.2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f201031c",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "offset_3_2 = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0,\n",
+    "    'offset_pa': 0.0\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "residuals_3_2 = {\n",
+    "    'residual_flat': False,\n",
+    "    'residual_dark': False,\n",
+    "    'residual_cosmic': False,\n",
+    "    'residual_poisson': False,\n",
+    "    'residual_readnoise': False\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "observation_parameters_3_2 = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': False,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 15000,\n",
+    "    'offsets': [offset_3_2]\n",
+    "    }\n",
+    "\n",
+    "obm_3_2 = ObservationModule(observation_parameters_3_2, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                            residuals=residuals_3_2, out_path='notebooks_data/')\n",
+    "\n",
+    "obm_3_2.nextObservation()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7565cf33",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "output_stellar_catalogues_3_2 = obm_3_2.addCatalogue(stellar_cat_file)\n",
+    "output_galaxy_catalogues_3_2 = obm_3_2.addCatalogue(galaxy_cat_file)\n",
+    "\n",
+    "psf_file_3_2 = obm_3_2.addError()\n",
+    "\n",
+    "fits_file_3_2, mosaic_file_3_2, params_3_2 = obm_3_2.finalize(mosaic=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7982bf44",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with fits.open(fits_file_3_2) as result_file_3_2:\n",
+    "    result_data_3_2 = result_file_3_2[1].data\n",
+    "\n",
+    "crop_result_data_3_2 = result_data_3_2[1900:2200, 1900:2200]\n",
+    "norm = matplotlib.colors.LogNorm(vmin=crop_result_data_3_2.min(), vmax=crop_result_data_3_2.max())\n",
+    "\n",
+    "fig_3_2 = plt.figure()\n",
+    "im_3_2 = plt.matshow(crop_result_data_3_2, norm=norm)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "51839190",
+   "metadata": {},
+   "source": [
+    "## Observation III.3"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bde642c8",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "offset_3_3 = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0,\n",
+    "    'offset_pa': 0.0\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "residuals_3_3 = {\n",
+    "    'residual_flat': False,\n",
+    "    'residual_dark': False,\n",
+    "    'residual_cosmic': False,\n",
+    "    'residual_poisson': False,\n",
+    "    'residual_readnoise': False\n",
+    "    }\n",
+    "\n",
+    "\n",
+    "observation_parameters_3_3 = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': False,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 100,\n",
+    "    'offsets': [offset_3_3]\n",
+    "    }\n",
+    "\n",
+    "obm_3_3 = ObservationModule(observation_parameters_3_3, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                            residuals=residuals_3_3, out_path='notebooks_data/')\n",
+    "\n",
+    "obm_3_3.nextObservation()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "dd84a271",
+   "metadata": {
+    "scrolled": true,
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "output_stellar_catalogues_3_3 = obm_3_3.addCatalogue(stellar_cat_file)\n",
+    "output_galaxy_catalogues_3_3 = obm_3_3.addCatalogue(galaxy_cat_file)\n",
+    "\n",
+    "psf_file_3_3 = obm_3_3.addError()\n",
+    "\n",
+    "fits_file_3_3, mosaic_file_3_3, params_3_3 = obm_3_3.finalize(mosaic=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "154d74c7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "with fits.open(fits_file_3_3) as result_file_3_3:\n",
+    "    result_data_3_3 = result_file_3_3[1].data\n",
+    "\n",
+    "crop_result_data_3_3 = result_data_3_3[1900:2200, 1900:2200]\n",
+    "norm = matplotlib.colors.LogNorm(vmin=crop_result_data_3_3.min(), vmax=crop_result_data_3_3.max())\n",
+    "\n",
+    "fig_3_3 = plt.figure()\n",
+    "im_3_3 = plt.matshow(crop_result_data_3_3, norm=norm)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9323b6e5",
+   "metadata": {},
+   "source": [
+    "## Conclusion\n",
+    "\n",
+    "This concludes the first portion of the advanced tutorial. For information on PSFs, please see STIPS Advanced II. If you have further questions, check out the [STIPS documentation](https://stips.readthedocs.io/en/latest/), or reach out to the STIPS Helpdesk via email at help@stsci.edu with the subject line \"STIPS Question\"."
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/STIPS Advanced II - PSFs.ipynb b/notebooks/STIPS Advanced II - PSFs.ipynb new file mode 100644 index 0000000..414ba55 --- /dev/null +++ b/notebooks/STIPS Advanced II - PSFs.ipynb @@ -0,0 +1,394 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "2f9bfcc8", + "metadata": {}, + "source": [ + "# STIPS Advanced II – PSFs, Adding Sources\n", + "\n", + "This portion of the Advanced tutorial concerns the generation and manipulation of PSFs using STIPS functions. As with Advanced I, this notebook assumes both that you already have STIPS installed (see [Installing STIPS](https://stsci-stips.readthedocs.io/en/latest/installation.html) if not) and that you are comfortable with basic usage of STIPS functionalities (see [STIPS Basic Tutorial](https://stips.readthedocs.io/en/latest/basic_tutorial.html), or the Basic Tutorial notebook, if not).\n", + "\n", + "#### Note:\n", + "The ```makePSF``` package is still undergoing development –– pending the inclusion of logging functions and error catching, as well as some functional optimization. This tutorial will be updated to reflect this in future releases." + ] + }, + { + "cell_type": "markdown", + "id": "04736e27", + "metadata": {}, + "source": [ + "## Checking STIPS Import, Adding Modules\n", + "\n", + "Before beginning, check again that the STIPS import is correct, and import and configure a few modules we'll be needing. Additionally, ensure that the generated files from the Basic Tutorial are visible in your \"notebooks\" folder/the directory from which you are working." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1d1ee27f", + "metadata": {}, + "outputs": [], + "source": [ + "from astropy.io import fits\n", + "from glob import glob\n", + "from matplotlib import style\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.ticker as ticker\n", + "import numpy as np\n", + "import stips\n", + "\n", + "%matplotlib inline\n", + "%config InlineBackend.figure_format = 'svg'\n", + "\n", + "matplotlib.rcParams['axes.grid'] = False\n", + "matplotlib.rcParams['image.origin'] = 'lower'\n", + "\n", + "print(stips.__env__report__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fe582f27", + "metadata": {}, + "outputs": [], + "source": [ + "file_list = glob('notebooks_data/notebook_example*')\n", + "print(file_list)" + ] + }, + { + "cell_type": "markdown", + "id": "ccedccb0", + "metadata": {}, + "source": [ + "## Generating and Manipulating a PSF\n", + "\n", + "PSFs can be generated using FITS input files and the makePSF module within STIPS. For this example, we will be using the example PSF file \"psf_WFI_2.0.0_F129_sca01.fits\", a PSF generated with Roman WFI F129. This file is available in the \"notebooks\" directory of STIPS. The generated PSF will be returned as an array." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c214dd85", + "metadata": {}, + "outputs": [], + "source": [ + "example_file = fits.open('notebooks_data/psf_WFI_2.0.0_F129_sca01.fits')\n", + "\n", + "test_psf = stips.utilities.makePSF.make_epsf(example_file[0].data[0])\n", + "\n", + "print(test_psf)" + ] + }, + { + "cell_type": "markdown", + "id": "a2d545fd", + "metadata": {}, + "source": [ + "We can view a plot of our generated PSF using pyplot." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8036af8b", + "metadata": {}, + "outputs": [], + "source": [ + "fig1 = plt.figure()\n", + "im1 = plt.matshow(test_psf)\n", + "plt.savefig('notebooks_data/adv_notebook_epsf_orig.jpeg')" + ] + }, + { + "cell_type": "markdown", + "id": "c6a64402", + "metadata": {}, + "source": [ + "The PSF can be manipulated in a variety of ways –– for purposes of this tutorial, we will perform a bicubic interpolation on our PSF, as well as calculate the fraction of light that should fall on a given pixel. This will employ two of makePSFs primary functions, ```makePSF.bicubic``` and ```makePSF.real_psf```." + ] + }, + { + "cell_type": "markdown", + "id": "fc7e970b", + "metadata": {}, + "source": [ + "### Bicubic Interpolation\n", + "\n", + "For our bicubic interpolation, we will use the following input parameters, selecting a reference point close to the source in the hopes of seeing more engaging results:\n", + "\n", + "* Reference x pixel (```ix```) – 85\n", + "* Reference y pixel (```iy```) – 85\n", + "* Pixel phase along the x-direction (```fx```) – 5\n", + "* Pixel phase along the y-direction (```fy```) – 5" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b17a4ba6", + "metadata": {}, + "outputs": [], + "source": [ + "ix = iy = 85\n", + "fx = fy = 5\n", + "\n", + "stips.utilities.makePSF.bicubic(test_psf, iy, ix, fx, fy)" + ] + }, + { + "cell_type": "markdown", + "id": "9a0f5a0b", + "metadata": {}, + "source": [ + "Noted in the image below is the location at which the interpolation was performed (cropped for visibility)." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b2249868",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def plot_square_cutout(data, x_mid, y_mid, half):\n",
+    "    '''\n",
+    "    x_mid: index of middle x pixel in desired cutout region\n",
+    "    y_mid: index of middle y pixel in desired cutout region\n",
+    "    half: distance in pixels from middle pixel to edge of cutout\n",
+    "    '''\n",
+    "    \n",
+    "    # create plot\n",
+    "    fig, ax = plt.subplots(figsize=(4,4))\n",
+    "    ax.imshow(data[x_mid - half:x_mid + half,\n",
+    "                   y_mid - half:y_mid + half])\n",
+    "    \n",
+    "    # create custom formatters for cutout region axes (run once)\n",
+    "    ticks_x = ticker.FuncFormatter(lambda x, pos: f\"{x + x_mid - half:.0f}\")\n",
+    "    ticks_y = ticker.FuncFormatter(lambda y, pos: f\"{y + y_mid - half:.0f}\")\n",
+    "    \n",
+    "    # replace default tick labels with cutout pixel indices\n",
+    "    ax.set_xticks([x for x in ax.get_xticks() if 0 <= x < half*2])\n",
+    "    ax.set_yticks([y for y in ax.get_yticks() if 0 <= y < half*2])\n",
+    "    ax.xaxis.set_major_formatter(ticks_x)\n",
+    "    ax.yaxis.set_major_formatter(ticks_y)\n",
+    "    \n",
+    "    return fig, ax"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "eb827956",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Plot parameters\n",
+    "x_mid = 88\n",
+    "y_mid = 88\n",
+    "half = 30\n",
+    "\n",
+    "fig0, ax0 = plot_square_cutout(test_psf, x_mid, y_mid, half)\n",
+    "\n",
+    "circ_bicubic = plt.Circle((half - 3, half - 3), 3, color='r', fill=False)\n",
+    "\n",
+    "ax0.add_patch(circ_bicubic)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a5f81adf",
+   "metadata": {},
+   "source": [
+    "### Fractional Light\n",
+    "\n",
+    "Using ```makePSF.real_psf```, we can calculate the fraction of light from our ePSF that should fall on any given pixel. We will use the following parameters:\n",
+    "\n",
+    "* Relative location of the source along the x-axis (```dx```) (note that the default of 0 corresponds to the center of the box) –– 5\n",
+    "* Relative location of the source along the y-axis (```dy```) –– 5\n",
+    "* Center of the input psf model (```psf_center```) (our PSF has dimensions of 177 by 177 px) –– 88\n",
+    "* PSF boxsize (```boxsize```) –– 177\n",
+    "\n",
+    "#### Note:\n",
+    "\n",
+    "Currently, the default value of ```psf_center``` is set to 177, and default boxsize 44 –– this will be adjusted in future updates."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b51d47ec",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dx = dy = 5\n",
+    "\n",
+    "#The code below is designed to modify the default values for the center\n",
+    "#and size of the PSF to fit the PSF being used –– this will be addressed\n",
+    "#in future releases.\n",
+    "\n",
+    "psf_middle = round((test_psf[0].shape[0]-1)/2)\n",
+    "\n",
+    "PSF_BOXSIZE = np.floor(psf_middle)/4\n",
+    "\n",
+    "stips.utilities.makePSF.real_psf(dx, dy, test_psf, boxsize=PSF_BOXSIZE, psf_center=psf_middle)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "af41953b",
+   "metadata": {},
+   "source": [
+    "Noted below is the location at which fractional light was evaluated."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17f972a4", + "metadata": {}, + "outputs": [], + "source": [ + "fig1_2, ax1_2 = plot_square_cutout(test_psf, x_mid, y_mid, half)\n", + "\n", + "circ_real_psf = plt.Circle((half + 5, half + 5), 3, color='r', fill=False)\n", + "ax1_2.add_patch(circ_real_psf)" + ] + }, + { + "cell_type": "markdown", + "id": "0626186d", + "metadata": {}, + "source": [ + "## Adding New Sources to a Scene\n", + "\n", + "To add a new source to our generated scene from the Basic Tutorial, we will use the ```place_source``` function from ```makePSF``` and insert a source with the following characteristics:\n", + "\n", + "* Location (px)\n", + " * x = 2000\n", + " * y = 2000\n", + "* Flux = 15\n", + "\n", + "Note that, by default, the PSF upscale is 4.\n", + "\n", + "#### Note:\n", + "\n", + "Currently, the default value of ```psf_center``` is set to 177, and default boxsize 44 –– this will be adjusted in future updates." + ] + }, + { + "cell_type": "markdown", + "id": "a0de3c1b", + "metadata": {}, + "source": [ + "### Original Scene\n", + "\n", + "Re-open the original scene from the saved output file –– by default, this file is \"notebook_example_1_0.fits\". We will limit our area of view to the range in which the added source will be visible, between 1950 and 2150 pixels on each axis." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7b022fab", + "metadata": {}, + "outputs": [], + "source": [ + "with fits.open('notebooks_data/notebook_example_1_0.fits') as result_file:\n", + " result_data = result_file[1].data\n", + "\n", + "# Plot parameters\n", + "x_mid2 = 2000\n", + "y_mid2 = 2000\n", + "half2 = 50\n", + "\n", + "fig2, ax2 = plot_square_cutout(result_data, x_mid2, y_mid2, half2)\n", + "\n", + "circ_no_source = plt.Circle((half2, half2), 3, color='r', fill=False)\n", + "ax2.add_patch(circ_no_source)" + ] + }, + { + "cell_type": "markdown", + "id": "c407d0eb", + "metadata": {}, + "source": [ + "### Add New Source\n", + "\n", + "We now use the ```makePSF.place_source``` function to add in our test PSF as a source (noting that the ```boxsize``` and ```psf_center``` parameters must be modified from their defaults for this particular PSF, as was the case with the ```makePSF.real_psf example```)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "93345760", + "metadata": {}, + "outputs": [], + "source": [ + "#The code below is designed to modify the default values for the center\n", + "#and size of the PSF to fit the PSF being used –– this will be addressed\n", + "#in future releases.\n", + "\n", + "psf_middle = round((test_psf[0].shape[0]-1)/2)\n", + "\n", + "PSF_BOXSIZE = np.floor(psf_middle)/4\n", + "\n", + "added_source = stips.utilities.makePSF.place_source(2000, 2000, 3000, result_data, test_psf, boxsize=PSF_BOXSIZE, psf_center=psf_middle)" + ] + }, + { + "cell_type": "markdown", + "id": "6d2fa392", + "metadata": {}, + "source": [ + "We can now open our modified image and view our added source." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d416fd1", + "metadata": {}, + "outputs": [], + "source": [ + "fig2_1, ax2_1 = plot_square_cutout(result_data, x_mid2, y_mid2, half2)\n", + "\n", + "circ_source = plt.Circle((half2, half2), 3, color='r', fill=False)\n", + "ax2_1.add_patch(circ_source)" + ] + }, + { + "cell_type": "markdown", + "id": "b0b44ac2", + "metadata": {}, + "source": [ + "## Conclusion\n", + "\n", + "This concludes the second portion of the advanced tutorial. 
If you have further questions, check out the [STIPS documentation](https://stips.readthedocs.io/en/latest/), or reach out to the STIPS Helpdesk via email at help@stsci.edu with the subject line \"STIPS Question\"." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/STIPS Basic Tutorial.ipynb b/notebooks/STIPS Basic Tutorial.ipynb new file mode 100644 index 0000000..dc39c9d --- /dev/null +++ b/notebooks/STIPS Basic Tutorial.ipynb @@ -0,0 +1,354 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# STIPS Basic Tutorial\n", + "\n", + "If you prefer to work within a Jupyter notebook, this notebook will follow the same processes as the Basic Tutorial to create a STIPS observation. Like the Basic Tutorial, it assumes that you already have STIPS installed; see [Installing STIPS](https://stsci-stips.readthedocs.io/en/latest/installation.html) in the STIPS documentation if not." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Importing STIPS and checking the STIPS environment\n", + "\n", + "In order to use STIPS, you must have several sets of data files installed ([Installing STIPS](https://stsci-stips.readthedocs.io/en/latest/installation.html) contains instructions on doing this). In order to test your STIPS installation, STIPS includes an environment report utility that shows which version of STIPS you have installed, as well as the versions of the most important support packages that STIPS uses. When you run the cell below, you should get output something like this:\n", + "\n", + "> STIPS Version x.y.z with Data Version x.y.z at /Some/Path/To/stips_data\n", + ">\n", + "> STIPS Grid Generated with x.y.z\n", + ">\n", + "> Pandeia Version a.b.c with Data Version a.b.c at /Some/Path/To/pandeia_refdata\n", + ">\n", + "> Webbpsf Version d.e.f with Data Version d.e.f at /Some/Path/To/WEBBPSF_PATH" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from astropy.io import fits\n", + "from matplotlib import style\n", + "from stips.scene_module import SceneModule\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "import stips\n", + "\n", + "%matplotlib inline\n", + "%config InlineBackend.figure_format = 'svg'\n", + "\n", + "matplotlib.rcParams['axes.grid'] = False\n", + "matplotlib.rcParams['image.origin'] = 'lower'\n", + "\n", + "print(stips.__env__report__)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setting up some Basics\n", + "\n", + "STIPS allows you to set up some basic elements of your observation and pass them when creating and running observations. The section below shows one way to set these up." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "obs_prefix = 'notebook_example'\n", + "obs_ra = 150.0\n", + "obs_dec = -2.5" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Creating a Scene to Observe\n", + "\n", + "STIPS contains functions to generate stellar populations as well as background galaxies. 
These functions are all present in the `SceneModule` class. In order to know what sort of populations to generate, the Scene Module requires input dictionaries to specify population parameters. In this example, we will create the following:\n",
+    "\n",
+    "* A stellar population representing a globular cluster with\n",
+    "    * 100 stars\n",
+    "    * An age of 7.5 billion years\n",
+    "    * A metallicity of -2.0\n",
+    "    * A Salpeter IMF with alpha=-2.35\n",
+    "    * A binary fraction of 10%\n",
+    "    * A clustered distribution (higher-mass stars closer to the population centre)\n",
+    "    * An inverse power-law distribution\n",
+    "    * A radius of 100 parsecs\n",
+    "    * A distance of 10 kpc\n",
+    "    * No offset from the centre of the scene being created\n",
+    "* A collection of background galaxies with\n",
+    "    * 10 galaxies\n",
+    "    * Redshifts between 0 and 0.2\n",
+    "    * Radii between 0.01 and 2.0 arcsec\n",
+    "    * V-band surface brightness magnitudes between 24 and 28\n",
+    "    * Uniform spatial distribution (unclustered) over 200 arcsec\n",
+    "    * No offset from the centre of the scene being created\n",
+    "    \n",
+    "\n",
+    "##### Note: \n",
+    "Background galaxies are available in STIPS, but are neither supported nor tested.\n",
+    "    "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "scm = SceneModule(out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec, out_path='notebooks_data/')\n",
+    "\n",
+    "stellar_parameters = {\n",
+    "    'n_stars': 100,\n",
+    "    'age_low': 7.5e12,\n",
+    "    'age_high': 7.5e12,\n",
+    "    'z_low': -2.0,\n",
+    "    'z_high': -2.0,\n",
+    "    'imf': 'salpeter',\n",
+    "    'alpha': -2.35,\n",
+    "    'binary_fraction': 0.1,\n",
+    "    'clustered': True,\n",
+    "    'distribution': 'invpow',\n",
+    "    'radius': 100.0,\n",
+    "    'radius_units': 'pc',\n",
+    "    'distance_low': 10.0,\n",
+    "    'distance_high': 10.0,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0\n",
+    "    }\n",
+    "\n",
+    "stellar_cat_file = scm.CreatePopulation(stellar_parameters)\n",
+    "print(\"Stellar population saved to file {}\".format(stellar_cat_file))\n",
+    "\n",
+    "galaxy_parameters = {\n",
+    "    'n_gals': 10,\n",
+    "    'z_low': 0.0,\n",
+    "    'z_high': 0.2,\n",
+    "    'rad_low': 0.01,\n",
+    "    'rad_high': 2.0,\n",
+    "    'sb_v_low': 28.0,\n",
+    "    'sb_v_high': 24.0,\n",
+    "    'distribution': 'uniform',\n",
+    "    'clustered': False,\n",
+    "    'radius': 200.0,\n",
+    "    'radius_units': 'arcsec',\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0\n",
+    "    }\n",
+    "\n",
+    "galaxy_cat_file = scm.CreateGalaxies(galaxy_parameters)\n",
+    "print(\"Galaxy population saved to file {}\".format(galaxy_cat_file))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Creating a STIPS observation\n",
+    "\n",
+    "Once a scene has been created, it's possible to observe that scene as many times as you wish (and from as many places as you wish, although obviously any observation that doesn't include at least some of the scene will simply be an empty exposure). In this case, we create a single Roman WFI observation.\n",
+    "\n",
+    "STIPS uses a bit of specialized terminology to describe its observations. In particular:\n",
+    "\n",
+    "* An *observation* is a set of exposures with a single instrument (e.g. Roman WFI), one or more filters (where each\n",
+    "  exposure in the observation will be repeated for every included filter), some number of the instrument's\n",
+    "  detectors (for WFI, between 1 and 18), where each exposure in the observation will be repeated, with the\n",
+    "  appropriate inter-detector offset, for every included detector, a single chosen sky background value, and a\n",
+    "  single exposure time (applied to each exposure in the observation), and one or more offsets.\n",
+    "* An *offset* is a single telescope pointing. For each offset specified in the observation, an exposure will be\n",
+    "  created for each detector and each filter at the offset. STIPS may, optionally, create one or more mosaics at\n",
+    "  each offset, with a single mosaic including all detectors with the same filter. In addition, STIPS can create a\n",
+    "  single combined mosaic for each filter in the combined Observation.\n",
+    "\n",
+    "In this case, we will create an observation with:\n",
+    "\n",
+    "* Roman WFI F129\n",
+    "* 1 detector\n",
+    "* No distortion\n",
+    "* A sky background of 0.15 counts/s/pixel\n",
+    "* The ID 1\n",
+    "* An exposure time of 1000 seconds\n",
+    "\n",
+    "We will use a single offset with:\n",
+    "\n",
+    "* An ID of 1\n",
+    "* No centering (if an offset is centred then, for a multi-detector observation, each detector is centred on the\n",
+    "  offset co-ordinates individually rather than the instrument as a whole being centred there)\n",
+    "* No change in RA, DEC, or PA from the centre of the observation\n",
+    "\n",
+    "and the following residual settings:\n",
+    "\n",
+    "* Flatfield residual: off\n",
+    "* Dark current residual: off\n",
+    "* Cosmic ray removal residual: off\n",
+    "* Poisson noise residual: on\n",
+    "* Readnoise residual: on\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from stips.observation_module import ObservationModule\n",
+    "\n",
+    "offset = {\n",
+    "    'offset_id': 1,\n",
+    "    'offset_centre': False,\n",
+    "    'offset_ra': 0.0,\n",
+    "    'offset_dec': 0.0,\n",
+    "    'offset_pa': 0.0\n",
+    "    }\n",
+    "\n",
+    "residuals = {\n",
+    "    'residual_flat': False,\n",
+    "    'residual_dark': False,\n",
+    "    'residual_cosmic': False,\n",
+    "    'residual_poisson': True,\n",
+    "    'residual_readnoise': True,\n",
+    "    }\n",
+    "\n",
+    "observation_parameters = {\n",
+    "    'instrument': 'WFI',\n",
+    "    'filters': ['F129'],\n",
+    "    'detectors': 1,\n",
+    "    'distortion': False,\n",
+    "    'background': 0.15,\n",
+    "    'observations_id': 1,\n",
+    "    'exptime': 1000,\n",
+    "    'offsets': [offset]\n",
+    "    }\n",
+    "\n",
+    "obm = ObservationModule(observation_parameters, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec,\n",
+    "                        residuals=residuals, out_path='notebooks_data/')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Finally, we call nextObservation(). This function is used to move between different combinations of offset and filter. It must be called once in order to initialize the observation module to the first observation before adding catalogues."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "obm.nextObservation()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Observing the Scene\n",
+    "\n",
+    "In order to observe the scene, we must add the scene catalogues created above to it, add in error residuals, and finalize the observation. 
In so doing, we create output catalogues which are taken from the input catalogues, but only contain the sources visible to the detectors, and convert source brightnesses into units of counts/s for the detectors." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true, + "tags": [] + }, + "outputs": [], + "source": [ + "output_stellar_catalogues = obm.addCatalogue(stellar_cat_file)\n", + "output_galaxy_catalogues = obm.addCatalogue(galaxy_cat_file)\n", + "\n", + "print(\"Output Catalogues are {} and {}.\".format(output_stellar_catalogues, output_galaxy_catalogues))\n", + "\n", + "psf_file = obm.addError()\n", + "\n", + "print(\"PSF File is {}\".format(psf_file))\n", + "\n", + "fits_file, mosaic_file, params = obm.finalize(mosaic=False)\n", + "\n", + "print(\"Output FITS file is {}\".format(fits_file))\n", + "print(\"Output Mosaic File is {}\".format(mosaic_file))\n", + "print(\"Observation Parameters are {}\".format(params))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "## Show the Result\n", + "\n", + "We use pyplot to plot the resulting simulated image." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "with fits.open(fits_file) as result_file:\n", + " result_data = result_file[1].data\n", + "\n", + "fig1 = plt.figure()\n", + "norm = matplotlib.colors.LogNorm(vmin=result_data.min(), vmax=result_data.max())\n", + "im = plt.matshow(result_data, norm=norm)\n", + "plt.savefig('notebooks_data/notebook_example_basic_scene.jpeg')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As well as a detail from the detector center." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig2 = plt.figure()\n", + "im2 = plt.matshow(result_data[1800:2300, 1800:2300], norm=norm)\n", + "plt.savefig('notebooks_data/notebook_example_basic_scene.jpeg')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/notebooks/STIPS Tutorial.ipynb b/notebooks/STIPS Tutorial.ipynb deleted file mode 100644 index 667f177..0000000 --- a/notebooks/STIPS Tutorial.ipynb +++ /dev/null @@ -1,496 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# STIPS Basic Usage Example\n", - "\n", - "If you prefer to work within a Jupyter notebook, this notebook will follow the same processes as the Basic Tutorial to create a STIPS observation. Like the Basic Tutorial, it assumes that you already have STIPS installed; see [Installing STIPS](https://stsci-stips.readthedocs.io/en/latest/installation.html) in the STIPS documentation if not." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Importing STIPS and checking the STIPS environment\n", - "\n", - "In order to use STIPS, you must have several sets of data files installed ([Installing STIPS](https://stsci-stips.readthedocs.io/en/latest/installation.html) contains instructions on doing this). 
In order to test your STIPS installation, STIPS includes an environment report utility that shows which version of STIPS you have installed, as well as the versions of the most important support packages that STIPS uses. When you run the cell below, you should get output something like this:\n", - "\n", - "> STIPS Version x.y.z with Data Version x.y.z at /Some/Path/To/stips_data\n", - ">\n", - "> STIPS Grid Generated with x.y.z\n", - ">\n", - "> Pandeia Version a.b.c with Data Version a.b.c at /Some/Path/To/pandeia_refdata\n", - ">\n", - "> Webbpsf Version d.e.f with Data Version d.e.f at /Some/Path/To/WEBBPSF_PATH" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "STIPS Version 2.0.0 with Data Version 1.0.9 at /Users/halkowsi/STScI-STIPS/ref_data/stips_data.\n", - "\tSTIPS Grid Generated with 1.0.8\n", - "Pandeia Version 1.7 with Data Version 1.7 at /Users/halkowsi/STScI-STIPS/ref_data/pandeia_data-1.7_roman.\n", - "Webbpsf Version 1.0.0 with Data Version 1.0.0 at /Users/halkowsi/STScI-STIPS/ref_data/webbpsf-data.\n", - "\n" - ] - } - ], - "source": [ - "import stips\n", - "\n", - "print(stips.__env__report__)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Setting up some Basics\n", - "\n", - "STIPS allows you to set up some basic elements of your observation and pass them when creating and running observations. The section below shows one way to set these up." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "obs_prefix = 'notebook_example'\n", - "obs_ra = 150.0\n", - "obs_dec = -2.5" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Creating a Scene to Observe\n", - "\n", - "STIPS contains functions to generate stellar populations as well as background galaxies. These functions are all present in the `SceneModule` class. In order to know what sort of populations to generate, the Scene Module requires input dictionaries to specify population parameters. 
In this example, we will create the following:\n", - "\n", - "* A stellar population representing a globular cluster with\n", - " * 10,000 stars\n", - " * An age of 7.5 billion years\n", - " * A metallicity of -2.0\n", - " * A Salpeter IMF with alpha=-2.35\n", - " * A binary fraction of 10%\n", - " * A clustered distribution (higher-mass stars closer to the population centre)\n", - " * An inverse power-law distribution\n", - " * A radius of 100 parsecs\n", - " * A distance of 20 kpc\n", - " * No offset from the centre of the scene being created\n", - "* A collection of background galaxies with\n", - " * 100 galaxies\n", - " * Redshifts between 0 and 0.2\n", - " * Radii between 0.01 and 2.0 arcsec\n", - " * V-band surface brightness magnitudes between 25 and 30\n", - " * Uniform spatial distribution (unclustered) over 200 arcsec\n", - " * No offset from the centre of the scene being created\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2020-10-14 13:03:10,044 INFO: Creating catalogue /Users/york/Projects/stips/tmp/notebook_example_stars_000.fits\n", - "2020-10-14 13:03:10,045 INFO: Creating age and metallicity numbers\n", - "2020-10-14 13:03:10,045 INFO: Created age and metallicity numbers\n", - "2020-10-14 13:03:10,046 INFO: Creating stars\n", - "2020-10-14 13:03:10,046 INFO: Age 1.35e+10\n", - "2020-10-14 13:03:10,047 INFO: Metallicity -2.000000\n", - "2020-10-14 13:03:10,047 INFO: Creating 50000 stars\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Log level: INFO\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2020-10-14 13:03:10,419 INFO: Creating 50000 objects, max radius 100.0, function invpow, scale 2.8\n", - "2020-10-14 13:03:10,579 INFO: Chunk 1: 54987 stars\n", - "WARNING: UnitsWarning: 'degrees' did not parse as fits unit: At col 0, Unit 'degrees' not supported by the FITS standard. If this is meant to be a custom unit, define it with 'u.def_unit'. To have it recognized inside a file reader or other code, enable it with 'u.add_enabled_units'. For details, see https://docs.astropy.org/en/latest/units/combining_and_defining.html [astropy.units.core]\n", - "WARNING: UnitsWarning: 'years' did not parse as fits unit: At col 0, Unit 'years' not supported by the FITS standard. If this is meant to be a custom unit, define it with 'u.def_unit'. To have it recognized inside a file reader or other code, enable it with 'u.add_enabled_units'. For details, see https://docs.astropy.org/en/latest/units/combining_and_defining.html [astropy.units.core]\n", - "WARNING: UnitsWarning: 'solar masses' did not parse as fits unit: At col 0, Unit 'solar' not supported by the FITS standard. If this is meant to be a custom unit, define it with 'u.def_unit'. To have it recognized inside a file reader or other code, enable it with 'u.add_enabled_units'. For details, see https://docs.astropy.org/en/latest/units/combining_and_defining.html [astropy.units.core]\n", - "WARNING: UnitsWarning: 'johnson,i' did not parse as fits unit: At col 0, Unit 'johnson' not supported by the FITS standard. If this is meant to be a custom unit, define it with 'u.def_unit'. To have it recognized inside a file reader or other code, enable it with 'u.add_enabled_units'. 
For details, see https://docs.astropy.org/en/latest/units/combining_and_defining.html [astropy.units.core]\n", - "WARNING: VerifyWarning: Keyword name 'distribution' is greater than 8 characters or contains characters not allowed by the FITS standard; a HIERARCH card will be created. [astropy.io.fits.card]\n", - "WARNING: VerifyWarning: Keyword name 'clustered' is greater than 8 characters or contains characters not allowed by the FITS standard; a HIERARCH card will be created. [astropy.io.fits.card]\n", - "WARNING: VerifyWarning: Keyword name 'radius_units' is greater than 8 characters or contains characters not allowed by the FITS standard; a HIERARCH card will be created. [astropy.io.fits.card]\n", - "WARNING: VerifyWarning: Keyword name 'offset_ra' is greater than 8 characters or contains characters not allowed by the FITS standard; a HIERARCH card will be created. [astropy.io.fits.card]\n", - "WARNING: VerifyWarning: Keyword name 'offset_dec' is greater than 8 characters or contains characters not allowed by the FITS standard; a HIERARCH card will be created. [astropy.io.fits.card]\n", - "2020-10-14 13:03:10,695 INFO: Done creating catalogue\n", - "2020-10-14 13:03:10,696 INFO: Creating catalogue /Users/york/Projects/stips/tmp/notebook_example_gals_000.fits\n", - "2020-10-14 13:03:10,697 INFO: Wrote preamble\n", - "2020-10-14 13:03:10,697 INFO: Parameters are: {'n_gals': 1000, 'z_low': 0.0, 'z_high': 0.2, 'rad_low': 0.01, 'rad_high': 2.0, 'sb_v_low': 30.0, 'sb_v_high': 25.0, 'distribution': 'uniform', 'clustered': False, 'radius': 200.0, 'radius_units': 'arcsec', 'offset_ra': 0.0, 'offset_dec': 0.0}\n", - "2020-10-14 13:03:10,717 INFO: Making Co-ordinates\n", - "2020-10-14 13:03:10,718 INFO: Creating 1000 objects, max radius 200.0, function uniform, scale 2.8\n", - "2020-10-14 13:03:10,718 INFO: Converting Co-ordinates into RA,DEC\n", - "WARNING: UnitsWarning: 'johnson,v' did not parse as fits unit: At col 0, Unit 'johnson' not supported by the FITS standard. If this is meant to be a custom unit, define it with 'u.def_unit'. To have it recognized inside a file reader or other code, enable it with 'u.add_enabled_units'. 
For details, see https://docs.astropy.org/en/latest/units/combining_and_defining.html [astropy.units.core]\n", - "2020-10-14 13:03:10,776 INFO: Done creating catalogue\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Stellar population saved to file /Users/york/Projects/stips/tmp/notebook_example_stars_000.fits\n", - "Galaxy population saved to file /Users/york/Projects/stips/tmp/notebook_example_gals_000.fits\n" - ] - } - ], - "source": [ - "from stips.scene_module import SceneModule\n", - "\n", - "scm = SceneModule(out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec)\n", - "\n", - "stellar_parameters = {\n", - " 'n_stars': 100,\n", - " 'age_low': 7.5e12, \n", - " 'age_high': 7.5e12,\n", - " 'z_low': -2.0, \n", - " 'z_high': -2.0,\n", - " 'imf': 'salpeter', \n", - " 'alpha': -2.35,\n", - " 'binary_fraction': 0.1,\n", - " 'clustered': True,\n", - " 'distribution': 'invpow',\n", - " 'radius': 100.0, \n", - " 'radius_units': 'pc',\n", - " 'distance_low': 20.0, \n", - " 'distance_high': 20.0,\n", - " 'offset_ra': 0.0, \n", - " 'offset_dec': 0.0\n", - " }\n", - "\n", - "stellar_cat_file = scm.CreatePopulation(stellar_parameters)\n", - "print(\"Stellar population saved to file {}\".format(stellar_cat_file))\n", - "\n", - "galaxy_parameters = {\n", - " 'n_gals': 10,\n", - " 'z_low': 0.0, \n", - " 'z_high': 0.2,\n", - " 'rad_low': 0.01, \n", - " 'rad_high': 2.0,\n", - " 'sb_v_low': 30.0, \n", - " 'sb_v_high': 25.0,\n", - " 'distribution': 'uniform', \n", - " 'clustered': False,\n", - " 'radius': 200.0, \n", - " 'radius_units': 'arcsec',\n", - " 'offset_ra': 0.0, \n", - " 'offset_dec': 0.0\n", - " }\n", - "\n", - "galaxy_cat_file = scm.CreateGalaxies(galaxy_parameters)\n", - "print(\"Galaxy population saved to file {}\".format(galaxy_cat_file))\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Creating a STIPS observation\n", - "\n", - "Once a scene has been created, it's possible to observe that scene as many times as you wish (and from as many places as you wish, although obviously any observation that doesn't include at least some of the scene will simply be an empty exposure). In this case, we create a single Roman WFI observation.\n", - "\n", - "STIPS uses a bit of specialialized terminology to describe its observations. In particular:\n", - "\n", - "* An Observation is a set of exposures with a single instrument (e.g. Roman WFI), one or more filters (where each\n", - " exposure in the observation will be repeated for every included filter), some number of the instrument's \n", - " detectors (for WFI between 1 and 18), where each exposure in the observation will be repeated, with the \n", - " appropriate inter-detector offset, for every included detector, a single chosen sky background value, and a \n", - " single exposure time (applied to each exposure in the observation), and one or more offsets.\n", - "* An Offset is a single telescope pointing. For each offset specified in the observation, an exposure will be \n", - " created for each detector and each filter at the offset. STIPS may, optionally, create one or more mosaics at \n", - " each offset, with a single mosaic including all detectors with the same filter. 
In addition, STIPS can create a \n", - " single combined mosaic for each filter in the combined Observation.\n", - "\n", - "In this case, we will create an observation with:\n", - "\n", - "* Roman WFI F129\n", - "* 1 detector\n", - "* No distortion\n", - "* A background rate of 0.15 counts/s/pixel\n", - "* The ID 1\n", - "* An exposure time of 1000 seconds\n", - "\n", - "We will use a single offset with\n", - "\n", - "* An ID of 1\n", - "* No centring (if an offset is centred then, for a multi-detector observation, each detector is centred on the \n", - " offset co-ordinates individually rather than the instrument as a whole being centred there)\n", - "* No change in RA, DEC, or PA from the centre of the observation\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2020-10-14 13:03:25,686 INFO: Got offsets as [{'offset_id': 1, 'offset_centre': False, 'offset_ra': 0.0, 'offset_dec': 0.0, 'offset_pa': 0.0}]\n", - "2020-10-14 13:03:25,812 INFO: Adding observation with filter F129 and offset (0.0,0.0,0.0)\n", - "2020-10-14 13:03:25,813 INFO: Added 1 observations\n", - "2020-10-14 13:03:25,844 INFO: WFI with 1 detectors. Central offset (0.0, 0.0, 0.0)\n", - "2020-10-14 13:03:25,844 INFO: Initializing Observation 0 of 1\n", - "2020-10-14 13:03:25,845 INFO: Observation Filter is F129\n", - "2020-10-14 13:03:25,845 INFO: Observation (RA,DEC) = (150.000000,-2.500000) with PA=0.000000\n", - "2020-10-14 13:03:25,846 INFO: Resetting\n", - "2020-10-14 13:03:25,846 INFO: Creating Detector SCA01 with (RA,DEC,PA) = (150.0,-2.5,0.0)\n", - "2020-10-14 13:03:25,847 INFO: Creating Detector SCA01 with offset (0.0,0.0)\n", - "2020-10-14 13:03:25,885 INFO: Creating Instrument with Configuration {'aperture': 'any', 'disperser': None, 'filter': 'f129', 'instrument': 'wfirstimager', 'mode': 'imaging'}\n", - "2020-10-14 13:03:25,982 INFO: PSF File psf_WFI_1.0.8_F129_1_1_sca01.fits to be put at /Users/york/Data/Reference/stips_data-1.0.9/psf_cache\n", - "2020-10-14 13:03:25,983 INFO: PSF File is /Users/york/Data/Reference/stips_data-1.0.9/psf_cache/psf_WFI_1.0.8_F129_1_1_sca01.fits\n", - "2020-10-14 13:03:26,164 INFO: PSF choosing between 4096, 4096 and 176\n", - "2020-10-14 13:03:26,171 INFO: SCA01: Starting 1x1 PSF Grid creation at Wed Oct 14 13:03:26 2020\n", - "Pysynphot unavailable (or invalid source supplied)! 
Assuming flat # of counts versus wavelength.\n", - "CAUTION: Just interpolating rather than integrating filter profile, over 10 steps\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "Running instrument: WFI, filter: F129\n", - " Running detector: SCA01\n", - " Position 1/1: (2047, 2047) pixels\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2020-10-14 13:03:40,074 INFO: SCA01: Finished PSF Grid creation at Wed Oct 14 13:03:40 2020\n", - "2020-10-14 13:03:40,148 INFO: SCA01: (RA, DEC, PA) := (150.0, -2.5, 0.0), detected as (150.0, -2.5, 0.0)\n", - "2020-10-14 13:03:40,149 INFO: Detector SCA01 created\n", - "2020-10-14 13:03:40,150 INFO: Reset Instrument\n", - "2020-10-14 13:03:40,150 ERROR: Error Parsing Background: 0.15\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - " Saving file: /Users/york/Data/Reference/stips_data-1.0.9/psf_cache/psf_WFI_1.0.8_F129_1_1_sca01.fits\n" - ] - }, - { - "data": { - "text/plain": [ - "0" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "from stips.observation_module import ObservationModule\n", - "\n", - "offset = {\n", - " 'offset_id': 1,\n", - " 'offset_centre': False,\n", - " 'offset_ra': 0.0,\n", - " 'offset_dec': 0.0,\n", - " 'offset_pa': 0.0\n", - " }\n", - "\n", - "observation_parameters = {\n", - " 'instrument': 'WFI',\n", - " 'filters': ['F129'],\n", - " 'detectors': 1,\n", - " 'distortion': False,\n", - " 'background': 0.15,\n", - " 'observations_id': 1,\n", - " 'exptime': 1000,\n", - " 'offsets': [offset]\n", - " }\n", - "\n", - "obm = ObservationModule(observation_parameters, out_prefix=obs_prefix, ra=obs_ra, dec=obs_dec)\n", - "\n", - "# nextObservation is called to move between different combinations of offset and filter.\n", - "# It must be called once in order to initialize the observation module to the first observation before\n", - "# adding catalogues.\n", - "obm.nextObservation()\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Observing the Scene\n", - "\n", - "In order to observe the scene, we must add the scene catalogues created above to it, add in error residuals, and finalize the observation. In so doing, we create output catalogues which are taken from the input catalogues, but only contain the sources visible to the detectors, and convert source brightnesses into units of counts/s for the detectors." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2020-10-14 13:03:56,238 INFO: Running catalogue /Users/york/Projects/stips/tmp/notebook_example_stars_000.fits\n", - "2020-10-14 13:03:56,239 INFO: Adding catalogue /Users/york/Projects/stips/tmp/notebook_example_stars_000.fits\n", - "2020-10-14 13:03:56,375 INFO: Converting phoenix catalogue\n", - "2020-10-14 13:03:56,376 INFO: Preparing output table\n", - "2020-10-14 13:03:56,425 INFO: Converting chunk 2\n", - "2020-10-14 13:03:56,425 INFO: Converting Phoenix Table to Internal format\n", - "2020-10-14 13:03:56,427 INFO: 1 datasets\n", - "2020-10-14 13:03:57,202 INFO: Finished converting catalogue to internal format\n", - "2020-10-14 13:03:57,203 INFO: Adding catalogue to detector SCA01\n", - "2020-10-14 13:03:57,203 INFO: Adding catalogue notebook_example_stars_000_01_conv_F129.fits to AstroImage SCA01\n", - "2020-10-14 13:03:57,224 INFO: Determining pixel co-ordinates\n", - "2020-10-14 13:03:57,233 INFO: Keeping 37520 items\n", - "2020-10-14 13:03:57,239 INFO: Writing 37520 stars\n", - "2020-10-14 13:03:57,240 INFO: Adding 37520 point sources to AstroImage SCA01\n", - "WARNING: UnitsWarning: 'counts/s' did not parse as fits unit: At col 0, Unit 'counts' not supported by the FITS standard. Did you mean count? If this is meant to be a custom unit, define it with 'u.def_unit'. To have it recognized inside a file reader or other code, enable it with 'u.add_enabled_units'. For details, see https://docs.astropy.org/en/latest/units/combining_and_defining.html [astropy.units.core]\n", - "WARNING: VerifyWarning: Keyword name 'input_catalogue' is greater than 8 characters or contains characters not allowed by the FITS standard; a HIERARCH card will be created. [astropy.io.fits.card]\n", - "2020-10-14 13:03:57,457 INFO: Added catalogue notebook_example_stars_000_01_conv_F129.fits to AstroImage SCA01\n", - "2020-10-14 13:03:57,459 INFO: Finished catalogue /Users/york/Projects/stips/tmp/notebook_example_stars_000.fits\n", - "2020-10-14 13:03:57,459 INFO: Running catalogue /Users/york/Projects/stips/tmp/notebook_example_gals_000.fits\n", - "2020-10-14 13:03:57,460 INFO: Adding catalogue /Users/york/Projects/stips/tmp/notebook_example_gals_000.fits\n", - "2020-10-14 13:03:57,486 INFO: Converting bc95 catalogue\n", - "2020-10-14 13:03:57,486 INFO: Preparing output table\n", - "2020-10-14 13:03:57,533 INFO: Converting chunk 2\n", - "2020-10-14 13:03:57,534 INFO: Converting BC95 Catalogue\n", - "2020-10-14 13:03:57,534 INFO: Normalization Bandpass is johnson,v ()\n", - "2020-10-14 13:03:57,535 INFO: Normalization Bandpass is johnson,v\n", - "2020-10-14 13:04:01,825 WARNING: Source 78 of 1000: Pysynphot Error Error using Synphot to perform bandpass normalization in SpectralElement\n", - "Model: Empirical1D\n", - "N_inputs: 1\n", - "N_outputs: 1\n", - "Parameters: \n", - " points: (array([4700., 4750., 4800., 4850., 4900., 4950., 5000., 5050., 5100.,\n", - " 5150., 5200., 5250., 5300., 5350., 5400., 5450., 5500., 5550.,\n", - " 5600., 5650., 5700., 5750., 5800., 5850., 5900., 5950., 6000.,\n", - " 6050., 6100., 6150., 6200., 6250., 6300., 6350., 6400., 6450.,\n", - " 6500., 6550., 6600., 6650., 6700., 6750., 6800., 6850., 6900.,\n", - " 6950., 7000.], dtype=float32),)\n", - " lookup_table: [0. 
0.004 0.032 0.084 0.172 0.31 0.478 0.65 0.802 0.913 0.978 1.\n", - " 0.994 0.977 0.95 0.911 0.862 0.806 0.747 0.69 0.634 0.579 0.523 0.467\n", - " 0.413 0.363 0.317 0.274 0.234 0.2 0.168 0.14 0.114 0.089 0.067 0.05\n", - " 0.037 0.027 0.02 0.016 0.013 0.012 0.01 0.009 0.007 0.004 0. ]\n", - " method: linear\n", - " fill_value: 0\n", - " bounds_error: FalseSynphotError('Integrated flux is <= 0') encountered\n", - "2020-10-14 13:04:03,366 WARNING: Source 108 of 1000: Pysynphot Error Error using Synphot to perform bandpass normalization in SpectralElement\n", - "Model: Empirical1D\n", - "N_inputs: 1\n", - "N_outputs: 1\n", - "Parameters: \n", - " points: (array([4700., 4750., 4800., 4850., 4900., 4950., 5000., 5050., 5100.,\n", - " 5150., 5200., 5250., 5300., 5350., 5400., 5450., 5500., 5550.,\n", - " 5600., 5650., 5700., 5750., 5800., 5850., 5900., 5950., 6000.,\n", - " 6050., 6100., 6150., 6200., 6250., 6300., 6350., 6400., 6450.,\n", - " 6500., 6550., 6600., 6650., 6700., 6750., 6800., 6850., 6900.,\n", - " 6950., 7000.], dtype=float32),)\n", - " lookup_table: [0. 0.004 0.032 0.084 0.172 0.31 0.478 0.65 0.802 0.913 0.978 1.\n", - " 0.994 0.977 0.95 0.911 0.862 0.806 0.747 0.69 0.634 0.579 0.523 0.467\n", - " 0.413 0.363 0.317 0.274 0.234 0.2 0.168 0.14 0.114 0.089 0.067 0.05\n", - " 0.037 0.027 0.02 0.016 0.013 0.012 0.01 0.009 0.007 0.004 0. ]\n", - " method: linear\n", - " fill_value: 0\n", - " bounds_error: FalseSynphotError('Integrated flux is <= 0') encountered\n" - ] - } - ], - "source": [ - "output_stellar_catalogues = obm.addCatalogue(stellar_cat_file)\n", - "output_galaxy_catalogues = obm.addCatalogue(galaxy_cat_file)\n", - "\n", - "print(\"Output Catalogues are {} and {}.\".format(output_stellar_catalogues, output_galaxy_catalogues))\n", - "\n", - "psf_file = obm.addError()\n", - "\n", - "print(\"PSF File is {}\".format(psf_file))\n", - "\n", - "fits_file, mosaic_file, params = obm.finalize(mosaic=False)\n", - "\n", - "print(\"Output FITS file is {}\".format(fits_file))\n", - "print(\"Output Mosaic File is {}\".format(mosaic_file))\n", - "print(\"Observation Parameters are {}\".format(params))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Show the Result\n", - "\n", - "We use pyplot to plot the resulting simulated image." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%matplotlib inline\n", - "%config InlineBackend.figure_format = 'svg'\n", - "import matplotlib\n", - "from matplotlib import style\n", - "matplotlib.rcParams['axes.grid'] = False\n", - "matplotlib.rcParams['image.origin'] = 'lower'\n", - "import matplotlib.pyplot as plt\n", - "from astropy.io import fits\n", - "\n", - "with fits.open(fits_file) as result_file:\n", - " result_data = result_file[1].data\n", - "\n", - "fig1 = plt.figure()\n", - "im = plt.matshow(result_data, norm=matplotlib.colors.LogNorm())\n" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.12" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/notebooks/notebooks_data/psf_WFI_2.0.0_F129_sca01.fits b/notebooks/notebooks_data/psf_WFI_2.0.0_F129_sca01.fits new file mode 100644 index 0000000..fb47220 Binary files /dev/null and b/notebooks/notebooks_data/psf_WFI_2.0.0_F129_sca01.fits differ diff --git a/setup.cfg b/setup.cfg index 58c0f66..5ea99b0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,7 @@ [metadata] name = stips # version should be PEP440 compatible (https://www.python.org/dev/peps/pep-0440/) -version = 2.0.0 +version = 2.1.0 author = Space Telescope Science Institute author_email = york@stsci.edu description = STIPS is the Space Telescope Imaging Product Simulator. @@ -10,23 +10,24 @@ license = BSD 3-Clause url = https://github.com/spacetelescope/STScI-STIPS edit_on_github = False github_project = https://github.com/spacetelescope/STScI-STIPS -python_requires = ">=3.8" +python_requires = ">=3.10" [options] packages = stips setup_requires = numpy -install_requires = +install_requires = astropy numpy scipy photutils - synphot - stsynphot - webbpsf==1.0.0 - pandeia.engine==1.7 + synphot==1.1.1 + stsynphot==1.1.0 + webbpsf==1.1.1 + pandeia.engine==3.0 montage-wrapper pyyaml + soc_roman_tools zip_safe = False use_2to3 = False include_package_data = True @@ -59,7 +60,7 @@ markers = slow: takes more than 12.5 minutes. Deselect with '-m "not slow"' veryslow: takes more than 1 hour. Deselect with '-m "not veryslow"' network: requires internet connection. Deselect with '-m "not network"' - + [flake8] exclude = extern,sphinx,*parsetab.py,astropy_helpers,ah_bootstrap.py,conftest.py,docs/conf.py,setup.py diff --git a/stips/__init__.py b/stips/__init__.py index 3ada0cf..7c64987 100755 --- a/stips/__init__.py +++ b/stips/__init__.py @@ -4,7 +4,7 @@ __all__ = ['observation_module', 'scene_module'] -__version__ = "2.0.0" +__version__ = "2.1.0" version = __version__ from .utilities import SetupDataPaths diff --git a/stips/astro_image/astro_image.py b/stips/astro_image/astro_image.py index 94747da..db48bd8 100755 --- a/stips/astro_image/astro_image.py +++ b/stips/astro_image/astro_image.py @@ -354,10 +354,10 @@ def imageHdu(self): hdu.header['CDELT2'] = self.scale[0]/3600. # Apparently astropy refuses to add the identity matrix to a header if ('PA1_1' not in hdu.header) and ('CD1_1' not in hdu.header): - hdu.header['PA1_1'] = 1. - hdu.header['PA1_2'] = 0. - hdu.header['PA2_1'] = 0. - hdu.header['PA2_2'] = 1. + hdu.header['CD1_1'] = -self.scale[0]/3600. 
+ hdu.header['CD1_2'] = 0. + hdu.header['CD2_1'] = 0. + hdu.header['CD2_2'] = self.scale[0]/3600. for k, v in self.header.items(): hdu.header[k] = v for item in self.history: @@ -782,7 +782,8 @@ def addSersicProfile(self, posX, posY, flux, n, re, phi, axialRatio, *args, **kw g = coords.Grid(1., 1., self.xsize, self.ysize) xc, yc = ix, iy - src = source.Source(config=source_dict) + # Roman is the only telescope supported + src = source.Source('roman', config=source_dict) src.grid = g sersic = profile.SersicDistribution(src) img = deepcopy(sersic.prof)*flux/np.sum(sersic.prof) @@ -967,9 +968,20 @@ def setExptime(self, exptime): not affect the data values, but it does affect errors residuals (e.g. dark current and cosmic ray count) """ + factor = exptime / self.exptime + self.data *= factor self.exptime = exptime self.updateHeader('exptime', self.exptime) + #self.exptime = exptime + #self.updateHeader('exptime', self.exptime) + + def setUnits(self): + """ + Divide the image by the exposure time to the output is in e/s as opposed to e. + """ + self.data /= self.exptime + def addBackground(self, background): """ Add a constant value per-pixel. Automatically scale that value based on oversampling. diff --git a/stips/data/psf_WFI_2.0.0_F129_sca01.fits b/stips/data/psf_WFI_2.0.0_F129_sca01.fits new file mode 100755 index 0000000..fb47220 Binary files /dev/null and b/stips/data/psf_WFI_2.0.0_F129_sca01.fits differ diff --git a/stips/instruments/instrument.py b/stips/instruments/instrument.py index 30efad4..3641499 100755 --- a/stips/instruments/instrument.py +++ b/stips/instruments/instrument.py @@ -18,13 +18,9 @@ # Local Modules from ..stellar_module import StarGenerator from ..astro_image import AstroImage -from ..utilities import GetStipsData -from ..utilities import OffsetPosition -from ..utilities import SelectParameter -from ..utilities import StipsDataTable +from ..utilities import GetStipsData, OffsetPosition, SelectParameter, get_pandeia_background, StipsDataTable from ..utilities.makePSF import PSF_GRID_SIZE - class Instrument(object): """ The Instrument class represents a virtual base class which will be implemented as a variety of @@ -82,7 +78,8 @@ def __init__(self, **kwargs): self.DETECTOR_OFFSETS = self.DETECTOR_OFFSETS[:n_detectors] self.OFFSET_NAMES = self.OFFSET_NAMES[:n_detectors] if hasattr(self, "N_OFFSET"): - self.CENTRAL_OFFSET = self.N_OFFSET[n_detectors] + # Set the central offset to SCA01 + self.CENTRAL_OFFSET = self.N_OFFSET[1] msg = "{} with {} detectors. 
Central offset {}" self._log('info', msg.format(self.DETECTOR, n_detectors, self.CENTRAL_OFFSET)) @@ -725,7 +722,7 @@ def stringify(num): note = "BC95_{}_{}_{}".format(model, stringify(age), mag) notes = np.append(notes, note) t = Table() - t['ra'] = Column(data=ras.data, dtype=np.float) + t['ra'] = Column(data=ras.data, dtype=float) t['dec'] = Column(data=decs.data) t['flux'] = Column(data=rates.data) t['type'] = Column(data=np.full_like(ras, 'sersic', dtype='S7')) @@ -974,6 +971,7 @@ def addError(self, *args, **kwargs): detector.introduceCosmicRayResidual(self.PIXEL_SIZE) if 'cr' in snapshots or 'all' in snapshots: detector.toFits(self.imgbase+"_{}_{}_snapshot_cr.fits".format(self.obs_count, detector.name)) + detector.setUnits() self._log("info", "Finished adding error") def normalize(self, source_spectrum_or_wave_flux, norm_flux, bandpass): @@ -1128,11 +1126,15 @@ def pixel_background_unit(self): msg = "Returning background {} for '{}'" self._log("info", msg.format(bkg, self.background_value)) return bkg*u.ct/u.s + elif self.background_value == 'pandeia': + msg = "Returning background {} for 'pandeia'" + self.custom_background = get_pandeia_background(self.filter) + self._log("info", msg.format(self.custom_background)) + return self.custom_background*u.ct/u.s elif self.background_value == 'custom': msg = "Returning background {} for 'custom'" self._log("info", msg.format(self.custom_background)) return self.custom_background*u.ct/u.s - msg = "Unknown Background {}. Returning 0." self._log("warning", msg.format(self.background_value)) return 0.*u.ct/u.second diff --git a/stips/instruments/roman_instrument.py b/stips/instruments/roman_instrument.py index a1d343f..6791e40 100755 --- a/stips/instruments/roman_instrument.py +++ b/stips/instruments/roman_instrument.py @@ -29,6 +29,9 @@ def __init__(self, **kwargs): } TELESCOPE = 'ROMAN' + # WARNING : The definition of the area is currently under discussion, the + # value presented here is simply pi * r^2, where r = 0.6m, half the diameter + # of the Roman mirror. This is subject to change. AREA = 45238.93416 DBNAME = "IsochroneGrid.db" MODE = 'imaging' diff --git a/stips/instruments/wfi.py b/stips/instruments/wfi.py index eae7af6..f44712e 100755 --- a/stips/instruments/wfi.py +++ b/stips/instruments/wfi.py @@ -1,7 +1,11 @@ __filetype__ = "detector" # Local Modules +from astropy import units as u +from astropy.coordinates import SkyCoord, ICRS from .roman_instrument import RomanInstrument +from soc_roman_tools.siaf import siaf +import numpy as np class WFI(RomanInstrument): @@ -20,6 +24,163 @@ def __init__(self, **kwargs): Initializes detectors (single detector for now). """ self.classname = self.__class__.__name__ + + # N_DETECTORS is a set of options on how many of the instrument's detectors you want to use + self.N_DETECTORS = [1] + self.INSTRUMENT_OFFSET = (0., 0., 0.) 
# Presumably there is one, but not determined + self.DETECTOR_SIZE = (4088, 4088) # pixels + self.PIXEL_SIZE = 10.0 # um (Assume for now) + self.SCALE = [0.11, 0.11] # Assume for now + self.FILTERS = ('F062', 'F087', 'F106', 'F129', 'F158', 'F184', 'F146') + self.DEFAULT_FILTER = 'F184' # Assume for now + self.SCA_ROTATION = -60 # Rotation of SCA with respect to SIAF + + # Get RA and DEC that will be used for detector offset calculations + # RA = kwargs.get('ra', 0) + DEC = kwargs.get('dec', 0) + + # Calculate the detector offsets based on the Roman SIAF file from soc_roman_tools + # The telescope (tel) frame is defined such that (0, 0) is the center of the boresight + # path of the WFI in the observatory coordinate system. + + # Get Roman SIAF + rsiaf = siaf.RomanSiaf() + + # Make array of SCA's + self.SCA_NAMES = ["WFI01_FULL", "WFI02_FULL", "WFI03_FULL", "WFI04_FULL", "WFI05_FULL", + "WFI06_FULL", "WFI07_FULL", "WFI08_FULL", "WFI09_FULL", "WFI10_FULL", + "WFI11_FULL", "WFI12_FULL", "WFI13_FULL", "WFI14_FULL", "WFI15_FULL", + "WFI16_FULL", "WFI17_FULL", "WFI18_FULL"] + + # Default detector offsets in (arcseconds_ra,arcseconds_dec,degrees_angle) + self.DETECTOR_OFFSETS = [# SCA01 SCA02 SCA03 + (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), + # SCA04 SCA05 SCA06 + (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), + # SCA07 SCA08 SCA09 + (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), + # SCA10 SCA11 SCA12 + (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), + # SCA13 SCA14 SCA15 + (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), + # SCA16 SCA17 SCA18 + (0.0, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.0, 0.0)] + + # Reference point + s01 = rsiaf['WFI01_FULL'].sci_to_tel(self.DETECTOR_SIZE[0] / 2, self.DETECTOR_SIZE[1] / 2) + center = SkyCoord(s01[0] * u.arcsec, + s01[1] * u.arcsec).skyoffset_frame(self.SCA_ROTATION * u.deg) + + for i, sca in enumerate(self.SCA_NAMES): + # Get V2, V3 coordinate pair in the telescope frame at the center of the SCA + V2, V3 = rsiaf[sca].sci_to_tel(self.DETECTOR_SIZE[0] / 2, self.DETECTOR_SIZE[1] / 2) + # Transform these to the center reference point + out = ICRS(V2 * u.arcsec, V3 * u.arcsec).transform_to(center) + # Get out offsets in X and Y direction of SCA array, convert to arcsec + X, Y = out.lon.value * 3600, out.lat.value * 3600 + # Offset in arcsec, arcsec, and degrees angle, converting from deg to radians + self.DETECTOR_OFFSETS[i] = (X / np.cos(DEC / 180 * np.pi), Y, 0) + + # Copy the results into these variables needed by some functions in STIPS + self.OFFSET_NAMES = ("SCA01", "SCA02", "SCA03", "SCA04", "SCA05", "SCA06", + "SCA07", "SCA08", "SCA09", "SCA10", "SCA11", "SCA12", + "SCA13", "SCA14", "SCA15", "SCA16", "SCA17", "SCA18") + self.N_OFFSET = {1: self.DETECTOR_OFFSETS[0], 2: self.DETECTOR_OFFSETS[1], + 3: self.DETECTOR_OFFSETS[2], 4: self.DETECTOR_OFFSETS[3], + 5: self.DETECTOR_OFFSETS[4], 6: self.DETECTOR_OFFSETS[5], + 7: self.DETECTOR_OFFSETS[6], 8: self.DETECTOR_OFFSETS[7], + 9: self.DETECTOR_OFFSETS[8], 10: self.DETECTOR_OFFSETS[9], + 11: self.DETECTOR_OFFSETS[10], 12: self.DETECTOR_OFFSETS[11], + 13: self.DETECTOR_OFFSETS[12], 14: self.DETECTOR_OFFSETS[13], + 15: self.DETECTOR_OFFSETS[14], 16: self.DETECTOR_OFFSETS[15], + 17: self.DETECTOR_OFFSETS[16], 18: self.DETECTOR_OFFSETS[17]} + + # This is a set of offsets derived from "WFIRST-STSCI-TR1506A" + # Further, it assumes no rotation or imperfection (see ASCII diagrams). 
+ # 07 16 + # 08 04 13 17 + # 09 01 10 18 + # 05 14 + # 06 02 11 15 + # 03 12 + # + # The following positions come from the spreadsheet + # "GRISM off orders position_Zernikes_efficiency data_v4_20200424.xlsx" on + # the "WSM-GRISM" tab, with values in mm. Note that for all detectors values + # are taken from Field Position 1 (centre) for a wavelength of 1 micron. + # + # These are referred to as FPA positions because they are the detector + # positions on the focal plane array, albeit with the Y axis inverted to + # yield WFI-local co-ordinates rather than FPA-local co-ordinates. + + self.FPA_GLOBAL_ROTATION = -120. # degrees + + # From WFIRST-STScI-TR1506A(3).pdf, 2.5mm=27.5", and 8.564mm=94.2" + self.DETECTOR_MM_ARCSEC = 11. + + # I can't currently figure out rotation angles (or rather figure out which + # rotation angle to use), so I'm setting everything to just zero for now. + self.DETECTOR_ROTATION = (0., 0., 0., 0., 0., 0., 0., 0., 0., + 0., 0., 0., 0., 0., 0., 0., 0., 0.) + + self.PSF_INSTRUMENT = "WFI" + + # Reference Files + self.FLATFILE = 'err_flat_wfi.fits' # Use for the moment + self.DARKFILE = 'err_rdrk_wfi.fits' # IREF, IHB (use for the moment) + + # Background Values + self.BACKGROUND = {'none': {'F062': 0., 'F087': 0., 'F106': 0., + 'F129': 0., 'F158': 0., 'F184': 0., 'F146': 0.}, + 'avg': {'F062': 1.401E+00, 'F087': 1.401E+00, 'F106': 1.401E+00, 'F129': 7.000E-01, + 'F158': 7.521E-01, 'F184': 8.500E-01, 'F146': 7.000E-01}} + self.BACKGROUNDS_V = ['none', 'avg', 'med', 'max', 'min'] + self.BACKGROUNDS = ['None', 'Average zodiacal background', 'Median zodiacal background', + 'Maximum zodiacal background', 'Minimum zodiacal background'] + self.BGTEXT = {'none': 'None', 'avg': 'Average zodiacal background', + 'med': 'Median zodiacal background', 'max': 'Maximum zodiacal background', + 'min': 'Minimum zodiacal background', 'custom': 'Custom thermal background rate', + 'pandeia': 'Pandeia background rate'} + # PHOTFNU has units of Jy + # For now, just assuming similar PHOTFNU to WFC3IR. 
+ # For now, just put them in the middle + self.PHOTPLAM = {'F062': 0.620, 'F087': 0.869, 'F106': 1.060, 'F129': 1.293, + 'F158': 1.577, 'F184': 1.842, 'F146': 1.464} + + self.ZEROPOINTS_AB = {'F062': 26.73, 'F087': 26.39, 'F106': 26.41, 'F129': 26.43, + 'F158': 26.47, 'F184': 26.08, 'F146': 27.66} + self.PHOTFNU = {} + for i in self.ZEROPOINTS_AB: + self.PHOTFNU[i] = 10 ** (0.4 * (8.9 - self.ZEROPOINTS_AB[i])) + + self.DITHERS = ("SUBPIXEL ONLY", "BOX-UVIS", "BLOB") # Assume for now + self.DITHER_POINTS = { + "SUBPIXEL ONLY": ["0"], + "BOX-UVIS": ["4"], + "BLOB": ["1", "2"] + } + self.DITHER_SIZE = { + "SUBPIXEL ONLY": ["STABDARD"], + "BOX-UVIS": ["Standard"], + "BLOB": ["Standard"] + } + self.DITHER_SUBPIXEL = { + "SUBPIXEL ONLY": ["LINE", "LINE-3PT", "BOX-MIN"], + "BOX-UVIS": ["NONE", "LINE", "LINE-3PT", "BOX-MIN"], + "BLOB": ["NONE", "LINE", "LINE-3PT", "BOX-MIN"] + } + self.DITHER_OFFSETS = { + "BOX-UVIS": [(-11.071, -17.744), (11.947, -17.457), + (11.071, 17.744), (-11.947, 17.457)], + "BLOB": [(-1.930, -1.729), (1.930, 1.729)], + "SUBPIXEL": { + "NONE": [(0.000, 0.000)], + "BOX-MIN": [(0.000, 0.000), (0.542, 0.182), (0.339, 0.485), (-0.203, 0.303)], + "LINE": [(0.000, 0.000), (0.474, 0.424)], + "LINE-3PT": [(0.000, 0.000), (0.451, 0.403), (0.902, 0.806)] + } + } + # Initialize superclass super().__init__(**kwargs) @@ -51,170 +212,3 @@ def handleDithers(cls, form): INSTRUMENT = "WFI" DETECTOR = "WFI" - - OFFSET_NAMES = ("SCA01", "SCA02", "SCA03", "SCA04", "SCA05", "SCA06", - "SCA07", "SCA08", "SCA09", "SCA10", "SCA11", "SCA12", - "SCA13", "SCA14", "SCA15", "SCA16", "SCA17", "SCA18") - N_OFFSET = {1: (0.0, 0.0, 0.0), 2: (0.0, 464.31, 0.0), 3: (0.0, 995.32, 0.0), - 4: (-533.06, 131.74, 0.0), 5: (-533.06, 596.05, 0.0), 6: (-533.06, 1127.06, 0.0), - 7: (-1066.12, 338.76, 0.0), 8: (-1066.12, 803.07, 0.0), 9: (-1066.12, 1334.08, 0.0), - 10: (533.06, 0.0, 0.0), 11: (533.06, 464.31, 0.0), 12: (533.06, 995.32, 0.0), - 13: (1066.12, 131.74, 0.0), 14: (1066.12, 596.05, 0.0), 15: (1066.12, 1127.06, 0.0), - 16: (1599.18, 338.76, 0.0), 17: (1599.18, 803.07, 0.0), 18: (1599.18, 1334.08, 0.0)} - - # Each detector is 450.56 x 450.56", and according to the WFIRST-STSCI-TR1506A - # Document, the offsets are (27.5”) in the long direction and (94.2" and 27.5") - # in the short direction. Assuming 0 offset for SCA01. Then RA offsets are equal - # to 478.06", and DEC offsets are 478.06" for the lower SCAs and 544.76 for the - # upper ones. Assuming no rotation offset. 
- - # Offsets are in (arcseconds_ra,arcseconds_dec,degrees_angle) - ''' - DETECTOR_OFFSETS = (# SCA01 SCA02 SCA03 SCA04 - ((2*27.5+478.06)*( 0),(478.06-27.5/2)*( 0),0.), ((2*27.5+478.06)*( 0),(478.06-27.5/2)*( 1),0.), ((2*27.5+478.06)*( 0),(478.06-27.5/2)*( 2)+66.7,0.), ((2*27.5+478.06)*(-1),(478.06-27.5/2)*( 0)+188.2*0.7,0.), - # SCA05 SCA06 SCA07 SCA08 - ((2*27.5+478.06)*(-1),(478.06-27.5/2)*( 1)+188.2*0.7,0.), ((2*27.5+478.06)*(-1),(478.06-27.5/2)*( 2)+66.7+188.2*0.7,0.), ((2*27.5+478.06)*(-2),(478.06-27.5/2)*( 0)+376.4*0.9,0.), ((2*27.5+478.06)*(-2),(478.06-27.5/2)*( 1)+376.4*0.9,0.), - # SCA09 SCA10 SCA11 SCA12 - ((2*27.5+478.06)*(-2),(478.06-27.5/2)*( 2)+66.7+376.4*0.9,0.), ((2*27.5+478.06)*( 1),(478.06-27.5/2)*( 0),0.), ((2*27.5+478.06)*( 1),(478.06-27.5/2)*( 1),0.), ((2*27.5+478.06)*( 1),(478.06-27.5/2)*( 2)+66.7,0.), - # SCA13 SCA14 SCA15 SCA16 - ((2*27.5+478.06)*( 2),(478.06-27.5/2)*( 0)+188.2*0.7,0.), ((2*27.5+478.06)*( 2),(478.06-27.5/2)*( 1)+188.2*0.7,0.), ((2*27.5+478.06)*( 2),(478.06-27.5/2)*( 2)+66.7+188.2*0.7,0.), ((2*27.5+478.06)*( 3),(478.06-27.5/2)*( 0)+376.4*0.9,0.), - # SCA17 SCA18 - ((2*27.5+478.06)*( 3),(478.06-27.5/2)*( 1)+376.4*0.9,0.), ((2*27.5+478.06)*( 3),(478.06-27.5/2)*( 2)+66.7+376.4*0.9,0.)) - ''' - - DETECTOR_OFFSETS = ( # SCA01 SCA02 SCA03 - (0.0, 0.0, 0.0), (0.0, 464.31, 0.0), (0.0, 995.32, 0.0), - # SCA04 SCA05 SCA06 - (-533.06, 131.74, 0.0), (-533.06, 596.05, 0.0), (-533.06, 1127.06, 0.0), - # SCA07 SCA08 SCA09 - (-1066.12, 338.76, 0.0), (-1066.12, 803.07, 0.0), (-1066.12, 1334.08, 0.0), - # SCA10 SCA11 SCA12 - (533.06, 0.0, 0.0), (533.06, 464.31, 0.0), (533.06, 995.32, 0.0), - # SCA13 SCA14 SCA15 - (1066.12, 131.74, 0.0), (1066.12, 596.05, 0.0), (1066.12, 1127.06, 0.0), - # SCA16 SCA17 SCA18 - (1599.18, 338.76, 0.0), (1599.18, 803.07, 0.0), (1599.18, 1334.08, 0.0)) - - # This is a set of offsets derived from "WFIRST-STSCI-TR1506A" - # - # Since that document doesn't actually cover the column offsets, they are - # assumed to be 188.2" e.g. between 08 and 07, and 376.4" e.g. between - # 09 and 08 (and the same for each other column)(see ASCII diagrams). - # Further, it assumes no rotation or imperfection. - # 09 18 - # - # 08 06 15 17 - # 07 03 12 16 - # 05 14 - # 04 02 11 13 - # 01 10 - - # The detector positions WRT to WFI Local FOV are as follows (with the - # X-axis pointing right and the Y-axis pointing up): - # - # 01 10 - # 04 02 11 13 - # 05 14 - # 07 03 12 16 - # 08 06 15 17 - # - # 09 18 - # - # with the large gaps (e.g. 09-08) showing the larger offset between the - # bottom row of detectors and the top two rows, or, in the case of the - # offsets between columns (e.g. the Y positions of 07 vs. 04 vs. 01) the - # staggered offsets of the columns. - # - # The following positions come from the spreadsheet - # "GRISM off orders position_Zernikes_efficiency data_v4_20200424.xlsx" on - # the "WSM-GRISM" tab, with values in mm. Note that for all detectors values - # are taken from Field Position 1 (centre) for a wavelength of 1 micron. - # - # These are referred to as FPA positions because they are the detector - # positions on the focal plane array, albeit with the Y axis inverted to - # yield WFI-local co-ordinates rather than FPA-local co-ordinates. - - FPA_GLOBAL_ROTATION = -120. # degrees - - # Values in mm. From "FPA Position" X and Y columns (F/G in xlsx) - # Specifically, 1.55 um is supposed to be the "undistorted" wavelength, so - # I'm using the detector centre at 1.55um. 
- DETECTOR_FPA = ( # SCA01 SCA02 SCA03 - (-22.029, -11.956), (-22.181, 36.421), (-22.331, 80.749), - # SCA04 SCA05 SCA06 - (-65.558, -20.505), (-66.055, 27.856), (-66.543, 71.928), - # SCA07 SCA08 SCA09 - (-109.05, -41.316), (-109.83, 7.001), (-110.97, 50.358), - # SCA10 SCA11 SCA12 - (21.509, -11.955), (21.65, 36.42), (21.787, 80.747), - # SCA13 SCA14 SCA15 - (65.03, -20.503), (65.517, 27.854), (65.993, 71.923), - # SCA16 SCA17 SCA18 - (108.507, -41.309), (109.282, 7.002), (110.409, 50.353), - ) - - # From WFIRST-STScI-TR1506A(3).pdf, 2.5mm=27.5", and 8.564mm=94.2" - DETECTOR_MM_ARCSEC = 11. - - # I can't currently figure out rotation angles (or rather figure out which - # rotation angle to use), so I'm setting everything to just zero for now. - DETECTOR_ROTATION = (0., 0., 0., 0., 0., 0., 0., 0., 0., - 0., 0., 0., 0., 0., 0., 0., 0., 0.) - - # N_DETECTORS is a set of options on how many of the instrument's detectors you want to use - N_DETECTORS = [1] - INSTRUMENT_OFFSET = (0., 0., 0.) # Presumably there is one, but not determined - DETECTOR_SIZE = (4088, 4088) # pixels - PIXEL_SIZE = 10.0 # um (Assume for now) - SCALE = [0.11, 0.11] # Assume for now - FILTERS = ('F062', 'F087', 'F106', 'F129', 'F158', 'F184', 'F146') - DEFAULT_FILTER = 'F184' # Assume for now - - PSF_INSTRUMENT = "WFI" - - # Reference Files - FLATFILE = 'err_flat_wfi.fits' # Use for the moment - DARKFILE = 'err_rdrk_wfi.fits' # IREF, IHB (use for the moment) - - # Background Values - BACKGROUND = {'none': {'F062': 0., 'F087': 0., 'F106': 0., 'F129': 0., 'F158': 0., 'F184': 0., 'F146': 0.}, - 'avg': {'F062': 1.401E+00, 'F087': 1.401E+00, 'F106': 1.401E+00, 'F129': 7.000E-01, - 'F158': 7.521E-01, 'F184': 8.500E-01, 'F146': 7.000E-01}} - BACKGROUNDS_V = ['none', 'avg', 'med', 'max', 'min'] - BACKGROUNDS = ['None', 'Average zodiacal background', 'Median zodiacal background', 'Maximum zodiacal background', 'Minimum zodiacal background'] - BGTEXT = {'none': 'None', 'avg': 'Average zodiacal background', - 'med': 'Median zodiacal background', 'max': 'Maximum zodiacal background', - 'min': 'Minimum zodiacal background', 'custom': 'Custom thermal background rate'} - # PHOTFNU has units of Jy - # For now, just assuming similar PHOTFNU to WFC3IR. - PHOTFNU = {'F062': 3.25e-08, 'F087': 8.87e-08, 'F106': 3.94e-08, 'F129': 3.51e-08, 'F158': 3.13e-08, 'F184': 1.18e-07, 'F146': 1.63e-08} - # PHOTPLAM has units of um - # For now, just put them in the middle - PHOTPLAM = {'F062': 0.6700, 'F087': 0.8735, 'F106': 1.0595, 'F129': 1.2925, 'F158': 1.577, 'F184': 1.5815, 'F146': 1.4635} - # For now, just put in HST-style dithers. 
- DITHERS = ("SUBPIXEL ONLY", "BOX-UVIS", "BLOB") # Assume for now - DITHER_POINTS = { - "SUBPIXEL ONLY": ["0"], - "BOX-UVIS": ["4"], - "BLOB": ["1", "2"] - } - DITHER_SIZE = { - "SUBPIXEL ONLY": ["STABDARD"], - "BOX-UVIS": ["Standard"], - "BLOB": ["Standard"] - } - DITHER_SUBPIXEL = { - "SUBPIXEL ONLY": ["LINE", "LINE-3PT", "BOX-MIN"], - "BOX-UVIS": ["NONE", "LINE", "LINE-3PT", "BOX-MIN"], - "BLOB": ["NONE", "LINE", "LINE-3PT", "BOX-MIN"] - } - DITHER_OFFSETS = { - "BOX-UVIS": [(-11.071, -17.744), (11.947, -17.457), (11.071, 17.744), (-11.947, 17.457)], - "BLOB": [(-1.930, -1.729), (1.930, 1.729)], - "SUBPIXEL": { - "NONE": [(0.000, 0.000)], - "BOX-MIN": [(0.000, 0.000), (0.542, 0.182), (0.339, 0.485), (-0.203, 0.303)], - "LINE": [(0.000, 0.000), (0.474, 0.424)], - "LINE-3PT": [(0.000, 0.000), (0.451, 0.403), (0.902, 0.806)] - } - } diff --git a/stips/tests/test_roman.py b/stips/tests/test_roman.py deleted file mode 100755 index 418a718..0000000 --- a/stips/tests/test_roman.py +++ /dev/null @@ -1,120 +0,0 @@ -from stips.scene_module import SceneModule -from stips.observation_module import ObservationModule - -import pytest - -from tempfile import TemporaryDirectory - - -def create_catalogues(out_path): - star_data = { - 'n_stars': 100, - 'age_low': 1.0e12, 'age_high': 1.0e12, - 'z_low': -2.0, 'z_high': -2.0, - 'imf': 'salpeter', 'alpha': -2.35, - 'binary_fraction': 0.1, - 'distribution': 'invpow', 'clustered': True, - 'radius': 100.0, 'radius_units': 'pc', - 'distance_low': 10.0, 'distance_high': 10.0, - 'offset_ra': 0.0, 'offset_dec': 0.0 - } - - galaxy_data = { - 'n_gals': 25, - 'z_low': 0.0, 'z_high': 0.2, - 'rad_low': 0.01, 'rad_high': 2.0, - 'sb_v_low': 30.0, 'sb_v_high': 25.0, - 'distribution': 'uniform', 'clustered': False, - 'radius': 200.0, 'radius_units': 'arcsec', - 'offset_ra': 0.0, 'offset_dec': 0.0 - } - - scm = SceneModule(out_path=out_path) - stellar_cat_file = scm.CreatePopulation(star_data) - galaxy_cat_file = scm.CreateGalaxies(galaxy_data) - - return stellar_cat_file, galaxy_cat_file - - -def get_default_obs(): - obs = { - 'instrument': 'WFI', - 'filters': ['F158'], - 'detectors': 1, - 'distortion': False, - 'background': 'avg', - 'observations_id': 1, - 'exptime': 1000, - 'offsets': [ - { - 'offset_id': 1, - 'offset_centre': False, - 'offset_ra': 0.5, - 'offset_dec': 0.0, - 'offset_pa': 27.0 - } - ] - } - return obs - - -def test_roman_observation(): - - dir_name = TemporaryDirectory() - - stellar_cat_file, galaxy_cat_file = create_catalogues(dir_name.name) - - obs = get_default_obs() - - obm = ObservationModule(obs, out_path=dir_name.name, cat_path=dir_name.name) - obm.nextObservation() - obm.addCatalogue(stellar_cat_file) - obm.addCatalogue(galaxy_cat_file) - obm.addError() - obm.finalize(mosaic=False) - - -@pytest.mark.veryslow -def test_roman_observation_deluxe(): - - dir_name = TemporaryDirectory() - - stellar_cat_file, galaxy_cat_file = create_catalogues(dir_name.name) - - obs = get_default_obs() - - obm = ObservationModule(obs, out_path=dir_name.name, cat_path=dir_name.name) - obm.nextObservation() - obm.addCatalogue(stellar_cat_file) - obm.addCatalogue(galaxy_cat_file) - obm.addError() - obm.finalize(mosaic=False) - - -obs_data = [ - ( - { - 'filters': ['F106'], - }, - ) -] - - -@pytest.mark.veryslow -@pytest.mark.parametrize(("obs_changes"), obs_data) -def test_obs_parameters(obs_changes): - - dir_name = TemporaryDirectory() - - stellar_cat_file, galaxy_cat_file = create_catalogues(dir_name.name) - - obs = get_default_obs() - for key in obs_changes[0]: - 
-
-    obm = ObservationModule(obs, out_path=dir_name.name, cat_path=dir_name.name)
-    obm.nextObservation()
-    obm.addCatalogue(stellar_cat_file)
-    obm.addCatalogue(galaxy_cat_file)
-    obm.addError()
-    obm.finalize(mosaic=False)
diff --git a/stips/utilities/__init__.py b/stips/utilities/__init__.py
index 5709258..ab27141 100755
--- a/stips/utilities/__init__.py
+++ b/stips/utilities/__init__.py
@@ -1,16 +1,17 @@
 __all__ = ['utilities']
 
 # Local Definitions
-from .utilities import StipsEnvironment
-from .utilities import SetupDataPaths
-from .utilities import DownloadReferenceData
-from .utilities import GetStipsDataDir
-from .utilities import GetStipsData
-from .utilities import SelectParameter
-from .utilities import OffsetPosition
-from .utilities import InstrumentList
-from .utilities import read_metadata
-from .utilities import read_table
-from .utilities import rind
-from .utilities import sersic_lum
 from .DataTable import StipsDataTable
+from .utilities import (StipsEnvironment,
+                        SetupDataPaths,
+                        DownloadReferenceData,
+                        GetStipsDataDir,
+                        GetStipsData,
+                        SelectParameter,
+                        OffsetPosition,
+                        InstrumentList,
+                        read_metadata,
+                        read_table,
+                        rind,
+                        sersic_lum,
+                        get_pandeia_background)
diff --git a/stips/utilities/makePSF.py b/stips/utilities/makePSF.py
index fe937a2..e0279f5 100644
--- a/stips/utilities/makePSF.py
+++ b/stips/utilities/makePSF.py
@@ -25,16 +25,16 @@
 # Convolution Constants
 # Inter Pixel Capacitance
-IPC = np.array([[0.21, 1.62, 0.2],
+IPC = np.array([[0.21, 1.62, 0.20],
                 [1.88, 91.59, 1.87],
                 [0.21, 1.66, 0.22]]) / 100.0
 
 # Convolution array for creating a 4x upscaled ePSF, resulting
 # in the summ of the inner 3 pixels and a corresponding fraction
 # of the edges and corners to create an effective 4x4 pixels.
 EPSF4 = np.array([[0.25, 0.5, 0.5, 0.5, 0.25],
-                  [0.5, 1., 1., 1., 0.5],
-                  [0.5, 1., 1., 1., 0.5],
-                  [0.5, 1., 1., 1., 0.5],
+                  [0.50, 1.0, 1.0, 1.0, 0.50],
+                  [0.50, 1.0, 1.0, 1.0, 0.50],
+                  [0.50, 1.0, 1.0, 1.0, 0.50],
                   [0.25, 0.5, 0.5, 0.5, 0.25]])
 
 # The ePSF is *always* 4x upscaled
@@ -129,7 +129,7 @@ def bicubic(epsf, iy, ix, fx, fy):
     """
     Perform the bi-cubic interpolation of an image from a center pixel
    and a pixel phase. Functions originally obtained from
-    Andrea Bellini's Fortran code
+    Andrea Bellini's Fortran code.
 
     This function is significantly faster than the more complex
     scipy.interpolate.interp2d, which differs from this at the level
diff --git a/stips/utilities/tests/test_makePSF.py b/stips/utilities/tests/test_makePSF.py
new file mode 100755
index 0000000..26c71ae
--- /dev/null
+++ b/stips/utilities/tests/test_makePSF.py
@@ -0,0 +1,357 @@
+from astropy.io import fits
+from scipy import ndimage
+from stips.utilities import makePSF
+from stips.utilities.utilities import StipsEnvironment
+import numpy as np
+import os
+import pytest
+stips_version = StipsEnvironment.__stips__version__
+
+# Basic carryover constants from the original makePSF file
+# This will save time in later calculations
+
+IPC = np.array([[0.21, 1.62, 0.2],
+                [1.88, 91.59, 1.87],
+                [0.21, 1.66, 0.22]]) / 100.0
+
+EPSF4 = np.array([[0.25, 0.5, 0.5, 0.5, 0.25],
+                  [0.5, 1., 1., 1., 0.5],
+                  [0.5, 1., 1., 1., 0.5],
+                  [0.5, 1., 1., 1., 0.5],
+                  [0.25, 0.5, 0.5, 0.5, 0.25]])
+
+PSF_UPSCALE = 4
+
+PSF_BOXSIZE = 44
+PSF_BRIGHT_BOXSIZE = 88
+PSF_EXTRA_BRIGHT_BOXSIZE = 176
+
+PSF_GRID_SIZE = 3
+
+
+# Rounding (just a modification to stop potential future issues)
+def rind(x):
+    return np.round(x).astype(int)
+
+
+# Import the .fits PSF file, which will be the test PSF against which we can run all
+# comparative tests and select the part of the data we'll use to construct the test PSF
+file_dir = os.path.dirname(os.path.abspath(__file__))
+data_dir = os.path.join(file_dir, "..", "..", "data", "psf_WFI_2.0.0_F129_sca01.fits")
+file = fits.open(data_dir)
+
+file_in = file[0].data[0]
+
+# Generate the test PSF using the make_epsf function from makePSF
+# We can now use this basic PSF as a comparative standard against which to check the makePSF code
+
+test_psf = makePSF.make_epsf(file[0].data[0])
+
+# Set a few basic variables for use in testing the bicubic function of makePSF
+
+ix = 1
+iy = 2
+fx = 0.25
+fy = 0.75
+
+# Generate both a bicubic interpolation (to see fractional light output) from the bicubic function,
+# and a fractional light output from the real_psf function
+# These will both be used in later tests; it's more efficient to generate them here
+
+test_phot = makePSF.bicubic(test_psf, iy, ix, fx, fy)
+
+test_rpsf_phot = makePSF.real_psf(10, 15, test_psf,
+                                  psf_center=88, boxsize=PSF_EXTRA_BRIGHT_BOXSIZE)
+
+
+def test_make_epsf_calcs():
+
+    # Running the individual steps of make_epsf, starting with setting the shape of the array.
+    # Empty the array to fill later
+    psf_test_mid = np.zeros_like(file_in)
+
+    # Assuming the PSF is square
+    size_test = file_in.shape[0]
+
+    # Convolve the input file with the EPSF4 array
+    psf_test_mid = ndimage.convolve(file_in, EPSF4, mode='constant', cval=0.0)
+
+    # Rather than individually checking each calculation against the value generated by make_epsf,
+    # we instead run a check on a randomly selected set of the values generated.
+    # This way, we can test a sample set of the calculations,
+    # without overburdening the test by checking all of them.
+
+    # First, apply the correct scaling to the rows and columns
+
+    psf_test_mid[0:size_test, 0] = file_in[0:size_test, 0]*16.0
+
+    psf_test_mid[0:size_test, size_test - 2] = file_in[0:size_test, size_test - 2] * 16.
+
+    psf_test_mid[1, 3:(size_test - PSF_UPSCALE) + 1] = \
+        file_in[1, 3:(size_test - PSF_UPSCALE) + 1] * 16.0
+
+    psf_test_mid[size_test - 2, 3:(size_test - PSF_UPSCALE) + 1] = \
+        file_in[size_test - 2, 3:(size_test - PSF_UPSCALE) + 1] * 16.0
+
+    # Empty the array
+
+    psf_test_out = np.zeros_like(file_in)
+
+    # Run the final calculations for the pixels in the PSF image
+
+    x_range = range(PSF_UPSCALE, size_test-PSF_UPSCALE-1)
+    y_range = range(PSF_UPSCALE, size_test-PSF_UPSCALE-1)
+
+    # Add the IPC to the output PSF
+
+    for idx in x_range:
+        for idy in y_range:
+            pcen = psf_test_mid[idy, idx]
+
+            psf_test_out[idy-PSF_UPSCALE, idx-PSF_UPSCALE] += pcen*IPC[0, 0]
+            psf_test_out[idy-PSF_UPSCALE, idx] += pcen*IPC[0, 1]
+            psf_test_out[idy-PSF_UPSCALE, idx+PSF_UPSCALE] += pcen*IPC[0, 2]
+
+            psf_test_out[idy, idx-PSF_UPSCALE] += pcen*IPC[1, 0]
+            psf_test_out[idy, idx] += pcen*IPC[1, 1]
+            psf_test_out[idy, idx+PSF_UPSCALE] += pcen*IPC[1, 2]
+
+            psf_test_out[idy+PSF_UPSCALE, idx-PSF_UPSCALE] += pcen*IPC[2, 0]
+            psf_test_out[idy+PSF_UPSCALE, idx] += pcen*IPC[2, 1]
+            psf_test_out[idy+PSF_UPSCALE, idx+PSF_UPSCALE] += pcen*IPC[2, 2]
+
+    # Check the generated values at the selected points against the values in the test psf,
+    # generated previously via make_epsf.
+
+    np.testing.assert_allclose(test_psf[0:size_test, 0], psf_test_out[0:size_test, 0])
+
+    np.testing.assert_allclose(test_psf[0:size_test, size_test-2],
+                               psf_test_out[0:size_test, size_test-2])
+
+    np.testing.assert_allclose(test_psf[1, 3:(size_test-PSF_UPSCALE)+1],
+                               psf_test_out[1, 3:(size_test-PSF_UPSCALE)+1])
+
+    np.testing.assert_allclose(test_psf[size_test-2, 3:(size_test-PSF_UPSCALE)+1],
+                               psf_test_out[size_test-2, 3:(size_test-PSF_UPSCALE)+1])
+
+
+def test_bicubic():
+
+    # Run the calculations of the bicubic function "manually".
+
+    A1 = test_psf[2, 1]
+    B1 = (test_psf[2, 2]-test_psf[2, 0])/2
+    C1 = (test_psf[3, 1]-test_psf[1, 1])/2
+    D1 = (test_psf[2, 2]+test_psf[2, 0]-2*A1)/2
+    E1 = (test_psf[3, 2]-A1)
+    F1 = (test_psf[3, 1]+test_psf[1, 1]-2*A1)/2
+    V1 = (A1
+          + B1*(fx)
+          + C1*(fy)
+          + D1*(fx)**2
+          + E1*(fx)*(fy)
+          + F1*(fy)**2)
+
+    # Lower Right Value
+    A2 = test_psf[2, 2]
+    B2 = (test_psf[2, 3]-test_psf[2, 1])/2
+    C2 = (test_psf[3, 2]-test_psf[1, 2])/2
+    D2 = (test_psf[2, 3]+test_psf[2, 1]-2*A2)/2
+    E2 = -(test_psf[3, 1]-A2)
+    F2 = (test_psf[3, 2]+test_psf[1, 2]-2*A2)/2
+    V2 = (A2
+          + B2*(fx-1)
+          + C2*(fy)
+          + D2*(fx-1)**2
+          + E2*(fx-1)*(fy)
+          + F2*(fy)**2)
+
+    # Upper Left Value
+    A3 = test_psf[3, 1]
+    B3 = (test_psf[3, 2]-test_psf[3, 0])/2
+    C3 = (test_psf[4, 1]-test_psf[2, 1])/2
+    D3 = (test_psf[3, 2]+test_psf[3, 0]-2*A3)/2
+    E3 = -(test_psf[2, 2]-A3)
+    F3 = (test_psf[4, 1]+test_psf[2, 1]-2*A3)/2
+    V3 = (A3
+          + B3*(fx)
+          + C3*(fy-1)
+          + D3*(fx)**2
+          + E3*(fx)*(fy-1)
+          + F3*(fy-1)**2)
+
+    # Upper Right Value
+    A4 = test_psf[3, 2]
+    B4 = (test_psf[3, 3]-test_psf[3, 1])/2
+    C4 = (test_psf[4, 2]-test_psf[2, 2])/2
+    D4 = (test_psf[3, 3]+test_psf[3, 1]-2*A4)/2
+    E4 = (test_psf[2, 1]-A4)
+    F4 = (test_psf[4, 2]+test_psf[2, 2]-2*A4)/2
+    V4 = (A4
+          + B4*(fx-1)
+          + C4*(fy-1)
+          + D4*(fx-1)**2
+          + E4*(fx-1)*(fy-1)
+          + F4*(fy-1)**2)
+
+    check_phot = ((1-fx)*(1-fy)*V1
+                  + (fx)*(1-fy)*V2
+                  + (1-fx)*(fy)*V3
+                  + (fx)*(fy)*V4)
+
+    # Check this calculated value against the value generated previously by the bicubic function.
+
+    np.testing.assert_allclose(test_phot, check_phot)
+
+
+def test_real_psf():
+
+    # "Manually" run the calculations for the real_psf function, using the same preset
+    # variables (dx = 10 and dy = 15) that were used to generate the test value from
+    # real_psf previously, so the two results can be compared.
+
+    dx = 10
+    dy = 15
+
+    rx = 88 + 10*4
+    ry = 88 + 15*4
+    ix_rpsf = int(rx)
+    iy_rpsf = int(ry)
+    fx_rpsf = rx-ix_rpsf  # Pixel Phase
+    fy_rpsf = ry-iy_rpsf  # Pixel Phase
+    dd = np.sqrt(dx**2+dy**2)
+
+    check_rpsf_phot = ((1-fx_rpsf)*(1-fy_rpsf)*test_psf[iy_rpsf, ix_rpsf]
+                       + (fx_rpsf)*(1-fy_rpsf)*test_psf[iy_rpsf, ix_rpsf+1]
+                       + (1-fx_rpsf)*(fy_rpsf)*test_psf[iy_rpsf+1, ix_rpsf]
+                       + (fx_rpsf)*(fy_rpsf)*test_psf[iy_rpsf+1, ix_rpsf+1])
+
+    # Check the "manually" calculated value against the value generated by real_psf.
+
+    np.testing.assert_allclose(test_rpsf_phot, check_rpsf_phot)
+
+
+def test_place_source():
+
+    # Place a test source in a blank image, to ensure that this doesn't throw any exceptions.
+    # Just a check to make sure the function runs with no issues.
+
+    image = np.zeros(shape=[512, 512, 3], dtype=np.float64)
+
+    psf_middle = rind((test_psf[0].shape[0]-1)/2)
+
+    PSF_BOXSIZE = np.floor(psf_middle)/PSF_UPSCALE
+
+    test_placement = makePSF.place_source(250, 250, 15, image, test_psf,
+                                          boxsize=PSF_BOXSIZE, psf_center=psf_middle)
+
+
+def test_interpolate():
+
+    # This test is designed to test interpolations of input ePSFs in each quadrant
+    # to ensure the calculations for each quadrant run correctly.
+
+    # Get image size read in from test_psf
+
+    image_size = file_in.shape[0]
+
+    half_image = round(image_size/2)
+
+    # Generate a psf_array; for simplicity, we set the array as test_psf three times
+
+    test_psf_array = [test_psf, test_psf, test_psf]
+
+    # Lower Left Quadrant
+
+    # Set location to the lower left quadrant
+
+    xpix_ll = 68
+    ypix_ll = 68
+
+    # Run interpolate_epsf using the test psf array, the set image size, and the
+    # specified coordinates for this quadrant.
+
+    interp_epsf_ll = makePSF.interpolate_epsf(xpix_ll, ypix_ll, test_psf_array, image_size)
+
+    # "Manually" run the calculations of the interpolate_epsf function.
+
+    xf_ll = ((xpix_ll + 4) / (half_image + 4))
+    yf_ll = ((ypix_ll + 4) / (half_image + 4))
+    check_interp_ll = (xf_ll * yf_ll * test_psf_array[1][1]
+                       + (1 - xf_ll) * (1 - yf_ll) * test_psf_array[0][0]
+                       + (xf_ll) * (1 - yf_ll) * test_psf_array[1][0]
+                       + (1 - xf_ll) * (yf_ll) * test_psf_array[0][1])
+
+    # Upper Left Quadrant
+
+    # Set location to upper left quadrant
+
+    xpix_ul = 68
+    ypix_ul = 108
+
+    # Run interpolate_epsf using the test psf array, the set image size, and the
+    # specified coordinates for this quadrant.
+
+    interp_epsf_ul = makePSF.interpolate_epsf(xpix_ul, ypix_ul, test_psf_array, image_size)
+
+    # "Manually" run the calculations of the interpolate_epsf function.
+
+    xf_ul = ((xpix_ul + 4) / (half_image + 4))
+    yf_ul = ((ypix_ul + 4 - (half_image + 4)) / (half_image + 4))
+    check_interp_ul = (xf_ul * yf_ul * test_psf_array[1][2]
+                       + (1 - xf_ul) * (1 - yf_ul) * test_psf_array[0][1]
+                       + (xf_ul) * (1 - yf_ul) * test_psf_array[1][1]
+                       + (1 - xf_ul) * (yf_ul) * test_psf_array[0][2])
+
+    # Lower Right Quadrant
+
+    # Set location to lower right quadrant
+
+    xpix_lr = 108
+    ypix_lr = 68
+
+    # Run interpolate_epsf using the test psf array, the set image size,
+    # and the specified coordinates for this quadrant.
+
+    interp_epsf_lr = makePSF.interpolate_epsf(xpix_lr, ypix_lr, test_psf_array, image_size)
+
+    # "Manually" run the calculations of the interpolate_epsf function.
+
+    xf_lr = ((xpix_lr + 4 - (half_image + 4)) / (half_image + 4))
+    yf_lr = ((ypix_lr + 4) / (half_image + 4))
+    check_interp_lr = (xf_lr * yf_lr * test_psf_array[2][1]
+                       + (1-xf_lr) * (1-yf_lr) * test_psf_array[1][0]
+                       + (xf_lr) * (1-yf_lr) * test_psf_array[2][0]
+                       + (1-xf_lr) * (yf_lr) * test_psf_array[1][1])
+
+    # Upper Right Quadrant
+
+    # Set location to upper right quadrant.
+
+    xpix_ur = 108
+    ypix_ur = 108
+
+    # Run interpolate_epsf using the test psf array, the set image size,
+    # and the specified coordinates for this quadrant.
+
+    interp_epsf_ur = makePSF.interpolate_epsf(xpix_ur, ypix_ur, test_psf_array, image_size)
+
+    # "Manually" run the calculations of the interpolate_epsf function.
+
+    xf_ur = ((xpix_ur + 4 - (half_image + 4)) / (half_image + 4))
+    yf_ur = ((ypix_ur + 4 - (half_image + 4)) / (half_image + 4))
+    check_interp_ur = (xf_ur * yf_ur * test_psf_array[2][2]
+                       + (1-xf_ur) * (1-yf_ur) * test_psf_array[1][1]
+                       + (xf_ur) * (1-yf_ur) * test_psf_array[2][1]
+                       + (1-xf_ur)*(yf_ur)*test_psf_array[1][2])
+
+    # Check that each interpolation in each quadrant generated by
+    # interpolate_epsf matches the check value.
+
+    np.testing.assert_allclose(interp_epsf_ll, check_interp_ll, atol=0.001)
+
+    np.testing.assert_allclose(interp_epsf_ul, check_interp_ul, atol=0.001)
+
+    np.testing.assert_allclose(interp_epsf_lr, check_interp_lr, atol=0.001)
+
+    np.testing.assert_allclose(interp_epsf_ur, check_interp_ur, atol=0.001)
diff --git a/stips/utilities/utilities.py b/stips/utilities/utilities.py
index 5e6fb99..523d785 100755
--- a/stips/utilities/utilities.py
+++ b/stips/utilities/utilities.py
@@ -29,6 +29,66 @@ def rind(x):
     """
     return np.round(x).astype(int)
 
+
+def get_pandeia_background(wfi_filter, webapp=False):
+    """
+    Import Pandeia functions to calculate the image background in a given
+    filter, in units of electrons per second.
+
+    Parameters
+    ----------
+    wfi_filter : str
+        Name of WFI filter
+    webapp : bool
+        Toggle strict API checking
+
+    Returns
+    -------
+    total_back : float
+        Total background in units of electrons per second
+    """
+    from pandeia.engine.calc_utils import build_default_calc
+    from pandeia.engine.etc3D import setup, Scene
+    from pandeia.engine.observation import Observation
+    from pandeia.engine.signal import DetectorSignal
+    from pandeia.engine.background import Background
+    import scipy.signal as sg
+
+    # Create default configuration file from Pandeia
+    calc_input = build_default_calc('roman', 'wfi', 'imaging')
+    calc_input['configuration']['instrument']['filter'] = wfi_filter.lower()
+
+    # Setup Observation
+    calc_config, instrument, strategy, scene_configuration, background, background_level, warnings = setup(calc_input, webapp=webapp)
+
+    # Create Scene
+    scene = Scene('roman', input=scene_configuration, webapp=webapp)
+
+    # Create Observation
+    observation = Observation(
+        scene=scene,
+        instrument=instrument,
+        strategy=strategy,
+        background=background,
+        background_level=background_level,
+        webapp=webapp
+    )
+
+    # Calculate signal from background, without IPC
+    observation.instrument.the_detector.ipc = False
+    my_detector_signal = DetectorSignal(observation, calc_config=calc_config, webapp=webapp, empty_scene=True)
+
+    # Extract background rate
+    rate_plus_bg = my_detector_signal.rate_plus_bg_list[0]
+    rate_per_pix = rate_plus_bg['fp_pix']
+
+    # Include IPC effects
+    kernel = observation.instrument.get_ipc_kernel()
+    detector = sg.fftconvolve(rate_per_pix, kernel, mode='same')
+
+    # Total background
+    total_back = np.mean(detector[2:-2, 2:-2])
+
+    return total_back
 
 def remove_subfolder(tar_file, subfolder):
     """
@@ -265,7 +325,7 @@ def DownloadReferenceData():
 
     # webbpsf
     print("Checking webbpsf data")
-    webbpsf_url = "https://stsci.box.com/shared/static/34o0keicz2iujyilg4uz617va46ks6u9.gz"
+    webbpsf_url = "https://stsci.box.com/shared/static/t90gqazqs82d8nh25249oq1obbjfstq8.gz"
     webbpsf_data_path = os.environ[GetParameter("webbpsf_data_name", use_data=False)]
     webbpsf_data_file = "webbpsf_data.tar.gz"
     if not os.path.isdir(webbpsf_data_path):
@@ -278,14 +338,14 @@ def DownloadReferenceData():
 
     # pandeia
     print("Checking pandeia data")
-    pandeia_data_file = "pandeia_data-1.7.tar.gz"
-    pandeia_url = "https://stsci.box.com/shared/static/ycbm34uxhzafgb7te74vyl2emnr1mdty.gz"
+    pandeia_data_file = "pandeia_data-3.0.tar.gz"
+    pandeia_url = "https://stsci.box.com/shared/static/3n9e05mxkjzquxaq1gl6nqp6l0ksz5c2.gz"
     pandeia_data_path = os.environ[GetParameter("pandeia_data_name", use_data=False)]
     if not os.path.isdir(pandeia_data_path):
        print("Downloading pandeia data to {}".format(pandeia_data_path))
         os.makedirs(pandeia_data_path)
         get_compressed_file(pandeia_url, pandeia_data_file, pandeia_data_path,
-                            "pandeia_data-1.7_roman/")
+                            "pandeia_data-3.0_roman_rc3/")
     else:
         print("Found at {}".format(pandeia_data_path))
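A minimal usage sketch for the new get_pandeia_background helper (an illustration only; it assumes the pandeia reference data above has been downloaded and that the pandeia.engine package is importable in the current environment). The helper lower-cases the filter name internally, so 'F129' and 'f129' are equivalent:

    from stips.utilities import get_pandeia_background

    # Pandeia-estimated background for the WFI F129 filter, in electrons per second
    # (a mean over detector pixels, per the implementation in the patch above).
    background = get_pandeia_background('F129')
    print("F129 background: {:.4f} e-/s".format(background))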